diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 120000 index 0000000..5aaadf4 --- /dev/null +++ b/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver.js \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index aa4e42e..2e38496 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -1,14 +1,39 @@ { - "name": "docker-slim-action", - "version": "1.0.0", - "lockfileVersion": 2, + "name": "@kitabisa/docker-slim-action", + "version": "1.0.3", + "lockfileVersion": 3, "requires": true, "packages": { + "node_modules/@actions/cache": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz", + "integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==", + "dependencies": { + "@actions/core": "^1.10.0", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.1.1", + "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.13.0", + "semver": "^6.3.1", + "uuid": "^3.3.3" + } + }, + "node_modules/@actions/cache/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@actions/core": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", - "dev": true, "dependencies": { "@actions/http-client": "^2.0.1", "uuid": "^8.3.2" @@ -18,32 +43,275 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", - "dev": true, "dependencies": { "@actions/io": "^1.0.1" } }, + "node_modules/@actions/glob": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", + "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } + }, "node_modules/@actions/http-client": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz", - "integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==", - "dev": true, + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", + "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", "dependencies": { - "tunnel": "^0.0.6" + "tunnel": "^0.0.6", + "undici": "^5.25.4" } }, "node_modules/@actions/io": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz", - "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==", - "dev": true + "integrity": 
"sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==" + }, + "node_modules/@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-auth": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.7.1.tgz", + "integrity": "sha512-dyeQwvgthqs/SlPVQbZQetpslXceHd4i5a7M/7z/lGEAVwnSluabnQOjF2/dk/hhWgMISusv1Ytp4mQ8JNy62A==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.1.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-auth/node_modules/@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-http": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.4.tgz", + "integrity": "sha512-Fok9VVhMdxAFOtqiiAtg74fL0UJkt0z3D+ouUUxcRLzZNBioPRAMJFVxiWoJljYpXsRi4GDQHzQHDc9AiYaIUQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/core-util": "^1.1.1", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/core-http/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@azure/core-lro": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.1.tgz", + "integrity": "sha512-kXSlrNHOCTVZMxpXNRqzgh9/j4cnNXU5Hf2YjMyjddRhCXFiFRzmNaqwN+XO9rGTsCOIaaG7M67zZdyliXZG9g==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-paging": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.1.tgz", + "integrity": "sha512-3tKIQXSU3mlN+ITz0m2pXLnKK3oQ6/EVcW8ud011Iq+M0rx6Wnm7NUEpoMeOAEedeKlPtemrQzO6YWoDR71O5w==", + "dependencies": { + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-tracing": { + "version": "1.0.0-preview.13", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", + "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-util": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.8.1.tgz", + "integrity": "sha512-L3voj0StUdJ+YKomvwnTv7gHzguJO+a6h30pmmZdRprJCM+RJlGMPxzuh4R7lhQu1jNmEtaHX5wvTgWLDAmbGQ==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-util/node_modules/@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/logger": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.1.tgz", + "integrity": "sha512-/+4TtokaGgC+MnThdf6HyIH9Wrjp+CnCn3Nx3ggevN7FFjjNyjqg0yLlc2i9S+Z2uAzI8GYOo35Nzb1MhQ89MA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/ms-rest-js": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz", + "integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.5.0" + } + }, + "node_modules/@azure/ms-rest-js/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + }, + "node_modules/@azure/storage-blob": { + "version": "12.17.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.17.0.tgz", + "integrity": "sha512-sM4vpsCpcCApagRW5UIjQNlNylo02my2opgp0Emi8x888hZUvJ3dN69Oq20cEGXkMUWnoCrBaB0zyS3yeB87sQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^3.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.8.0.tgz", + "integrity": "sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==", + "engines": { + "node": ">=8.0.0" + } }, 
"node_modules/@types/node": { "version": "18.14.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", - "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==", - "dev": true, - "optional": true + "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" + }, + "node_modules/@types/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, + "node_modules/@types/node-fetch/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/tunnel": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", + "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", + "dependencies": { + "@types/node": "*" + } }, "node_modules/@types/yauzl": { "version": "2.10.0", @@ -55,6 +323,36 @@ "@types/node": "*" } }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", @@ -73,6 +371,22 @@ "node": ">=10" } }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, "node_modules/debug": { 
"version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -90,6 +404,14 @@ } } }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -99,6 +421,22 @@ "once": "^1.4.0" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, "node_modules/extract-zip": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", @@ -128,6 +466,19 @@ "pend": "~1.2.0" } }, + "node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, "node_modules/fs": { "version": "0.0.1-security", "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz", @@ -185,6 +536,36 @@ "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==", "dev": true }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/minipass": { "version": "4.2.4", "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz", @@ -237,6 +618,25 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || 
>=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -272,7 +672,6 @@ "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true, "engines": { "node": ">= 0.6.0" } @@ -287,6 +686,19 @@ "once": "^1.3.1" } }, + "node_modules/sax": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz", + "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/tar": { "version": "6.1.13", "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", @@ -304,15 +716,35 @@ "node": ">=10" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "dev": true, "engines": { "node": ">=0.6.11 <=0.7.0 || >=0.7.3" } }, + "node_modules/undici": { + "version": "5.28.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", + "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, "node_modules/util": { "version": "0.10.4", "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", @@ -326,17 +758,50 @@ "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, "bin": { "uuid": "dist/bin/uuid" } }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "dev": true }, + 
"node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/node_modules/@actions/cache/LICENSE.md b/node_modules/@actions/cache/LICENSE.md new file mode 100644 index 0000000..dbae2ed --- /dev/null +++ b/node_modules/@actions/cache/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/cache/README.md b/node_modules/@actions/cache/README.md new file mode 100644 index 0000000..5518503 --- /dev/null +++ b/node_modules/@actions/cache/README.md @@ -0,0 +1,51 @@ +# `@actions/cache` + +> Functions necessary for caching dependencies and build outputs to improve workflow execution time. + +See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows) for how caching works. + +Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB. + +## Usage + +This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache). + +#### Save Cache + +Saves a cache containing the files in `paths` using the `key` provided. The files would be compressed using zstandard compression algorithm if zstd is installed, otherwise gzip is used. Function returns the cache id if the cache was saved succesfully and throws an error if cache upload fails. 
+ +```js +const cache = require('@actions/cache'); +const paths = [ + 'node_modules', + 'packages/*/node_modules/' +] +const key = 'npm-foobar-d5ea0750' +const cacheId = await cache.saveCache(paths, key) +``` + +#### Restore Cache + +Restores a cache based on `key` and `restoreKeys` to the `paths` provided. Function returns the cache key for cache hit and returns undefined if cache not found. + +```js +const cache = require('@actions/cache'); +const paths = [ + 'node_modules', + 'packages/*/node_modules/' +] +const key = 'npm-foobar-d5ea0750' +const restoreKeys = [ + 'npm-foobar-', + 'npm-' +] +const cacheKey = await cache.restoreCache(paths, key, restoreKeys) +``` + +##### Cache segment restore timeout + +A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail-fast, previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner were used). Sometimes, a segment download gets stuck which causes the workflow job to be stuck forever and fail. Version `v3.0.4` of cache package introduces a segment download timeout. The segment download timeout will allow the segment download to get aborted and hence allow the job to proceed with a cache miss. + +Default value of this timeout is 10 minutes (starting `v3.2.1` and higher, previously 60 minutes in versions between `v.3.0.4` and `v3.2.0`, both included) and can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with timeout value in minutes. + + diff --git a/node_modules/@actions/cache/lib/cache.d.ts b/node_modules/@actions/cache/lib/cache.d.ts new file mode 100644 index 0000000..4658366 --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.d.ts @@ -0,0 +1,34 @@ +import { DownloadOptions, UploadOptions } from './options'; +export declare class ValidationError extends Error { + constructor(message: string); +} +export declare class ReserveCacheError extends Error { + constructor(message: string); +} +/** + * isFeatureAvailable to check the presence of Actions cache service + * + * @returns boolean return true if Actions cache service feature is available, otherwise false + */ +export declare function isFeatureAvailable(): boolean; +/** + * Restores cache from keys + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform + * @returns string returns the key for the cache hit, otherwise returns undefined + */ +export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions, enableCrossOsArchive?: boolean): Promise; +/** + * Saves a list of files with the specified key + * + * @param paths a list of file paths to be cached + * @param key an explicit key for restoring the cache + * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform + * @param options cache upload options + * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails + */ +export declare function saveCache(paths: string[], key: string, options?: UploadOptions, enableCrossOsArchive?: boolean): Promise; diff --git 
a/node_modules/@actions/cache/lib/cache.js b/node_modules/@actions/cache/lib/cache.js new file mode 100644 index 0000000..9d636aa --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.js @@ -0,0 +1,235 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; +const core = __importStar(require("@actions/core")); +const path = __importStar(require("path")); +const utils = __importStar(require("./internal/cacheUtils")); +const cacheHttpClient = __importStar(require("./internal/cacheHttpClient")); +const tar_1 = require("./internal/tar"); +class ValidationError extends Error { + constructor(message) { + super(message); + this.name = 'ValidationError'; + Object.setPrototypeOf(this, ValidationError.prototype); + } +} +exports.ValidationError = ValidationError; +class ReserveCacheError extends Error { + constructor(message) { + super(message); + this.name = 'ReserveCacheError'; + Object.setPrototypeOf(this, ReserveCacheError.prototype); + } +} +exports.ReserveCacheError = ReserveCacheError; +function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); + } +} +function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + } + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + } +} +/** + * isFeatureAvailable to check the presence of Actions cache service + * + * @returns boolean return true if Actions cache service feature is available, otherwise false + */ +function isFeatureAvailable() { + return !!process.env['ACTIONS_CACHE_URL']; 
+} +exports.isFeatureAvailable = isFeatureAvailable; +/** + * Restores cache from keys + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform + * @returns string returns the key for the cache hit, otherwise returns undefined + */ +function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core.debug('Resolved Keys:'); + core.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + const compressionMethod = yield utils.getCompressionMethod(); + let archivePath = ''; + try { + // path are needed to compute version + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod, + enableCrossOsArchive + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + // Cache not found + return undefined; + } + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheEntry.cacheKey; + } + archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + // Download the cache from the cache entry + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core.info('Cache restored successfully'); + return cacheEntry.cacheKey; + } + catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } + else { + // Supress all non-validation cache related errors because caching should be optional + core.warning(`Failed to restore: ${error.message}`); + } + } + finally { + // Try to delete the archive to save space + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + return undefined; + }); +} +exports.restoreCache = restoreCache; +/** + * Saves a list of files with the specified key + * + * @param paths a list of file paths to be cached + * @param key an explicit key for restoring the cache + * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform + * @param options cache upload options + * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails + */ +function saveCache(paths, key, options, enableCrossOsArchive = false) { + var _a, _b, _c, _d, _e; + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + checkKey(key); + const compressionMethod = yield utils.getCompressionMethod(); + 
let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.debug(`File Size: ${archiveFileSize}`); + // For GHES, this check will take place in ReserveCache API with enterprise file size limit + if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + core.debug('Reserving Cache'); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + enableCrossOsArchive, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } + else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } + else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); + } + core.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, options); + } + catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } + else if (typedError.name === ReserveCacheError.name) { + core.info(`Failed to save: ${typedError.message}`); + } + else { + core.warning(`Failed to save: ${typedError.message}`); + } + } + finally { + // Try to delete the archive to save space + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + return cacheId; + }); +} +exports.saveCache = saveCache; +//# sourceMappingURL=cache.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/cache.js.map b/node_modules/@actions/cache/lib/cache.js.map new file mode 100644 index 0000000..8e24644 --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cache.js","sourceRoot":"","sources":["../src/cache.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,2CAA4B;AAC5B,6DAA8C;AAC9C,4EAA6D;AAC7D,wCAA6D;AAG7D,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CACF;AAND,0CAMC;AAED,MAAa,iBAAkB,SAAQ,KAAK;IAC1C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,mBAAmB,CAAA;QAC/B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,iBAAiB,CAAC,SAAS,CAAC,CAAA;IAC1D,CAAC;CACF;AAND,8CAMC;AAED,SAAS,UAAU,CAAC,KAAe;IACjC,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAChC,MAAM,IAAI,eAAe,CACvB,wEAAwE,CACzE,CAAA;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,GAAW;IAC3B,IAAI,GAAG,CAAC,MAAM,GAAG,GAAG,EAAE;QACpB,MAAM,IAAI,eAAe,CACvB,yBAAyB,GAAG,wCAAwC,CACrE,CAAA;KACF;IACD,MAAM,KAAK,GAAG,SAAS,CAAA;IACvB,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACpB,MAAM,IAAI,eAAe,CACvB,yBAAyB,GAAG,yBAAyB,CACtD,CAAA;KACF;AACH,CAAC;AAED;;;;GAIG;AAEH,SAAgB,kBAAkB;IAChC,OAAO,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAA;AAC3C,CAAC;AAFD,gDAEC;AAED;;;;;;;;;GASG;AACH,SAAsB,YAAY,CAChC,KAAe,EACf,UAAkB,EAClB,WAAsB,EACtB,OAAyB,EACzB,oBAAoB,GAAG,KAAK;;QAE5B,UAAU,CAAC,KAAK,CAAC,CAAA;QAEjB,WAAW,GAAG,WAAW,IAAI,EAAE,CAAA;QAC/B,MAAM,IAAI,GAAG,CAAC,UAAU,EAAE,GAAG,WAAW,CAAC,CAAA;QAEzC,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAA;QAC5B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;QAEhC,IAAI,IAAI,CAAC,MAAM,GAAG,EAAE,EAAE;YACpB,MAAM,IAAI,eAAe,CACvB,4DAA4D,CAC7D,CAAA;SACF;QACD,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;YACtB,QAAQ,CAAC,GAAG,CAAC,CAAA;SACd;QAED,MAAM,iBAAiB,GAAG,MAAM,KAAK,CAAC,oBAAoB,EAAE,CAAA;QAC5D,IAAI,WAAW,GAAG,EAAE,CAAA;QACpB,IAAI;YACF,qCAAqC;YACrC,MAAM,UAAU,GAAG,MAAM,eAAe,CAAC,aAAa,CAAC,IAAI,EAAE,KAAK,EAAE;gBAClE,iBAAiB;gBACjB,oBAAoB;aACrB,CAAC,CAAA;YACF,IAAI,CAAC,CAAA,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,eAAe,CAAA,EAAE;gBAChC,kBAAkB;gBAClB,OAAO,SAAS,CAAA;aACjB;YAED,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,EAAE;gBACvB,IAAI,CAAC,IAAI,CAAC,iCAAiC,CAAC,CAAA;gBAC5C,OAAO,UAAU,CAAC,QAAQ,CAAA;aAC3B;YAED,WAAW,GAAG,IAAI,CAAC,IAAI,CACrB,MAAM,KAAK,CAAC,mBAAmB,EAAE,EACjC,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAC1C,CAAA;YACD,IAAI,CAAC,KAAK,CAAC,iBAAiB,WAAW,EAAE,CAAC,CAAA;YAE1C,0CAA0C;YAC1C,MAAM,eAAe,CAAC,aAAa,CACjC,UAAU,CAAC,eAAe,EAC1B,WAAW,EACX,OAAO,CACR,CAAA;YAED,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;gBAClB,MAAM,IAAA,aAAO,EAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;aAC9C;YAED,MAAM,eAAe,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YACpE,IAAI,CAAC,IAAI,CACP,gBAAgB,IAAI,CAAC,KAAK,CACxB,eAAe,GAAG,CAAC,IA
AI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,KAAK,CAC9B,CAAA;YAED,MAAM,IAAA,gBAAU,EAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;YAChD,IAAI,CAAC,IAAI,CAAC,6BAA6B,CAAC,CAAA;YAExC,OAAO,UAAU,CAAC,QAAQ,CAAA;SAC3B;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,UAAU,GAAG,KAAc,CAAA;YACjC,IAAI,UAAU,CAAC,IAAI,KAAK,eAAe,CAAC,IAAI,EAAE;gBAC5C,MAAM,KAAK,CAAA;aACZ;iBAAM;gBACL,qFAAqF;gBACrF,IAAI,CAAC,OAAO,CAAC,sBAAuB,KAAe,CAAC,OAAO,EAAE,CAAC,CAAA;aAC/D;SACF;gBAAS;YACR,0CAA0C;YAC1C,IAAI;gBACF,MAAM,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAA;aACpC;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,KAAK,CAAC,6BAA6B,KAAK,EAAE,CAAC,CAAA;aACjD;SACF;QAED,OAAO,SAAS,CAAA;IAClB,CAAC;CAAA;AAxFD,oCAwFC;AAED;;;;;;;;GAQG;AACH,SAAsB,SAAS,CAC7B,KAAe,EACf,GAAW,EACX,OAAuB,EACvB,oBAAoB,GAAG,KAAK;;;QAE5B,UAAU,CAAC,KAAK,CAAC,CAAA;QACjB,QAAQ,CAAC,GAAG,CAAC,CAAA;QAEb,MAAM,iBAAiB,GAAG,MAAM,KAAK,CAAC,oBAAoB,EAAE,CAAA;QAC5D,IAAI,OAAO,GAAG,CAAC,CAAC,CAAA;QAEhB,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,YAAY,CAAC,KAAK,CAAC,CAAA;QAClD,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;QAC1B,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,EAAE,CAAC,CAAA;QAE3C,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAC3B,MAAM,IAAI,KAAK,CACb,qHAAqH,CACtH,CAAA;SACF;QAED,MAAM,aAAa,GAAG,MAAM,KAAK,CAAC,mBAAmB,EAAE,CAAA;QACvD,MAAM,WAAW,GAAG,IAAI,CAAC,IAAI,CAC3B,aAAa,EACb,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAC1C,CAAA;QAED,IAAI,CAAC,KAAK,CAAC,iBAAiB,WAAW,EAAE,CAAC,CAAA;QAE1C,IAAI;YACF,MAAM,IAAA,eAAS,EAAC,aAAa,EAAE,UAAU,EAAE,iBAAiB,CAAC,CAAA;YAC7D,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;gBAClB,MAAM,IAAA,aAAO,EAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;aAC9C;YACD,MAAM,aAAa,GAAG,EAAE,GAAG,IAAI,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,sBAAsB;YACpE,MAAM,eAAe,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YACpE,IAAI,CAAC,KAAK,CAAC,cAAc,eAAe,EAAE,CAAC,CAAA;YAE3C,2FAA2F;YAC3F,IAAI,eAAe,GAAG,aAAa,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,EAAE;gBACtD,MAAM,IAAI,KAAK,CACb,kBAAkB,IAAI,CAAC,KAAK,CAC1B,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,+CAA+C,CACxE,CAAA;aACF;YAED,IAAI,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAA;YAC7B,MAAM,oBAAoB,GAAG,MAAM,eAAe,CAAC,YAAY,CAC7D,GAAG,EACH,KAAK,EACL;gBACE,iBAAiB;gBACjB,oBAAoB;gBACpB,SAAS,EAAE,eAAe;aAC3B,CACF,CAAA;YAED,IAAI,MAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,MAAM,0CAAE,OAAO,EAAE;gBACzC,OAAO,GAAG,MAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,MAAM,0CAAE,OAAO,CAAA;aAChD;iBAAM,IAAI,CAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,UAAU,MAAK,GAAG,EAAE;gBACnD,MAAM,IAAI,KAAK,CACb,MAAA,MAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,KAAK,0CAAE,OAAO,mCAClC,kBAAkB,IAAI,CAAC,KAAK,CAC1B,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,mDAAmD,CAC9E,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,iBAAiB,CACzB,oCAAoC,GAAG,2DAA2D,MAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,KAAK,0CAAE,OAAO,EAAE,CACzI,CAAA;aACF;YAED,IAAI,CAAC,KAAK,CAAC,qBAAqB,OAAO,GAAG,CAAC,CAAA;YAC3C,MAAM,eAAe,CAAC,SAAS,CAAC,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,CAAA;SAC/D;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,UAAU,GAAG,KAAc,CAAA;YACjC,IAAI,UAAU,CAAC,IAAI,KAAK,eAAe,CAAC,IAAI,EAAE;gBAC5C,MAAM,KAAK,CAAA;aACZ;iBAAM,IAAI,UAAU,CAAC,IAAI,KAAK,iBAAiB,CAAC,IAAI,EAAE;gBACrD,IAAI,CAAC,IAAI,CAAC,mBAAmB,UAAU,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;iBAAM;gBACL,IAAI,CAAC,OAAO,CAAC,mBAAmB,UAAU,CAAC,OAAO,EAAE,CAAC,CAAA;aACtD;SACF;gBAAS;YACR,0CAA0C;YAC1C,IAAI;gBACF,MAAM,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAA;aACpC;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,KAAK,CAAC,6BAA6B,KAAK,EAAE,CAAC,CAAA;aACjD;SACF;QAED,OAAO,OAAO,CAAA;;CACf;AA/FD,8BA+FC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts b/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts new file mode 100644 index 0000000..bf421c1 --- /dev/null +++ 
b/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts @@ -0,0 +1,8 @@ +import { CompressionMethod } from './constants'; +import { ArtifactCacheEntry, InternalCacheOptions, ReserveCacheResponse, ITypedResponseWithError } from './contracts'; +import { DownloadOptions, UploadOptions } from '../options'; +export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod, enableCrossOsArchive?: boolean): string; +export declare function getCacheEntry(keys: string[], paths: string[], options?: InternalCacheOptions): Promise; +export declare function downloadCache(archiveLocation: string, archivePath: string, options?: DownloadOptions): Promise; +export declare function reserveCache(key: string, paths: string[], options?: InternalCacheOptions): Promise>; +export declare function saveCache(cacheId: number, archivePath: string, options?: UploadOptions): Promise; diff --git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.js b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js new file mode 100644 index 0000000..e00dfbf --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js @@ -0,0 +1,262 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const auth_1 = require("@actions/http-client/lib/auth"); +const crypto = __importStar(require("crypto")); +const fs = __importStar(require("fs")); +const url_1 = require("url"); +const utils = __importStar(require("./cacheUtils")); +const downloadUtils_1 = require("./downloadUtils"); +const options_1 = require("../options"); +const requestUtils_1 = require("./requestUtils"); +const versionSalt = '1.0'; +function getCacheApiUrl(resource) { + const baseUrl = process.env['ACTIONS_CACHE_URL'] || ''; + if (!baseUrl) { + throw new Error('Cache Service Url not found, unable to restore cache.'); + } + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core.debug(`Resource Url: ${url}`); + return url; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader('application/json', '6.0-preview.1') + } + }; + return requestOptions; +} +function createHttpClient() { + const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); +} +function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { + // don't pass changes upstream + const components = paths.slice(); + // Add compression method to cache version to restore + // compressed cache as per compression method + if (compressionMethod) { + components.push(compressionMethod); + } + // Only check for windows platforms if enableCrossOsArchive is false + if (process.platform === 'win32' && !enableCrossOsArchive) { + components.push('windows-only'); + } + // Add salt to cache version to support breaking changes in cache entry + components.push(versionSalt); + return crypto.createHash('sha256').update(components.join('|')).digest('hex'); +} +exports.getCacheVersion = getCacheVersion; +function getCacheEntry(keys, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); + const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; + const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found + if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } + return null; + } + if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. + throw new Error('Cache not found.'); + } + core.setSecret(cacheDownloadUrl); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); +} +exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} +function downloadCache(archiveLocation, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = (0, options_1.getDownloadOptions)(options); + if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + if (downloadOptions.useAzureSdk) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. 
+ yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } + else if (downloadOptions.concurrentBlobDownloads) { + // Use concurrent implementation with HttpClient to work around blob SDK issue + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } + } + else { + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } + }); +} +exports.downloadCache = downloadCache; +// Reserve Cache +function reserveCache(key, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); + const reserveCacheRequest = { + key, + version, + cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize + }; + const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); + })); + return response; + }); +} +exports.reserveCache = reserveCache; +function getContentRange(start, end) { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + 'Content-Type': 'application/octet-stream', + 'Content-Range': getContentRange(start, end) + }; + const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { + return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); + })); + if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); + } + }); +} +function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + // Upload Chunks + const fileSize = utils.getArchiveFileSizeInBytes(archivePath); + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs.openSync(archivePath, 'r'); + const uploadOptions = (0, options_1.getUploadOptions)(options); + const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + core.debug('Awaiting all uploads'); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + 
yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + } + }))); + } + finally { + fs.closeSync(fd); + } + return; + }); +} +function commitCache(httpClient, cacheId, filesize) { + return __awaiter(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); +} +function saveCache(cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + core.debug('Upload cache'); + yield uploadFile(httpClient, cacheId, archivePath, options); + // Commit Cache + core.debug('Commiting cache'); + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core.info('Cache saved successfully'); + }); +} +exports.saveCache = saveCache; +//# sourceMappingURL=cacheHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map new file mode 100644 index 0000000..64614dd --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"cacheHttpClient.js","sourceRoot":"","sources":["../../src/internal/cacheHttpClient.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAA+C;AAC/C,wDAAqE;AAKrE,+CAAgC;AAChC,uCAAwB;AACxB,6BAAuB;AAEvB,oDAAqC;AAWrC,mDAIwB;AACxB,wCAKmB;AACnB,iDAIuB;AAEvB,MAAM,WAAW,GAAG,KAAK,CAAA;AAEzB,SAAS,cAAc,CAAC,QAAgB;IACtC,MAAM,OAAO,GAAW,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAA;IAC9D,IAAI,CAAC,OAAO,EAAE;QACZ,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAA;KACzE;IAED,MAAM,GAAG,GAAG,GAAG,OAAO,uBAAuB,QAAQ,EAAE,CAAA;IACvD,IAAI,CAAC,KAAK,CAAC,iBAAiB,GAAG,EAAE,CAAC,CAAA;IAClC,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAY,EAAE,UAAkB;IAC1D,OAAO,GAAG,IAAI,gBAAgB,UAAU,EAAE,CAAA;AAC5C,CAAC;AAED,SAAS,iBAAiB;IACxB,MAAM,cAAc,GAAmB;QACrC,OAAO,EAAE;YACP,MAAM,EAAE,kBAAkB,CAAC,kBAAkB,EAAE,eAAe,CAAC;SAChE;KACF,CAAA;IAED,OAAO,cAAc,CAAA;AACvB,CAAC;AAED,SAAS,gBAAgB;IACvB,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,IAAI,EAAE,CAAA;IACxD,MAAM,uBAAuB,GAAG,IAAI,8BAAuB,CAAC,KAAK,CAAC,CAAA;IAElE,OAAO,IAAI,wBAAU,CACnB,eAAe,EACf,CAAC,uBAAuB,CAAC,EACzB,iBAAiB,EAAE,CACpB,CAAA;AACH,CAAC;AAED,SAAgB,eAAe,CAC7B,KAAe,EACf,iBAAqC,EACrC,oBAAoB,GAAG,KAAK;IAE5B,8BAA8B;IAC9B,MAAM,UAAU,GAAG,KAAK,CAAC,KAAK,EAAE,CAAA;IAEhC,qDAAqD;IACrD,6CAA6C;IAC7C,IAAI,iBAAiB,EAAE;QACrB,UAAU,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAA;KACnC;IAED,oEAAoE;IACpE,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,CAAC,oBAAoB,EAAE;QACzD,UAAU,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;KAChC;IAED,uEAAuE;IACvE,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAE5B,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;AAC/E,CAAC;AAvBD,0CAuBC;AAED,SAAsB,aAAa,CACjC,IAAc,EACd,KAAe,EACf,OAA8B;;QAE9B,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,eAAe,CAC7B,KAAK,EACL,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,EAC1B,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,oBAAoB,CAC9B,CAAA;QACD,MAAM,QAAQ,GAAG,cAAc,kBAAkB,CAC/C,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CACf,YAAY,OAAO,EAAE,CAAA;QAEtB,MAAM,QAAQ,GAAG,MAAM,IAAA,iCAAkB,EAAC,eAAe,EAAE,GAAS,EAAE,gDACpE,OAAA,UAAU,CAAC,OAAO,CAAqB,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAA,GAAA,CACjE,CAAA;QACD,kBAAkB;QAClB,IAAI,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;YAC/B,uDAAuD;YACvD,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;gBAClB,MAAM,6BAA6B,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;aAClE;YACD,OAAO,IAAI,CAAA;SACZ;QACD,IAAI,CAAC,IAAA,kCAAmB,EAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;YAC7C,MAAM,IAAI,KAAK,CAAC,gCAAgC,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAA;SACvE;QAED,MAAM,WAAW,GAAG,QAAQ,CAAC,MAAM,CAAA;QACnC,MAAM,gBAAgB,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,eAAe,CAAA;QACrD,IAAI,CAAC,gBAAgB,EAAE;YACrB,gFAAgF;YAChF,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC,CAAA;SACpC;QACD,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAA;QAC3B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC,CAAA;QAEvC,OAAO,WAAW,CAAA;IACpB,CAAC;CAAA;AAzCD,sCAyCC;AAED,SAAe,6BAA6B,CAC1C,GAAW,EACX,UAAsB,EACtB,OAAe;;QAEf,MAAM,QAAQ,GAAG,cAAc,kBAAkB,CAAC,GAAG,CAAC,EAAE,CAAA;QACxD,MAAM,QAAQ,GAAG,MAAM,IAAA,iCAAkB,EAAC,WAAW,EAAE,GAAS,EAAE,gDAChE,OAAA,UAAU,CAAC,OAAO,CAAoB,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAA,GAAA,CAChE,CAAA;QACD,IAAI,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;YAC/B,MAAM,eAAe,GAAG,QAAQ,CAAC,MAAM,CAAA;YACvC,MAAM,UAAU,GAAG,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,UAAU,CAAA;YAC9C,IAAI,UAAU,IAAI,UAAU,GAAG,CAAC,EAAE;gBAChC,IAAI,CAAC,KAAK,CACR,0CAA0C,GAAG,eAAe,OAAO,cAAc,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,yRAAyR,CACpY,CAAA;gBACD,KAAK,MAAM,UAAU,IAAI,CAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,cAAc,KAAI,EAAE,EAAE;oBAC9D,IAAI,CAAC,KAAK,CACR,cAAc,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,QAAQ,oBAAoB,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,YAAY
,kBAAkB,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,KAAK,oBAAoB,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,YAAY,EAAE,CAChK,CAAA;iBACF;aACF;SACF;IACH,CAAC;CAAA;AAED,SAAsB,aAAa,CACjC,eAAuB,EACvB,WAAmB,EACnB,OAAyB;;QAEzB,MAAM,UAAU,GAAG,IAAI,SAAG,CAAC,eAAe,CAAC,CAAA;QAC3C,MAAM,eAAe,GAAG,IAAA,4BAAkB,EAAC,OAAO,CAAC,CAAA;QAEnD,IAAI,UAAU,CAAC,QAAQ,CAAC,QAAQ,CAAC,wBAAwB,CAAC,EAAE;YAC1D,IAAI,eAAe,CAAC,WAAW,EAAE;gBAC/B,6FAA6F;gBAC7F,MAAM,IAAA,uCAAuB,EAC3B,eAAe,EACf,WAAW,EACX,eAAe,CAChB,CAAA;aACF;iBAAM,IAAI,eAAe,CAAC,uBAAuB,EAAE;gBAClD,8EAA8E;gBAC9E,MAAM,IAAA,iDAAiC,EACrC,eAAe,EACf,WAAW,EACX,eAAe,CAChB,CAAA;aACF;iBAAM;gBACL,qDAAqD;gBACrD,MAAM,IAAA,uCAAuB,EAAC,eAAe,EAAE,WAAW,CAAC,CAAA;aAC5D;SACF;aAAM;YACL,MAAM,IAAA,uCAAuB,EAAC,eAAe,EAAE,WAAW,CAAC,CAAA;SAC5D;IACH,CAAC;CAAA;AA9BD,sCA8BC;AAED,gBAAgB;AAChB,SAAsB,YAAY,CAChC,GAAW,EACX,KAAe,EACf,OAA8B;;QAE9B,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,eAAe,CAC7B,KAAK,EACL,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,EAC1B,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,oBAAoB,CAC9B,CAAA;QAED,MAAM,mBAAmB,GAAwB;YAC/C,GAAG;YACH,OAAO;YACP,SAAS,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS;SAC9B,CAAA;QACD,MAAM,QAAQ,GAAG,MAAM,IAAA,iCAAkB,EAAC,cAAc,EAAE,GAAS,EAAE;YACnE,OAAA,UAAU,CAAC,QAAQ,CACjB,cAAc,CAAC,QAAQ,CAAC,EACxB,mBAAmB,CACpB,CAAA;UAAA,CACF,CAAA;QACD,OAAO,QAAQ,CAAA;IACjB,CAAC;CAAA;AAxBD,oCAwBC;AAED,SAAS,eAAe,CAAC,KAAa,EAAE,GAAW;IACjD,oCAAoC;IACpC,8BAA8B;IAC9B,oBAAoB;IACpB,2CAA2C;IAC3C,+BAA+B;IAC/B,OAAO,SAAS,KAAK,IAAI,GAAG,IAAI,CAAA;AAClC,CAAC;AAED,SAAe,WAAW,CACxB,UAAsB,EACtB,WAAmB,EACnB,UAAuC,EACvC,KAAa,EACb,GAAW;;QAEX,IAAI,CAAC,KAAK,CACR,2BACE,GAAG,GAAG,KAAK,GAAG,CAChB,oBAAoB,KAAK,wBAAwB,eAAe,CAC9D,KAAK,EACL,GAAG,CACJ,EAAE,CACJ,CAAA;QACD,MAAM,iBAAiB,GAAG;YACxB,cAAc,EAAE,0BAA0B;YAC1C,eAAe,EAAE,eAAe,CAAC,KAAK,EAAE,GAAG,CAAC;SAC7C,CAAA;QAED,MAAM,mBAAmB,GAAG,MAAM,IAAA,sCAAuB,EACvD,uBAAuB,KAAK,UAAU,GAAG,GAAG,EAC5C,GAAS,EAAE;YACT,OAAA,UAAU,CAAC,UAAU,CACnB,OAAO,EACP,WAAW,EACX,UAAU,EAAE,EACZ,iBAAiB,CAClB,CAAA;UAAA,CACJ,CAAA;QAED,IAAI,CAAC,IAAA,kCAAmB,EAAC,mBAAmB,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YAChE,MAAM,IAAI,KAAK,CACb,gCAAgC,mBAAmB,CAAC,OAAO,CAAC,UAAU,uBAAuB,CAC9F,CAAA;SACF;IACH,CAAC;CAAA;AAED,SAAe,UAAU,CACvB,UAAsB,EACtB,OAAe,EACf,WAAmB,EACnB,OAAuB;;QAEvB,gBAAgB;QAChB,MAAM,QAAQ,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;QAC7D,MAAM,WAAW,GAAG,cAAc,CAAC,UAAU,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAA;QAClE,MAAM,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,GAAG,CAAC,CAAA;QACxC,MAAM,aAAa,GAAG,IAAA,0BAAgB,EAAC,OAAO,CAAC,CAAA;QAE/C,MAAM,WAAW,GAAG,KAAK,CAAC,aAAa,CACrC,mBAAmB,EACnB,aAAa,CAAC,iBAAiB,CAChC,CAAA;QACD,MAAM,YAAY,GAAG,KAAK,CAAC,aAAa,CACtC,iBAAiB,EACjB,aAAa,CAAC,eAAe,CAC9B,CAAA;QAED,MAAM,eAAe,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;QAC1D,IAAI,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAA;QAClC,IAAI,MAAM,GAAG,CAAC,CAAA;QAEd,IAAI;YACF,MAAM,OAAO,CAAC,GAAG,CACf,eAAe,CAAC,GAAG,CAAC,GAAS,EAAE;gBAC7B,OAAO,MAAM,GAAG,QAAQ,EAAE;oBACxB,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,GAAG,MAAM,EAAE,YAAY,CAAC,CAAA;oBAC3D,MAAM,KAAK,GAAG,MAAM,CAAA;oBACpB,MAAM,GAAG,GAAG,MAAM,GAAG,SAAS,GAAG,CAAC,CAAA;oBAClC,MAAM,IAAI,YAAY,CAAA;oBAEtB,MAAM,WAAW,CACf,UAAU,EACV,WAAW,EACX,GAAG,EAAE,CACH,EAAE;yBACC,gBAAgB,CAAC,WAAW,EAAE;wBAC7B,EAAE;wBACF,KAAK;wBACL,GAAG;wBACH,SAAS,EAAE,KAAK;qBACjB,CAAC;yBACD,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;wBACnB,MAAM,IAAI,KAAK,CACb,qDAAqD,KAAK,CAAC,OAAO,EAAE,CACrE,CAAA;oBACH,CAAC,CAAC,EACN,KAAK,EACL,GAAG,CACJ,CAAA;iBACF;YACH,CAAC,CAAA,CAAC,CACH,CAAA;SACF;gBAAS;YACR,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,CAAA;SACjB;QACD,OAAM;IACR,CAAC;CAAA;AAED,SAAe,WAAW,CACxB,UAAsB,EACtB,OAAe,EACf,QAAgB;;QAEhB,MAAM,kBAAkB,GAAuB,EAAC,IAAI,EAAE,QAAQ,
EAAC,CAAA;QAC/D,OAAO,MAAM,IAAA,iCAAkB,EAAC,aAAa,EAAE,GAAS,EAAE;YACxD,OAAA,UAAU,CAAC,QAAQ,CACjB,cAAc,CAAC,UAAU,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,EAC9C,kBAAkB,CACnB,CAAA;UAAA,CACF,CAAA;IACH,CAAC;CAAA;AAED,SAAsB,SAAS,CAC7B,OAAe,EACf,WAAmB,EACnB,OAAuB;;QAEvB,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QAErC,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;QAC1B,MAAM,UAAU,CAAC,UAAU,EAAE,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,CAAA;QAE3D,eAAe;QACf,IAAI,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAA;QAC7B,MAAM,SAAS,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;QAC9D,IAAI,CAAC,IAAI,CACP,gBAAgB,IAAI,CAAC,KAAK,CAAC,SAAS,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,QAAQ,SAAS,KAAK,CAC5E,CAAA;QAED,MAAM,mBAAmB,GAAG,MAAM,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,SAAS,CAAC,CAAA;QAC7E,IAAI,CAAC,IAAA,kCAAmB,EAAC,mBAAmB,CAAC,UAAU,CAAC,EAAE;YACxD,MAAM,IAAI,KAAK,CACb,gCAAgC,mBAAmB,CAAC,UAAU,uBAAuB,CACtF,CAAA;SACF;QAED,IAAI,CAAC,IAAI,CAAC,0BAA0B,CAAC,CAAA;IACvC,CAAC;CAAA;AAzBD,8BAyBC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts b/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts new file mode 100644 index 0000000..4e3041f --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts @@ -0,0 +1,12 @@ +/// +import * as fs from 'fs'; +import { CompressionMethod } from './constants'; +export declare function createTempDirectory(): Promise; +export declare function getArchiveFileSizeInBytes(filePath: string): number; +export declare function resolvePaths(patterns: string[]): Promise; +export declare function unlinkFile(filePath: fs.PathLike): Promise; +export declare function getCompressionMethod(): Promise; +export declare function getCacheFileName(compressionMethod: CompressionMethod): string; +export declare function getGnuTarPathOnWindows(): Promise; +export declare function assertDefined(name: string, value?: T): T; +export declare function isGhes(): boolean; diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.js b/node_modules/@actions/cache/lib/internal/cacheUtils.js new file mode 100644 index 0000000..e917ced --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.js @@ -0,0 +1,198 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0; +const core = __importStar(require("@actions/core")); +const exec = __importStar(require("@actions/exec")); +const glob = __importStar(require("@actions/glob")); +const io = __importStar(require("@actions/io")); +const fs = __importStar(require("fs")); +const path = __importStar(require("path")); +const semver = __importStar(require("semver")); +const util = __importStar(require("util")); +const uuid_1 = require("uuid"); +const constants_1 = require("./constants"); +// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 +function createTempDirectory() { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === 'win32'; + let tempDirectory = process.env['RUNNER_TEMP'] || ''; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env['USERPROFILE'] || 'C:\\'; + } + else { + if (process.platform === 'darwin') { + baseLocation = '/Users'; + } + else { + baseLocation = '/home'; + } + } + tempDirectory = path.join(baseLocation, 'actions', 'temp'); + } + const dest = path.join(tempDirectory, (0, uuid_1.v4)()); + yield io.mkdirP(dest); + return dest; + }); +} +exports.createTempDirectory = createTempDirectory; +function getArchiveFileSizeInBytes(filePath) { + return fs.statSync(filePath).size; +} +exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; +function resolvePaths(patterns) { + var _a, e_1, _b, _c; + var _d; + return __awaiter(this, void 0, void 0, function* () { + const paths = []; + const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? 
_d : process.cwd(); + const globber = yield glob.create(patterns.join('\n'), { + implicitDescendants: false + }); + try { + for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { + _c = _g.value; + _e = false; + const file = _c; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. + if (relativeFile === '') { + // path.relative returns empty string if workspace and file are equal + paths.push('.'); + } + else { + paths.push(`${relativeFile}`); + } + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); + } + finally { if (e_1) throw e_1.error; } + } + return paths; + }); +} +exports.resolvePaths = resolvePaths; +function unlinkFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + return util.promisify(fs.unlink)(filePath); + }); +} +exports.unlinkFile = unlinkFile; +function getVersion(app, additionalArgs = []) { + return __awaiter(this, void 0, void 0, function* () { + let versionOutput = ''; + additionalArgs.push('--version'); + core.debug(`Checking ${app} ${additionalArgs.join(' ')}`); + try { + yield exec.exec(`${app}`, additionalArgs, { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + } + catch (err) { + core.debug(err.message); + } + versionOutput = versionOutput.trim(); + core.debug(versionOutput); + return versionOutput; + }); +} +// Use zstandard if possible to maximize cache performance +function getCompressionMethod() { + return __awaiter(this, void 0, void 0, function* () { + const versionOutput = yield getVersion('zstd', ['--quiet']); + const version = semver.clean(versionOutput); + core.debug(`zstd version: ${version}`); + if (versionOutput === '') { + return constants_1.CompressionMethod.Gzip; + } + else { + return constants_1.CompressionMethod.ZstdWithoutLong; + } + }); +} +exports.getCompressionMethod = getCompressionMethod; +function getCacheFileName(compressionMethod) { + return compressionMethod === constants_1.CompressionMethod.Gzip + ? constants_1.CacheFilename.Gzip + : constants_1.CacheFilename.Zstd; +} +exports.getCacheFileName = getCacheFileName; +function getGnuTarPathOnWindows() { + return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } + const versionOutput = yield getVersion('tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? 
io.which('tar') : ''; + }); +} +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; +function assertDefined(name, value) { + if (value === undefined) { + throw Error(`Expected ${name} but value was undefiend`); + } + return value; +} +exports.assertDefined = assertDefined; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === 'GITHUB.COM'; + const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST'); + return !isGitHubHost && !isGheHost; +} +exports.isGhes = isGhes; +//# sourceMappingURL=cacheUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.js.map b/node_modules/@actions/cache/lib/internal/cacheUtils.js.map new file mode 100644 index 0000000..a66d388 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,+BAAiC;AACjC,2CAIoB;AAEpB,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,IAAA,SAAM,GAAE,CAAC,CAAA;QAC/C,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,eAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA,sDAAE;gBAAzB,cAAuB;gBAAvB,WAAuB;gBAArC,MAAM,IAAI,KAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CACvB,GAAW,EACX,iBAA2B,EAAE;;QAE7B,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QACzD,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,EAAE,EAAE,cAAc,EAAE;gBACxC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,I
AAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,CAAA;QAC3D,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAC3C,IAAI,CAAC,KAAK,CAAC,iBAAiB,OAAO,EAAE,CAAC,CAAA;QAEtC,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM;YACL,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;IACH,CAAC;CAAA;AAVD,oDAUC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,sBAAsB;;QAC1C,IAAI,EAAE,CAAC,UAAU,CAAC,+BAAmB,CAAC,EAAE;YACtC,OAAO,+BAAmB,CAAA;SAC3B;QACD,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAA;IAC/E,CAAC;CAAA;AAND,wDAMC;AAED,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GACb,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,QAAQ,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAA;IAEtE,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,CAAA;AACpC,CAAC;AAXD,wBAWC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/constants.d.ts b/node_modules/@actions/cache/lib/internal/constants.d.ts new file mode 100644 index 0000000..1d8de71 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.d.ts @@ -0,0 +1,20 @@ +export declare enum CacheFilename { + Gzip = "cache.tgz", + Zstd = "cache.tzst" +} +export declare enum CompressionMethod { + Gzip = "gzip", + ZstdWithoutLong = "zstd-without-long", + Zstd = "zstd" +} +export declare enum ArchiveToolType { + GNU = "gnu", + BSD = "bsd" +} +export declare const DefaultRetryAttempts = 2; +export declare const DefaultRetryDelay = 5000; +export declare const SocketTimeout = 5000; +export declare const GnuTarPathOnWindows: string; +export declare const SystemTarPathOnWindows: string; +export declare const TarFilename = "cache.tar"; +export declare const ManifestFilename = "manifest.txt"; diff --git a/node_modules/@actions/cache/lib/internal/constants.js b/node_modules/@actions/cache/lib/internal/constants.js new file mode 100644 index 0000000..45b910d --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0; +var CacheFilename; +(function (CacheFilename) { + CacheFilename["Gzip"] = "cache.tgz"; + CacheFilename["Zstd"] = "cache.tzst"; +})(CacheFilename || (exports.CacheFilename = CacheFilename = {})); +var CompressionMethod; +(function (CompressionMethod) { + CompressionMethod["Gzip"] = "gzip"; + // Long range mode was added to zstd in v1.3.2. 
+ // This enum is for earlier version of zstd that does not have --long support + CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; + CompressionMethod["Zstd"] = "zstd"; +})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {})); +// The default number of retry attempts. +exports.DefaultRetryAttempts = 2; +// The default delay in milliseconds between retry attempts. +exports.DefaultRetryDelay = 5000; +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. +exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/constants.js.map b/node_modules/@actions/cache/lib/internal/constants.js.map new file mode 100644 index 0000000..30500d6 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,6BAAb,aAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,iCAAjB,iBAAiB,QAM5B;AAED,IAAY,eAGX;AAHD,WAAY,eAAe;IACzB,8BAAW,CAAA;IACX,8BAAW,CAAA;AACb,CAAC,EAHW,eAAe,+BAAf,eAAe,QAG1B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA;AAEjC,uDAAuD;AAC1C,QAAA,mBAAmB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,0BAA0B,CAAA;AAE3F,uDAAuD;AAC1C,QAAA,sBAAsB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,8BAA8B,CAAA;AAEpF,QAAA,WAAW,GAAG,WAAW,CAAA;AAEzB,QAAA,gBAAgB,GAAG,cAAc,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts b/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts new file mode 100644 index 0000000..796b8a6 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts @@ -0,0 +1,83 @@ +/// +import { TransferProgressEvent } from '@azure/ms-rest-js'; +import * as fs from 'fs'; +import { DownloadOptions } from '../options'; +/** + * Class for tracking the download state and displaying stats. + */ +export declare class DownloadProgress { + contentLength: number; + segmentIndex: number; + segmentSize: number; + segmentOffset: number; + receivedBytes: number; + startTime: number; + displayedComplete: boolean; + timeoutHandle?: ReturnType; + constructor(contentLength: number); + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize: number): void; + /** + * Sets the number of bytes received for the current segment. 
+ * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes: number): void; + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes(): number; + /** + * Returns true if the download is complete. + */ + isDone(): boolean; + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. + */ + display(): void; + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress(): (progress: TransferProgressEvent) => void; + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs?: number): void; + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer(): void; +} +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +export declare function downloadCacheHttpClient(archiveLocation: string, archivePath: string): Promise; +/** + * Download the cache using the Actions toolkit http-client concurrently + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +export declare function downloadCacheHttpClientConcurrent(archiveLocation: string, archivePath: fs.PathLike, options: DownloadOptions): Promise; +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +export declare function downloadCacheStorageSDK(archiveLocation: string, archivePath: string, options: DownloadOptions): Promise; diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.js b/node_modules/@actions/cache/lib/internal/downloadUtils.js new file mode 100644 index 0000000..1586d1c --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.js @@ -0,0 +1,378 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const storage_blob_1 = require("@azure/storage-blob"); +const buffer = __importStar(require("buffer")); +const fs = __importStar(require("fs")); +const stream = __importStar(require("stream")); +const util = __importStar(require("util")); +const utils = __importStar(require("./cacheUtils")); +const constants_1 = require("./constants"); +const requestUtils_1 = require("./requestUtils"); +const abort_controller_1 = require("@azure/abort-controller"); +/** + * Pipes the body of a HTTP response to a stream + * + * @param response the HTTP response + * @param output the writable stream + */ +function pipeResponseToStream(response, output) { + return __awaiter(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); +} +/** + * Class for tracking the download state and displaying stats. + */ +class DownloadProgress { + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); + } + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + } + /** + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; + } + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; + } + /** + * Returns true if the download is complete. + */ + isDone() { + return this.getTransferredBytes() === this.contentLength; + } + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. 
+ */ + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / + (1024 * 1024) / + (elapsedTime / 1000)).toFixed(1); + core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } + } + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; + } + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs = 1000) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = undefined; + } + this.display(); + } +} +exports.DownloadProgress = DownloadProgress; +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const writeStream = fs.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient('actions/cache'); + const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers['content-length']; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeInBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. 
Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug('Unable to validate download, no Content-Length header'); + } + }); +} +exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Actions toolkit http-client concurrently + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); + const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); + const lengthHeader = res.message.headers['content-length']; + if (lengthHeader === undefined || lengthHeader === null) { + throw new Error('Content-Length not found on blob response'); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: () => __awaiter(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }) + }); + } + // reverse to use .pop instead of .shift + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }); + while ((nextDownload = downloads.pop())) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? 
_a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } + } + finally { + httpClient.dispose(); + yield archiveDescriptor.close(); + } + }); +} +exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; +function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 30000; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === 'string') { + throw new Error('downloadSegmentRetry failed due to timeout'); + } + return result; + } + catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } + } + }); +} +function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); +} +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; + if (contentLength < 0) { + // We should never hit this condition, but just in case fall back to downloading the + // file as one large stream + core.debug('Unable to determine content length, downloading file with http-client...'); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } + else { + // Use downloadToBuffer for faster downloads, since internally it splits the + // file into 4 MB chunks which can then be parallelized and retried independently + // + // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB + // on 64-bit systems), split the download into multiple segments + // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly. 
+ // Updated segment size to 128MB = 134217728 bytes, to complete a segment faster and fail fast + const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs.openSync(archivePath, 'w'); + try { + downloadProgress.startDisplayTimer(); + const controller = new abort_controller_1.AbortController(); + const abortSignal = controller.signal; + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, { + abortSignal, + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + })); + if (result === 'timeout') { + controller.abort(); + throw new Error('Aborting cache download as the download time exceeded the timeout.'); + } + else if (Buffer.isBuffer(result)) { + fs.writeFileSync(fd, result); + } + } + } + finally { + downloadProgress.stopDisplayTimer(); + fs.closeSync(fd); + } + } + }); +} +exports.downloadCacheStorageSDK = downloadCacheStorageSDK; +const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () { + let timeoutHandle; + const timeoutPromise = new Promise(resolve => { + timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs); + }); + return Promise.race([promise, timeoutPromise]).then(result => { + clearTimeout(timeoutHandle); + return result; + }); +}); +//# sourceMappingURL=downloadUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.js.map b/node_modules/@actions/cache/lib/internal/downloadUtils.js.map new file mode 100644 index 0000000..4237de1 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"downloadUtils.js","sourceRoot":"","sources":["../../src/internal/downloadUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAAmE;AACnE,sDAAmD;AAEnD,+CAAgC;AAChC,uCAAwB;AACxB,+CAAgC;AAChC,2CAA4B;AAE5B,oDAAqC;AACrC,2CAAyC;AAEzC,iDAAsD;AAEtD,8DAAuD;AAEvD;;;;;GAKG;AACH,SAAe,oBAAoB,CACjC,QAA4B,EAC5B,MAA6B;;QAE7B,MAAM,QAAQ,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;QAChD,MAAM,QAAQ,CAAC,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;IAC1C,CAAC;CAAA;AAED;;GAEG;AACH,MAAa,gBAAgB;IAU3B,YAAY,aAAqB;QAC/B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;QAClC,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAA;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QACtB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QACtB,IAAI,CAAC,iBAAiB,GAAG,KAAK,CAAA;QAC9B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;IAC7B,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAAmB;QAC7B,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,WAAW,CAAA;QAC1D,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;QACzC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;QAC9B,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QAEtB,IAAI,CAAC,KAAK,CACR,iCAAiC,IAAI,CAAC,aAAa,gBAAgB,IAAI,CAAC,WAAW,KAAK,CACzF,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,gBAAgB,CAAC,aAAqB;QACpC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;IACpC,CAAC;IAED;;OAEG;IACH,mBAAmB;QACjB,OAAO,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAA;IAChD,CAAC;IAED;;OAEG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,mBAAmB,EAAE,KAAK,IAAI,CAAC,aAAa,CAAA;IAC1D,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,OAAM;SACP;QAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAA;QAChE,MAAM,UAAU,GAAG,CAAC,GAAG,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,OAAO,CACxE,CAAC,CACF,CAAA;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;QAC/C,MAAM,aAAa,GAAG,CACpB,gBAAgB;YAChB,CAAC,IAAI,GAAG,IAAI,CAAC;YACb,CAAC,WAAW,GAAG,IAAI,CAAC,CACrB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;QAEZ,IAAI,CAAC,IAAI,CACP,YAAY,gBAAgB,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU,OAAO,aAAa,UAAU,CACnG,CAAA;QAED,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;YACjB,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAA;SAC9B;IACH,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,QAA+B,EAAE,EAAE;YACzC,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAA;QAC7C,CAAC,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CAAC,SAAS,GAAG,IAAI;QAChC,MAAM,eAAe,GAAG,GAAS,EAAE;YACjC,IAAI,CAAC,OAAO,EAAE,CAAA;YAEd,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;aAC5D;QACH,CAAC,CAAA;QAED,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;IAC7D,CAAC;IAED;;;;OAIG;IACH,gBAAgB;QACd,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,YAAY,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;YAChC,IAAI,CAAC,aAAa,GAAG,SAAS,CAAA;SAC/B;QAED,IAAI,CAAC,OAAO,EAAE,CAAA;IAChB,CAAC;CACF;AAhID,4CAgIC;AAED;;;;;GAKG;AACH,SAAsB,uBAAuB,CAC3C,eAAuB,EACvB,WAAmB;;QAEnB,MAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACrD,MAAM,UAAU,GAAG,IAAI,wBAAU,CAAC,eAAe,CAAC,CAAA;QAClD,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAuB,EACpD,eAAe,EACf,GAAS,EAAE,gDAAC,OAAA,UAAU,CAAC,GAAG,CAAC,eAAe,CAAC,CAAA,GAAA,CAC5C,CAAA;QAED,yDAAyD;QACzD,gBAAgB,CAAC,OAAO,CAAC,MAAM,CAAC,UAAU,CAAC,yBAAa,EAAE,GAAG,EAAE;YAC7D,gBAAgB,CAAC,OAAO,CAAC,OAAO,EAAE,CAAA;YAClC,IAAI,CAAC,KAAK,CAAC,6CAA6C,yBAAa,KAAK,CAAC,CAAA;QAC7E,CAAC,CAAC,CAAA;QAEF,MAAM,oBAAoB,CAAC,gBAAgB,EAAE,WAAW,CAAC,CAAA;QAEzD,0BAA0B;QAC1B,MAAM,mBAAmB,GAAG,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;QAE9E,IAAI,mBAAmB,EAAE;YACvB,MAAM,cAAc,GAAG,QAAQ,CAAC,mBAAmB,CAAC,CAAA;YACpD,MAAM,YAAY,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YAEjE,IAAI,YAAY,KAAK,cAAc,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,4CAA4C,cAAc,uBAAuB,YAAY,EAAE,CAChG,CAAA;aACF;SACF;aAAM;YACL,IAAI,CAAC,KAAK,CAAC,uDAA
uD,CAAC,CAAA;SACpE;IACH,CAAC;CAAA;AAlCD,0DAkCC;AAED;;;;;GAKG;AACH,SAAsB,iCAAiC,CACrD,eAAuB,EACvB,WAAwB,EACxB,OAAwB;;;QAExB,MAAM,iBAAiB,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,CAAC,CAAA;QAClE,MAAM,UAAU,GAAG,IAAI,wBAAU,CAAC,eAAe,EAAE,SAAS,EAAE;YAC5D,aAAa,EAAE,OAAO,CAAC,WAAW;YAClC,SAAS,EAAE,IAAI;SAChB,CAAC,CAAA;QACF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAA,sCAAuB,EACvC,uBAAuB,EACvB,GAAS,EAAE,gDAAC,OAAA,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,eAAe,EAAE,IAAI,EAAE,EAAE,CAAC,CAAA,GAAA,CACxE,CAAA;YAED,MAAM,YAAY,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;YAC1D,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,KAAK,IAAI,EAAE;gBACvD,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAA;aAC7D;YAED,MAAM,MAAM,GAAG,QAAQ,CAAC,YAAY,CAAC,CAAA;YACrC,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE;gBACxB,MAAM,IAAI,KAAK,CAAC,uCAAuC,MAAM,EAAE,CAAC,CAAA;aACjE;YAED,MAAM,SAAS,GAGT,EAAE,CAAA;YACR,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAA;YAEjC,KAAK,IAAI,MAAM,GAAG,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,MAAM,IAAI,SAAS,EAAE;gBACzD,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,SAAS,EAAE,MAAM,GAAG,MAAM,CAAC,CAAA;gBAClD,SAAS,CAAC,IAAI,CAAC;oBACb,MAAM;oBACN,aAAa,EAAE,GAAS,EAAE;wBACxB,OAAO,MAAM,oBAAoB,CAC/B,UAAU,EACV,eAAe,EACf,MAAM,EACN,KAAK,CACN,CAAA;oBACH,CAAC,CAAA;iBACF,CAAC,CAAA;aACH;YAED,wCAAwC;YACxC,SAAS,CAAC,OAAO,EAAE,CAAA;YACnB,IAAI,OAAO,GAAG,CAAC,CAAA;YACf,IAAI,eAAe,GAAG,CAAC,CAAA;YACvB,MAAM,QAAQ,GAAG,IAAI,gBAAgB,CAAC,MAAM,CAAC,CAAA;YAC7C,QAAQ,CAAC,iBAAiB,EAAE,CAAA;YAC5B,MAAM,UAAU,GAAG,QAAQ,CAAC,UAAU,EAAE,CAAA;YAExC,MAAM,eAAe,GAAiD,EAAE,CAAA;YACxE,IAAI,YAES,CAAA;YAEb,MAAM,YAAY,GAAwB,GAAS,EAAE;gBACnD,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC,CAAA;gBAClE,MAAM,iBAAiB,CAAC,KAAK,CAC3B,OAAO,CAAC,MAAM,EACd,CAAC,EACD,OAAO,CAAC,KAAK,EACb,OAAO,CAAC,MAAM,CACf,CAAA;gBACD,OAAO,EAAE,CAAA;gBACT,OAAO,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,CAAA;gBACtC,eAAe,IAAI,OAAO,CAAC,KAAK,CAAA;gBAChC,UAAU,CAAC,EAAC,WAAW,EAAE,eAAe,EAAC,CAAC,CAAA;YAC5C,CAAC,CAAA,CAAA;YAED,OAAO,CAAC,YAAY,GAAG,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE;gBACvC,eAAe,CAAC,YAAY,CAAC,MAAM,CAAC,GAAG,YAAY,CAAC,aAAa,EAAE,CAAA;gBACnE,OAAO,EAAE,CAAA;gBAET,IAAI,OAAO,IAAI,CAAC,MAAA,OAAO,CAAC,mBAAmB,mCAAI,EAAE,CAAC,EAAE;oBAClD,MAAM,YAAY,EAAE,CAAA;iBACrB;aACF;YAED,OAAO,OAAO,GAAG,CAAC,EAAE;gBAClB,MAAM,YAAY,EAAE,CAAA;aACrB;SACF;gBAAS;YACR,UAAU,CAAC,OAAO,EAAE,CAAA;YACpB,MAAM,iBAAiB,CAAC,KAAK,EAAE,CAAA;SAChC;;CACF;AA1FD,8EA0FC;AAED,SAAe,oBAAoB,CACjC,UAAsB,EACtB,eAAuB,EACvB,MAAc,EACd,KAAa;;QAEb,MAAM,OAAO,GAAG,CAAC,CAAA;QACjB,IAAI,QAAQ,GAAG,CAAC,CAAA;QAEhB,OAAO,IAAI,EAAE;YACX,IAAI;gBACF,MAAM,OAAO,GAAG,KAAK,CAAA;gBACrB,MAAM,MAAM,GAAG,MAAM,kBAAkB,CACrC,OAAO,EACP,eAAe,CAAC,UAAU,EAAE,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,CAC5D,CAAA;gBACD,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;oBAC9B,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;iBAC9D;gBAED,OAAO,MAAM,CAAA;aACd;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,QAAQ,IAAI,OAAO,EAAE;oBACvB,MAAM,GAAG,CAAA;iBACV;gBAED,QAAQ,EAAE,CAAA;aACX;SACF;IACH,CAAC;CAAA;AAED,SAAe,eAAe,CAC5B,UAAsB,EACtB,eAAuB,EACvB,MAAc,EACd,KAAa;;QAEb,MAAM,OAAO,GAAG,MAAM,IAAA,sCAAuB,EAC3C,mBAAmB,EACnB,GAAS,EAAE;YACT,OAAA,MAAM,UAAU,CAAC,GAAG,CAAC,eAAe,EAAE;gBACpC,KAAK,EAAE,SAAS,MAAM,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,EAAE;aAC/C,CAAC,CAAA;UAAA,CACL,CAAA;QAED,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QAED,OAAO;YACL,MAAM;YACN,KAAK;YACL,MAAM,EAAE,MAAM,OAAO,CAAC,cAAc,EAAE;SACvC,CAAA;IACH,CAAC;CAAA;AAQD;;;;;;;GAOG;AACH,SAAsB,uBAAuB,CAC3C,eAAuB,EACvB,WAAmB,EACnB,OAAwB;;;QAExB,MAAM,MAAM,GAAG,IAAI,8BAAe,CAAC,eAAe,EAAE,SAAS,EAAE;YAC7D,YAAY,EAAE;gBACZ,6DAA6D;gBAC7D,mDAAmD;gBACnD,cAAc,EAAE,OAAO,CAAC,WAAW;aACpC;SACF,CAAC,CAAA;QAEF,MAAM,U
AAU,GAAG,MAAM,MAAM,CAAC,aAAa,EAAE,CAAA;QAC/C,MAAM,aAAa,GAAG,MAAA,UAAU,CAAC,aAAa,mCAAI,CAAC,CAAC,CAAA;QAEpD,IAAI,aAAa,GAAG,CAAC,EAAE;YACrB,oFAAoF;YACpF,2BAA2B;YAC3B,IAAI,CAAC,KAAK,CACR,0EAA0E,CAC3E,CAAA;YAED,MAAM,uBAAuB,CAAC,eAAe,EAAE,WAAW,CAAC,CAAA;SAC5D;aAAM;YACL,4EAA4E;YAC5E,iFAAiF;YACjF,EAAE;YACF,mFAAmF;YACnF,gEAAgE;YAChE,oGAAoG;YAEpG,8FAA8F;YAC9F,MAAM,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,SAAS,EAAE,MAAM,CAAC,SAAS,CAAC,UAAU,CAAC,CAAA;YACvE,MAAM,gBAAgB,GAAG,IAAI,gBAAgB,CAAC,aAAa,CAAC,CAAA;YAE5D,MAAM,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,GAAG,CAAC,CAAA;YAExC,IAAI;gBACF,gBAAgB,CAAC,iBAAiB,EAAE,CAAA;gBACpC,MAAM,UAAU,GAAG,IAAI,kCAAe,EAAE,CAAA;gBACxC,MAAM,WAAW,GAAG,UAAU,CAAC,MAAM,CAAA;gBACrC,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,EAAE;oBACjC,MAAM,YAAY,GAChB,gBAAgB,CAAC,aAAa,GAAG,gBAAgB,CAAC,WAAW,CAAA;oBAE/D,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAC1B,cAAc,EACd,aAAa,GAAG,YAAY,CAC7B,CAAA;oBAED,gBAAgB,CAAC,WAAW,CAAC,WAAW,CAAC,CAAA;oBACzC,MAAM,MAAM,GAAG,MAAM,kBAAkB,CACrC,OAAO,CAAC,kBAAkB,IAAI,OAAO,EACrC,MAAM,CAAC,gBAAgB,CAAC,YAAY,EAAE,WAAW,EAAE;wBACjD,WAAW;wBACX,WAAW,EAAE,OAAO,CAAC,mBAAmB;wBACxC,UAAU,EAAE,gBAAgB,CAAC,UAAU,EAAE;qBAC1C,CAAC,CACH,CAAA;oBACD,IAAI,MAAM,KAAK,SAAS,EAAE;wBACxB,UAAU,CAAC,KAAK,EAAE,CAAA;wBAClB,MAAM,IAAI,KAAK,CACb,oEAAoE,CACrE,CAAA;qBACF;yBAAM,IAAI,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;wBAClC,EAAE,CAAC,aAAa,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;qBAC7B;iBACF;aACF;oBAAS;gBACR,gBAAgB,CAAC,gBAAgB,EAAE,CAAA;gBACnC,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,CAAA;aACjB;SACF;;CACF;AA1ED,0DA0EC;AAED,MAAM,kBAAkB,GAAG,CACzB,SAAiB,EACjB,OAAmB,EACE,EAAE;IACvB,IAAI,aAA6B,CAAA;IACjC,MAAM,cAAc,GAAG,IAAI,OAAO,CAAS,OAAO,CAAC,EAAE;QACnD,aAAa,GAAG,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,SAAS,CAAC,CAAA;IACjE,CAAC,CAAC,CAAA;IAEF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QAC3D,YAAY,CAAC,aAAa,CAAC,CAAA;QAC3B,OAAO,MAAM,CAAA;IACf,CAAC,CAAC,CAAA;AACJ,CAAC,CAAA,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.d.ts b/node_modules/@actions/cache/lib/internal/requestUtils.d.ts new file mode 100644 index 0000000..5ca5084 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.d.ts @@ -0,0 +1,8 @@ +import { HttpClientResponse } from '@actions/http-client'; +import { ITypedResponseWithError } from './contracts'; +export declare function isSuccessStatusCode(statusCode?: number): boolean; +export declare function isServerErrorStatusCode(statusCode?: number): boolean; +export declare function isRetryableStatusCode(statusCode?: number): boolean; +export declare function retry(name: string, method: () => Promise, getStatusCode: (arg0: T) => number | undefined, maxAttempts?: number, delay?: number, onError?: ((arg0: Error) => T | undefined) | undefined): Promise; +export declare function retryTypedResponse(name: string, method: () => Promise>, maxAttempts?: number, delay?: number): Promise>; +export declare function retryHttpClientResponse(name: string, method: () => Promise, maxAttempts?: number, delay?: number): Promise; diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.js b/node_modules/@actions/cache/lib/internal/requestUtils.js new file mode 100644 index 0000000..ed1fdd1 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.js @@ -0,0 +1,137 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const constants_1 = require("./constants"); +function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; +} +exports.isSuccessStatusCode = isSuccessStatusCode; +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} +exports.isServerErrorStatusCode = isServerErrorStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function sleep(milliseconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, milliseconds)); + }); +} +function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) { + return __awaiter(this, void 0, void 0, function* () { + let errorMessage = ''; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + try { + response = yield method(); + } + catch (error) { + if (onError) { + response = onError(error); + } + isRetryable = true; + errorMessage = error.message; + } + if (response) { + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + 
core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay); + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, + // If the error object contains the statusCode property, extract it and return + // an TypedResponse so it can be processed by the retry logic. + (error) => { + if (error instanceof http_client_1.HttpClientError) { + return { + statusCode: error.statusCode, + result: null, + headers: {}, + error + }; + } + else { + return undefined; + } + }); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; +//# sourceMappingURL=requestUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.js.map b/node_modules/@actions/cache/lib/internal/requestUtils.js.map new file mode 100644 index 0000000..5ea8634 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAI6B;AAC7B,2CAAmE;AAGnE,SAAgB,mBAAmB,CAAC,UAAmB;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,GAAG,CAAA;AAC9C,CAAC;AALD,kDAKC;AAED,SAAgB,uBAAuB,CAAC,UAAmB;IACzD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,IAAI,CAAA;KACZ;IACD,OAAO,UAAU,IAAI,GAAG,CAAA;AAC1B,CAAC;AALD,0DAKC;AAED,SAAgB,qBAAqB,CAAC,UAAmB;IACvD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,MAAM,oBAAoB,GAAG;QAC3B,uBAAS,CAAC,UAAU;QACpB,uBAAS,CAAC,kBAAkB;QAC5B,uBAAS,CAAC,cAAc;KACzB,CAAA;IACD,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;AAClD,CAAC;AAVD,sDAUC;AAED,SAAe,KAAK,CAAC,YAAoB;;QACvC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;IAClE,CAAC;CAAA;AAED,SAAsB,KAAK,CACzB,IAAY,EACZ,MAAwB,EACxB,aAA8C,EAC9C,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB,EACzB,UAAwD,SAAS;;QAEjE,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI,QAAQ,GAAkB,SAAS,CAAA;YACvC,IAAI,UAAU,GAAuB,SAAS,CAAA;YAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;YAEvB,IAAI;gBACF,QAAQ,GAAG,MAAM,MAAM,EAAE,CAAA;aAC1B;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,OAAO,EAAE;oBACX,QAAQ,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;iBAC1B;gBAED,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,QAAQ,EAAE;gBACZ,UAAU,GAAG,aAAa,CAAC,QAAQ,CAAC,CAAA;gBAEpC,IAAI,CAAC,uBAAuB,CAAC,UAAU,CAAC,EAAE;oBACxC,OAAO,QAAQ,CAAA;iBAChB;aACF;YAED,IAAI,UAAU,EAAE;gBACd,WAAW,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,gCAAgC,UAAU,EAAE,CAAA;aAC5D;YAED,IAAI,CAAC,KAAK,CACR,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC9C,MAAK;aACN;YAED,MAAM,KAAK,CAAC,KAAK,CAAC,CAAA;YAClB,OAAO,EAAE,CAAA;SA
CV;QAED,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AAtDD,sBAsDC;AAED,SAAsB,kBAAkB,CACtC,IAAY,EACZ,MAAiD,EACjD,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAAoC,EAAE,EAAE,CAAC,QAAQ,CAAC,UAAU,EAC7D,WAAW,EACX,KAAK;QACL,8EAA8E;QAC9E,iEAAiE;QACjE,CAAC,KAAY,EAAE,EAAE;YACf,IAAI,KAAK,YAAY,6BAAe,EAAE;gBACpC,OAAO;oBACL,UAAU,EAAE,KAAK,CAAC,UAAU;oBAC5B,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;oBACX,KAAK;iBACN,CAAA;aACF;iBAAM;gBACL,OAAO,SAAS,CAAA;aACjB;QACH,CAAC,CACF,CAAA;IACH,CAAC;CAAA;AA3BD,gDA2BC;AAED,SAAsB,uBAAuB,CAC3C,IAAY,EACZ,MAAyC,EACzC,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAA4B,EAAE,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,EAC7D,WAAW,EACX,KAAK,CACN,CAAA;IACH,CAAC;CAAA;AAbD,0DAaC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/tar.d.ts b/node_modules/@actions/cache/lib/internal/tar.d.ts new file mode 100644 index 0000000..0864013 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.d.ts @@ -0,0 +1,4 @@ +import { CompressionMethod } from './constants'; +export declare function listTar(archivePath: string, compressionMethod: CompressionMethod): Promise; +export declare function extractTar(archivePath: string, compressionMethod: CompressionMethod): Promise; +export declare function createTar(archiveFolder: string, sourceDirectories: string[], compressionMethod: CompressionMethod): Promise; diff --git a/node_modules/@actions/cache/lib/internal/tar.js b/node_modules/@actions/cache/lib/internal/tar.js new file mode 100644 index 0000000..166c058 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.js @@ -0,0 +1,272 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTar = exports.extractTar = exports.listTar = void 0; +const exec_1 = require("@actions/exec"); +const io = __importStar(require("@actions/io")); +const fs_1 = require("fs"); +const path = __importStar(require("path")); +const utils = __importStar(require("./cacheUtils")); +const constants_1 = require("./constants"); +const IS_WINDOWS = process.platform === 'win32'; +// Returns tar path and type: BSD or GNU +function getTarPath() { + return __awaiter(this, void 0, void 0, function* () { + switch (process.platform) { + case 'win32': { + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else if ((0, fs_1.existsSync)(systemTar)) { + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + } + break; + } + case 'darwin': { + const gnuTar = yield io.which('gtar', false); + if (gnuTar) { + // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.BSD + }; + } + } + default: + break; + } + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; + }); +} +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? 
tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; + } + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } + } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; + }); +} +function getWorkingDirectory() { + var _a; + return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); +} +// Common function for extractTar and listTar to get the compression method +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); +} +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. 
+function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); +} +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield (0, exec_1.exec)(command, undefined, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' }) + }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } + } + }); +} +// List the contents of a tar +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); + }); +} +exports.listTar = listTar; +// Extract a tar +function extractTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory(); + yield io.mkdirP(workingDirectory); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); + }); +} +exports.extractTar = extractTar; +// Create a tar +function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Write source directories to manifest.txt to avoid command length limits + (0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); + }); +} +exports.createTar = createTar; +//# sourceMappingURL=tar.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/tar.js.map b/node_modules/@actions/cache/lib/internal/tar.js.map new file mode 100644 index 0000000..839ed79 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tar.js","sourceRoot":"","sources":["../../src/internal/tar.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,wCAAkC;AAClC,gDAAiC;AACjC,2BAA4C;AAC5C,2CAA4B;AAC5B,oDAAqC;AAErC,2CAMoB;AAEpB,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C,wCAAwC;AACxC,SAAe,UAAU;;QACvB,QAAQ,OAAO,CAAC,QAAQ,EAAE;YACxB,KAAK,OAAO,CAAC,CAAC;gBACZ,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,sBAAsB,EAAE,CAAA;gBACnD,MAAM,SAAS,GAAG,kCAAsB,CAAA;gBACxC,IAAI,MAAM,EAAE;oBACV,mCAAmC;oBACnC,OAAoB,EAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,2BAAe,CAAC,GAAG,EAAC,CAAA;iBAC9D;qBAAM,IAAI,IAAA,eAAU,EAAC,SAAS,CAAC,EAAE;oBAChC,OAAoB,EAAC,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,2BAAe,CAAC,GAAG,EAAC,CAAA;iBACjE;gBACD,MAAK;aACN;YACD,KAAK,QAAQ,CAAC,CAAC;gBACb,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;gBAC5C,IAAI,MAAM,EAAE;oBACV,0HAA0H;oBAC1H,OAAoB,EAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,2BAAe,CAAC,GAAG,EAAC,CAAA;iBAC9D;qBAAM;oBACL,OAAoB;wBAClB,IAAI,EAAE,MAAM,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE,IAAI,CAAC;wBACjC,IAAI,EAAE,2BAAe,CAAC,GAAG;qBAC1B,CAAA;iBACF;aACF;YACD;gBACE,MAAK;SACR;QACD,mDAAmD;QACnD,OAAoB;YAClB,IAAI,EAAE,MAAM,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE,IAAI,CAAC;YACjC,IAAI,EAAE,2BAAe,CAAC,GAAG;SAC1B,CAAA;IACH,CAAC;CAAA;AAED,iFAAiF;AACjF,SAAe,UAAU,CACvB,OAAoB,EACpB,iBAAoC,EACpC,IAAY,EACZ,WAAW,GAAG,EAAE;;QAEhB,MAAM,IAAI,GAAG,CAAC,IAAI,OAAO,CAAC,IAAI,GAAG,CAAC,CAAA;QAClC,MAAM,aAAa,GAAG,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAAA;QAC/D,MAAM,OAAO,GAAG,WAAW,CAAA;QAC3B,MAAM,gBAAgB,GAAG,mBAAmB,EAAE,CAAA;QAC9C,sDAAsD;QACtD,MAAM,YAAY,GAChB,OAAO,CAAC,IAAI,KAAK,2BAAe,CAAC,GAAG;YACpC,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;YAC5C,UAAU,CAAA;QAEZ,uBAAuB;QACvB,QAAQ,IAAI,EAAE;YACZ,KAAK,QAAQ;gBACX,IAAI,CAAC,IAAI,CACP,SAAS,EACT,KAAK,EACL,YAAY;oBACV,CAAC,CAAC,OAAO;oBACT,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,EAChE,WAAW,EACX,YAAY;oBACV,CAAC,CAAC,OAAO;oBACT,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,EAChE,IAAI,EACJ,IAAI,EACJ,gBAAgB,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,EAC/D,cAAc,EACd,4BAAgB,CACjB,CAAA;gBACD,MAAK;YACP,KAAK,SAAS;gBACZ,IAAI,CAAC,IAAI,CACP,KAAK,EACL,YAAY;oBACV,CAAC,CAAC,OAAO;oBACT,CAAC,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,EAC9D,IAAI,EACJ,IAAI,EACJ,gBAAgB,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAChE,CAAA;gBACD,MAAK;YACP,KAAK,MAAM;gBACT,IAAI,CAAC,IAAI,CACP,KAAK,EACL,YAAY;oBACV,CAAC,CAAC,OAAO;oBACT,CAAC,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,EAC9D,IAAI,CACL,CAAA;gBACD,MAAK;SACR;QAED,yBAAyB;QACzB,IAAI,OAAO,CAAC,IAAI,KAAK,2BAAe,CAAC,GAAG,EAAE;YACxC,QAAQ,OAAO,CAAC,QAAQ,EAAE;gBACxB,KAAK,OAAO;oBACV,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;oBAC1B,MAAK;gBACP,KAAK,QAAQ;oBACX,IAAI,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAA;oBACtC,MAAK;aACR;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAED,sDAAsD;AACtD,SAAe,WAAW,CACxB,iBAAoC,EACpC,IAAY,EACZ,WAAW,GAAG,EAAE;;QAEhB,IAAI,IAAI,CAAA;QAER,MAAM,OAAO,GAAG,MAAM,UAAU,EAAE,CAAA;QAClC,MAAM,OAAO,GAAG,MAAM,UAAU,CAC9B,OAAO,EACP,iBAAiB,EACjB,IAAI,EACJ,WAAW,CACZ,CAAA;QACD,MAAM,eAAe,GACnB,IAAI,KAAK,QAAQ;YACf,CAAC,CAAC,MAAM,uBAAuB,CAAC,OAAO,EAAE,iBAAiB,EAAE,WAAW,CAAC;YACxE,CAAC,CAAC,MAAM,qBAAqB,CAAC,OAAO,EAAE,iBAAiB,CAAC,CAAA;QAC7D,MAAM,YAAY,GAChB,OAAO,CAAC,IAAI,KAAK,2BAAe,CAAC,GAAG;YACpC,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;YAC5C,UAAU,CAAA;QAEZ,IAAI,YAAY,IAAI,IAAI,KAAK,QAAQ,EAAE;YACrC,IAAI,GAAG,CAAC,CAAC,GAAG,eAAe,CAAC,CAAC,IAAI,CAAC
,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;SAChE;aAAM;YACL,IAAI,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,eAAe,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,YAAY,EAAE;YAChB,OAAO,IAAI,CAAA;SACZ;QAED,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;IACzB,CAAC;CAAA;AAED,SAAS,mBAAmB;;IAC1B,OAAO,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;AACzD,CAAC;AAED,2EAA2E;AAC3E,SAAe,uBAAuB,CACpC,OAAoB,EACpB,iBAAoC,EACpC,WAAmB;;QAEnB,kBAAkB;QAClB,oCAAoC;QACpC,iHAAiH;QACjH,oEAAoE;QACpE,MAAM,YAAY,GAChB,OAAO,CAAC,IAAI,KAAK,2BAAe,CAAC,GAAG;YACpC,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;YAC5C,UAAU,CAAA;QACZ,QAAQ,iBAAiB,EAAE;YACzB,KAAK,6BAAiB,CAAC,IAAI;gBACzB,OAAO,YAAY;oBACjB,CAAC,CAAC;wBACE,8BAA8B;wBAC9B,uBAAW;wBACX,WAAW,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;qBAC3D;oBACH,CAAC,CAAC;wBACE,wBAAwB;wBACxB,UAAU,CAAC,CAAC,CAAC,qBAAqB,CAAC,CAAC,CAAC,kBAAkB;qBACxD,CAAA;YACP,KAAK,6BAAiB,CAAC,eAAe;gBACpC,OAAO,YAAY;oBACjB,CAAC,CAAC;wBACE,oBAAoB;wBACpB,uBAAW;wBACX,WAAW,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;qBAC3D;oBACH,CAAC,CAAC,CAAC,wBAAwB,EAAE,UAAU,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAA;YACrE;gBACE,OAAO,CAAC,IAAI,CAAC,CAAA;SAChB;IACH,CAAC;CAAA;AAED,gCAAgC;AAChC,+GAA+G;AAC/G,qCAAqC;AACrC,iHAAiH;AACjH,oEAAoE;AACpE,0GAA0G;AAC1G,SAAe,qBAAqB,CAClC,OAAoB,EACpB,iBAAoC;;QAEpC,MAAM,aAAa,GAAG,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAAA;QAC/D,MAAM,YAAY,GAChB,OAAO,CAAC,IAAI,KAAK,2BAAe,CAAC,GAAG;YACpC,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;YAC5C,UAAU,CAAA;QACZ,QAAQ,iBAAiB,EAAE;YACzB,KAAK,6BAAiB,CAAC,IAAI;gBACzB,OAAO,YAAY;oBACjB,CAAC,CAAC;wBACE,+BAA+B;wBAC/B,aAAa,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;wBAC5D,uBAAW;qBACZ;oBACH,CAAC,CAAC;wBACE,wBAAwB;wBACxB,UAAU,CAAC,CAAC,CAAC,sBAAsB,CAAC,CAAC,CAAC,kBAAkB;qBACzD,CAAA;YACP,KAAK,6BAAiB,CAAC,eAAe;gBACpC,OAAO,YAAY;oBACjB,CAAC,CAAC;wBACE,qBAAqB;wBACrB,aAAa,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;wBAC5D,uBAAW;qBACZ;oBACH,CAAC,CAAC,CAAC,wBAAwB,EAAE,UAAU,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAA;YACtE;gBACE,OAAO,CAAC,IAAI,CAAC,CAAA;SAChB;IACH,CAAC;CAAA;AAED,8CAA8C;AAC9C,SAAe,YAAY,CAAC,QAAkB,EAAE,GAAY;;QAC1D,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IAAI;gBACF,MAAM,IAAA,WAAI,EAAC,OAAO,EAAE,SAAS,EAAE;oBAC7B,GAAG;oBACH,GAAG,kCAAO,OAAO,CAAC,GAAc,KAAE,IAAI,EAAE,0BAA0B,GAAC;iBACpE,CAAC,CAAA;aACH;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CACb,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,uBAAuB,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EAAE,CAChE,CAAA;aACF;SACF;IACH,CAAC;CAAA;AAED,6BAA6B;AAC7B,SAAsB,OAAO,CAC3B,WAAmB,EACnB,iBAAoC;;QAEpC,MAAM,QAAQ,GAAG,MAAM,WAAW,CAAC,iBAAiB,EAAE,MAAM,EAAE,WAAW,CAAC,CAAA;QAC1E,MAAM,YAAY,CAAC,QAAQ,CAAC,CAAA;IAC9B,CAAC;CAAA;AAND,0BAMC;AAED,gBAAgB;AAChB,SAAsB,UAAU,CAC9B,WAAmB,EACnB,iBAAoC;;QAEpC,uCAAuC;QACvC,MAAM,gBAAgB,GAAG,mBAAmB,EAAE,CAAA;QAC9C,MAAM,EAAE,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAA;QACjC,MAAM,QAAQ,GAAG,MAAM,WAAW,CAAC,iBAAiB,EAAE,SAAS,EAAE,WAAW,CAAC,CAAA;QAC7E,MAAM,YAAY,CAAC,QAAQ,CAAC,CAAA;IAC9B,CAAC;CAAA;AATD,gCASC;AAED,eAAe;AACf,SAAsB,SAAS,CAC7B,aAAqB,EACrB,iBAA2B,EAC3B,iBAAoC;;QAEpC,0EAA0E;QAC1E,IAAA,kBAAa,EACX,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,4BAAgB,CAAC,EAC1C,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,CAC7B,CAAA;QACD,MAAM,QAAQ,GAAG,MAAM,WAAW,CAAC,iBAAiB,EAAE,QAAQ,CAAC,CAAA;QAC/D,MAAM,YAAY,CAAC,QAAQ,EAAE,aAAa,CAAC,CAAA;IAC7C,CAAC;CAAA;AAZD,8BAYC"} \ No newline at end of file diff --git 
a/node_modules/@actions/cache/lib/options.d.ts b/node_modules/@actions/cache/lib/options.d.ts new file mode 100644 index 0000000..33c94c9 --- /dev/null +++ b/node_modules/@actions/cache/lib/options.d.ts @@ -0,0 +1,75 @@ +/** + * Options to control cache upload + */ +export interface UploadOptions { + /** + * Number of parallel cache upload + * + * @default 4 + */ + uploadConcurrency?: number; + /** + * Maximum chunk size in bytes for cache upload + * + * @default 32MB + */ + uploadChunkSize?: number; +} +/** + * Options to control cache download + */ +export interface DownloadOptions { + /** + * Indicates whether to use the Azure Blob SDK to download caches + * that are stored on Azure Blob Storage to improve reliability and + * performance + * + * @default true + */ + useAzureSdk?: boolean; + /** + * Number of parallel downloads (this option only applies when using + * the Azure SDK) + * + * @default 8 + */ + downloadConcurrency?: number; + /** + * Indicates whether to use Actions HttpClient with concurrency + * for Azure Blob Storage + */ + concurrentBlobDownloads?: boolean; + /** + * Maximum time for each download request, in milliseconds (this + * option only applies when using the Azure SDK) + * + * @default 30000 + */ + timeoutInMs?: number; + /** + * Time after which a segment download should be aborted if stuck + * + * @default 3600000 + */ + segmentTimeoutInMs?: number; + /** + * Weather to skip downloading the cache entry. + * If lookupOnly is set to true, the restore function will only check if + * a matching cache entry exists and return the cache key if it does. + * + * @default false + */ + lookupOnly?: boolean; +} +/** + * Returns a copy of the upload options with defaults filled in. + * + * @param copy the original upload options + */ +export declare function getUploadOptions(copy?: UploadOptions): UploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +export declare function getDownloadOptions(copy?: DownloadOptions): DownloadOptions; diff --git a/node_modules/@actions/cache/lib/options.js b/node_modules/@actions/cache/lib/options.js new file mode 100644 index 0000000..c5021d0 --- /dev/null +++ b/node_modules/@actions/cache/lib/options.js @@ -0,0 +1,100 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getDownloadOptions = exports.getUploadOptions = void 0; +const core = __importStar(require("@actions/core")); +/** + * Returns a copy of the upload options with defaults filled in. + * + * @param copy the original upload options + */ +function getUploadOptions(copy) { + const result = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.uploadConcurrency === 'number') { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === 'number') { + result.uploadChunkSize = copy.uploadChunkSize; + } + } + core.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; +} +exports.getUploadOptions = getUploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +function getDownloadOptions(copy) { + const result = { + useAzureSdk: false, + concurrentBlobDownloads: true, + downloadConcurrency: 8, + timeoutInMs: 30000, + segmentTimeoutInMs: 600000, + lookupOnly: false + }; + if (copy) { + if (typeof copy.useAzureSdk === 'boolean') { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.concurrentBlobDownloads === 'boolean') { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } + if (typeof copy.downloadConcurrency === 'number') { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === 'number') { + result.timeoutInMs = copy.timeoutInMs; + } + if (typeof copy.segmentTimeoutInMs === 'number') { + result.segmentTimeoutInMs = copy.segmentTimeoutInMs; + } + if (typeof copy.lookupOnly === 'boolean') { + result.lookupOnly = copy.lookupOnly; + } + } + const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; + if (segmentDownloadTimeoutMins && + !isNaN(Number(segmentDownloadTimeoutMins)) && + isFinite(Number(segmentDownloadTimeoutMins))) { + result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000; + } + core.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core.debug(`Download concurrency: ${result.downloadConcurrency}`); + core.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); + core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core.debug(`Lookup only: ${result.lookupOnly}`); + return result; +} +exports.getDownloadOptions = getDownloadOptions; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/options.js.map b/node_modules/@actions/cache/lib/options.js.map new file mode 100644 index 0000000..6dcb006 --- /dev/null +++ b/node_modules/@actions/cache/lib/options.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAwErC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,MAAM,MAAM,GAAkB;QAC5B,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AApBD,4CAoBC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,KAAK;QAClB,uBAAuB,EAAE,IAAI;QAC7B,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;QAClB,kBAAkB,EAAE,MAAM;QAC1B,UAAU,EAAE,KAAK;KAClB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;SAC9D;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,kBAAkB,KAAK,QAAQ,EAAE;YAC/C,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,CAAA;SACpD;QAED,IAAI,OAAO,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACxC,MAAM,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA;SACpC;KACF;IACD,MAAM,0BAA0B,GAC9B,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAA;IAE9C,IACE,0BAA0B;QAC1B,CAAC,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC;QAC1C,QAAQ,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC,EAC5C;QACA,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,0BAA0B,CAAC,GAAG,EAAE,GAAG,IAAI,CAAA;KAC3E;IACD,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IACzD,IAAI,CAAC,KAAK,CACR,gDAAgD,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,EAAE,CAC/F,CAAA;IACD,IAAI,CAAC,KAAK,CAAC,kCAAkC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAA;IACzE,IAAI,CAAC,KAAK,CAAC,gBAAgB,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;IAE/C,OAAO,MAAM,CAAA;AACf,CAAC;AAvDD,gDAuDC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/.bin/uuid b/node_modules/@actions/cache/node_modules/.bin/uuid new file mode 120000 index 0000000..b3e45bc --- /dev/null +++ b/node_modules/@actions/cache/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/bin/uuid \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/uuid/AUTHORS b/node_modules/@actions/cache/node_modules/uuid/AUTHORS new file mode 100644 index 0000000..5a10523 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/AUTHORS @@ -0,0 +1,5 @@ +Robert Kieffer +Christoph Tavan +AJ ONeal +Vincent Voyer +Roman Shtylman diff --git a/node_modules/@actions/cache/node_modules/uuid/CHANGELOG.md b/node_modules/@actions/cache/node_modules/uuid/CHANGELOG.md new file mode 100644 index 0000000..f811b8a --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,119 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + + +### Features + +* rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + + +### Bug Fixes + +* typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + + + + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + + +### Bug Fixes + +* fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + + + + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + + +### Bug Fixes + +* assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +* fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +* Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +* mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +* enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + + + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + + +### Bug Fixes + +* use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + + + + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + + +### Bug Fixes + +* remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +* use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + + +### Features + +* Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +* (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +* Fix typo (#178) +* Simple typo fix (#165) + +### Features +* v5 support in CLI (#197) +* V5 support (#188) + + +# 3.0.1 (2016-11-28) + +* split uuid versions into separate files + + +# 3.0.0 (2016-11-17) + +* remove .parse and .unparse + + +# 2.0.0 + +* Removed uuid.BufferClass + + +# 1.4.0 + +* Improved module context detection +* Removed public RNG functions + + +# 1.3.2 + +* Improve tests and handling of v1() options (Issue #24) +* Expose RNG option to allow for perf testing with different generators + + +# 1.3.0 + +* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! 
+* Support for node.js crypto API +* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/@actions/cache/node_modules/uuid/LICENSE.md b/node_modules/@actions/cache/node_modules/uuid/LICENSE.md new file mode 100644 index 0000000..8c84e39 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010-2016 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@actions/cache/node_modules/uuid/README.md b/node_modules/@actions/cache/node_modules/uuid/README.md new file mode 100644 index 0000000..1752e47 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/README.md @@ -0,0 +1,276 @@ + + +# uuid [![Build Status](https://secure.travis-ci.org/kelektiv/node-uuid.svg?branch=master)](http://travis-ci.org/kelektiv/node-uuid) # + +Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. + +Features: + +* Support for version 1, 3, 4 and 5 UUIDs +* Cross-platform +* Uses cryptographically-strong random number APIs (when available) +* Zero-dependency, small footprint (... but not [this small](https://gist.github.com/982883)) + +[**Deprecation warning**: The use of `require('uuid')` is deprecated and will not be +supported after version 3.x of this module. Instead, use `require('uuid/[v1|v3|v4|v5]')` as shown in the examples below.] + +## Quickstart - CommonJS (Recommended) + +```shell +npm install uuid +``` + +Then generate your uuid version of choice ... + +Version 1 (timestamp): + +```javascript +const uuidv1 = require('uuid/v1'); +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' + +``` + +Version 3 (namespace): + +```javascript +const uuidv3 = require('uuid/v3'); + +// ... using predefined DNS namespace (for domain names) +uuidv3('hello.example.com', uuidv3.DNS); // ⇨ '9125a8dc-52ee-365b-a5aa-81b0b3681cf6' + +// ... using predefined URL namespace (for, well, URLs) +uuidv3('http://example.com/hello', uuidv3.URL); // ⇨ 'c6235813-3ba4-3801-ae84-e0a6ebb7d138' + +// ... using a custom namespace +// +// Note: Custom namespaces should be a UUID string specific to your application! +// E.g. the one here was generated using this modules `uuid` CLI. 
+const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; +uuidv3('Hello, World!', MY_NAMESPACE); // ⇨ 'e8b5a51d-11c8-3310-a6ab-367563f20686' + +``` + +Version 4 (random): + +```javascript +const uuidv4 = require('uuid/v4'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' + +``` + +Version 5 (namespace): + +```javascript +const uuidv5 = require('uuid/v5'); + +// ... using predefined DNS namespace (for domain names) +uuidv5('hello.example.com', uuidv5.DNS); // ⇨ 'fdda765f-fc57-5604-a269-52a7df8164ec' + +// ... using predefined URL namespace (for, well, URLs) +uuidv5('http://example.com/hello', uuidv5.URL); // ⇨ '3bbcee75-cecc-5b56-8031-b6641c1ed1f1' + +// ... using a custom namespace +// +// Note: Custom namespaces should be a UUID string specific to your application! +// E.g. the one here was generated using this modules `uuid` CLI. +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' + +``` + +## API + +### Version 1 + +```javascript +const uuidv1 = require('uuid/v1'); + +// Incantations +uuidv1(); +uuidv1(options); +uuidv1(options, buffer, offset); +``` + +Generate and return a RFC4122 v1 (timestamp-based) UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1. + * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used. + * `msecs` - (Number) Time in milliseconds since unix Epoch. Default: The current time is used. + * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Example: Generate string UUID with fully-specified options + +```javascript +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678 +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' + +``` + +Example: In-place generation of two binary IDs + +```javascript +// Generate two ids in an array +const arr = new Array(); +uuidv1(null, arr, 0); // ⇨ + // [ + // 44, 94, 164, 192, 64, 103, + // 17, 233, 146, 52, 155, 29, + // 235, 77, 59, 125 + // ] +uuidv1(null, arr, 16); // ⇨ + // [ + // 44, 94, 164, 192, 64, 103, 17, 233, + // 146, 52, 155, 29, 235, 77, 59, 125, + // 44, 94, 164, 193, 64, 103, 17, 233, + // 146, 52, 155, 29, 235, 77, 59, 125 + // ] + +``` + +### Version 3 + +```javascript +const uuidv3 = require('uuid/v3'); + +// Incantations +uuidv3(name, namespace); +uuidv3(name, namespace, buffer); +uuidv3(name, namespace, buffer, offset); +``` + +Generate and return a RFC4122 v3 UUID. + +* `name` - (String | Array[]) "name" to create UUID with +* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. 
+* `offset` - (Number) Starting index in `buffer` at which to begin writing. Default = 0 + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: + +```javascript +uuidv3('hello world', MY_NAMESPACE); // ⇨ '042ffd34-d989-321c-ad06-f60826172424' + +``` + +### Version 4 + +```javascript +const uuidv4 = require('uuid/v4') + +// Incantations +uuidv4(); +uuidv4(options); +uuidv4(options, buffer, offset); +``` + +Generate and return a RFC4122 v4 UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values + * `rng` - (Function) Random # generator function that returns an Array[16] of byte values (0-255) +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: Generate string UUID with predefined `random` values + +```javascript +const v4options = { + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, + 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36 + ] +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' + +``` + +Example: Generate two IDs in a single buffer + +```javascript +const buffer = new Array(); +uuidv4(null, buffer, 0); // ⇨ + // [ + // 155, 29, 235, 77, 59, + // 125, 75, 173, 155, 221, + // 43, 13, 123, 61, 203, + // 109 + // ] +uuidv4(null, buffer, 16); // ⇨ + // [ + // 155, 29, 235, 77, 59, 125, 75, 173, + // 155, 221, 43, 13, 123, 61, 203, 109, + // 27, 157, 107, 205, 187, 253, 75, 45, + // 155, 93, 171, 141, 251, 189, 75, 237 + // ] + +``` + +### Version 5 + +```javascript +const uuidv5 = require('uuid/v5'); + +// Incantations +uuidv5(name, namespace); +uuidv5(name, namespace, buffer); +uuidv5(name, namespace, buffer, offset); +``` + +Generate and return a RFC4122 v5 UUID. + +* `name` - (String | Array[]) "name" to create UUID with +* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. Default = 0 + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: + +```javascript +uuidv5('hello world', MY_NAMESPACE); // ⇨ '9f282611-e0fd-5650-8953-89c8e342da0b' + +``` + +## Command Line + +UUIDs can be generated from the command line with the `uuid` command. 
+ +```shell +$ uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 + +$ uuid v1 +02d37060-d446-11e7-a9fa-7bdae751ebe1 +``` + +Type `uuid --help` for usage details + +## Testing + +```shell +npm test +``` + +---- +Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/uuid/bin/uuid b/node_modules/@actions/cache/node_modules/uuid/bin/uuid new file mode 100755 index 0000000..502626e --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/bin/uuid @@ -0,0 +1,65 @@ +#!/usr/bin/env node +var assert = require('assert'); + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 <name> <namespace uuid>'); + console.log(' uuid v4'); + console.log(' uuid v5 <name> <namespace uuid>'); + console.log(' uuid --help'); + console.log('\nNote: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +var args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} +var version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + var uuidV1 = require('../v1'); + console.log(uuidV1()); + break; + + case 'v3': + var uuidV3 = require('../v3'); + + var name = args.shift(); + var namespace = args.shift(); + assert(name != null, 'v3 name not specified'); + assert(namespace != null, 'v3 namespace not specified'); + + if (namespace == 'URL') namespace = uuidV3.URL; + if (namespace == 'DNS') namespace = uuidV3.DNS; + + console.log(uuidV3(name, namespace)); + break; + + case 'v4': + var uuidV4 = require('../v4'); + console.log(uuidV4()); + break; + + case 'v5': + var uuidV5 = require('../v5'); + + var name = args.shift(); + var namespace = args.shift(); + assert(name != null, 'v5 name not specified'); + assert(namespace != null, 'v5 namespace not specified'); + + if (namespace == 'URL') namespace = uuidV5.URL; + if (namespace == 'DNS') namespace = uuidV5.DNS; + + console.log(uuidV5(name, namespace)); + break; + + default: + usage(); + process.exit(1); +} diff --git a/node_modules/@actions/cache/node_modules/uuid/index.js b/node_modules/@actions/cache/node_modules/uuid/index.js new file mode 100644 index 0000000..e96791a --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/index.js @@ -0,0 +1,8 @@ +var v1 = require('./v1'); +var v4 = require('./v4'); + +var uuid = v4; +uuid.v1 = v1; +uuid.v4 = v4; + +module.exports = uuid; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/bytesToUuid.js b/node_modules/@actions/cache/node_modules/uuid/lib/bytesToUuid.js new file mode 100644 index 0000000..24b6041 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/bytesToUuid.js @@ -0,0 +1,26 @@ +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} + +module.exports =
bytesToUuid; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/md5-browser.js b/node_modules/@actions/cache/node_modules/uuid/lib/md5-browser.js new file mode 100644 index 0000000..9b3b6c7 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/md5-browser.js @@ -0,0 +1,216 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ + +'use strict'; + +function md5(bytes) { + if (typeof(bytes) == 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + bytes = new Array(msg.length); + for (var i = 0; i < msg.length; i++) bytes[i] = msg.charCodeAt(i); + } + + return md5ToHexEncodedArray( + wordsToMd5( + bytesToWords(bytes) + , bytes.length * 8) + ); +} + + +/* +* Convert an array of little-endian words to an array of bytes +*/ +function md5ToHexEncodedArray(input) { + var i; + var x; + var output = []; + var length32 = input.length * 32; + var hexTab = '0123456789abcdef'; + var hex; + + for (i = 0; i < length32; i += 8) { + x = (input[i >> 5] >>> (i % 32)) & 0xFF; + + hex = parseInt(hexTab.charAt((x >>> 4) & 0x0F) + hexTab.charAt(x & 0x0F), 16); + + output.push(hex); + } + return output; +} + +/* +* Calculate the MD5 of an array of little-endian words, and a bit length. 
+*/ +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << (len % 32); + x[(((len + 64) >>> 9) << 4) + 14] = len; + + var i; + var olda; + var oldb; + var oldc; + var oldd; + var a = 1732584193; + var b = -271733879; + var c = -1732584194; + + var d = 271733878; + + for (i = 0; i < x.length; i += 16) { + olda = a; + oldb = b; + oldc = c; + oldd = d; + + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i 
+ 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + return [a, b, c, d]; +} + +/* +* Convert an array bytes to an array of little-endian words +* Characters >255 have their high-byte silently ignored. +*/ +function bytesToWords(input) { + var i; + var output = []; + output[(input.length >> 2) - 1] = undefined; + for (i = 0; i < output.length; i += 1) { + output[i] = 0; + } + var length8 = input.length * 8; + for (i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[(i / 8)] & 0xFF) << (i % 32); + } + + return output; +} + +/* +* Add integers, wrapping at 2^32. This uses 16-bit operations internally +* to work around bugs in some JS interpreters. +*/ +function safeAdd(x, y) { + var lsw = (x & 0xFFFF) + (y & 0xFFFF); + var msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return (msw << 16) | (lsw & 0xFFFF); +} + +/* +* Bitwise rotate a 32-bit number to the left. +*/ +function bitRotateLeft(num, cnt) { + return (num << cnt) | (num >>> (32 - cnt)); +} + +/* +* These functions implement the four basic operations the algorithm uses. +*/ +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} +function md5ff(a, b, c, d, x, s, t) { + return md5cmn((b & c) | ((~b) & d), a, b, x, s, t); +} +function md5gg(a, b, c, d, x, s, t) { + return md5cmn((b & d) | (c & (~d)), a, b, x, s, t); +} +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | (~d)), a, b, x, s, t); +} + +module.exports = md5; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/md5.js b/node_modules/@actions/cache/node_modules/uuid/lib/md5.js new file mode 100644 index 0000000..7044b87 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/md5.js @@ -0,0 +1,25 @@ +'use strict'; + +var crypto = require('crypto'); + +function md5(bytes) { + if (typeof Buffer.from === 'function') { + // Modern Buffer API + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + } else { + // Pre-v4 Buffer API + if (Array.isArray(bytes)) { + bytes = new Buffer(bytes); + } else if (typeof bytes === 'string') { + bytes = new Buffer(bytes, 'utf8'); + } + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +module.exports = md5; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/rng-browser.js b/node_modules/@actions/cache/node_modules/uuid/lib/rng-browser.js new file mode 100644 index 0000000..6361fb8 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/rng-browser.js @@ -0,0 +1,34 @@ +// Unique ID creation requires a high quality random # generator. In the +// browser this is a little complicated due to unknown quality of Math.random() +// and inconsistent support for the `crypto` API. We do the best we can via +// feature-detection + +// getRandomValues needs to be invoked in a context where "this" is a Crypto +// implementation. Also, find the complete implementation of crypto on IE11. 
+var getRandomValues = (typeof(crypto) != 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto)) || + (typeof(msCrypto) != 'undefined' && typeof window.msCrypto.getRandomValues == 'function' && msCrypto.getRandomValues.bind(msCrypto)); + +if (getRandomValues) { + // WHATWG crypto RNG - http://wiki.whatwg.org/wiki/Crypto + var rnds8 = new Uint8Array(16); // eslint-disable-line no-undef + + module.exports = function whatwgRNG() { + getRandomValues(rnds8); + return rnds8; + }; +} else { + // Math.random()-based (RNG) + // + // If all else fails, use Math.random(). It's fast, but is of unspecified + // quality. + var rnds = new Array(16); + + module.exports = function mathRNG() { + for (var i = 0, r; i < 16; i++) { + if ((i & 0x03) === 0) r = Math.random() * 0x100000000; + rnds[i] = r >>> ((i & 0x03) << 3) & 0xff; + } + + return rnds; + }; +} diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/rng.js b/node_modules/@actions/cache/node_modules/uuid/lib/rng.js new file mode 100644 index 0000000..58f0dc9 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/rng.js @@ -0,0 +1,8 @@ +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = require('crypto'); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/sha1-browser.js b/node_modules/@actions/cache/node_modules/uuid/lib/sha1-browser.js new file mode 100644 index 0000000..5758ed7 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/sha1-browser.js @@ -0,0 +1,89 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +'use strict'; + +function f(s, x, y, z) { + switch (s) { + case 0: return (x & y) ^ (~x & z); + case 1: return x ^ y ^ z; + case 2: return (x & y) ^ (x & z) ^ (y & z); + case 3: return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return (x << n) | (x>>> (32 - n)); +} + +function sha1(bytes) { + var K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + var H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof(bytes) == 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + bytes = new Array(msg.length); + for (var i = 0; i < msg.length; i++) bytes[i] = msg.charCodeAt(i); + } + + bytes.push(0x80); + + var l = bytes.length/4 + 2; + var N = Math.ceil(l/16); + var M = new Array(N); + + for (var i=0; i>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = (H[0] + a) >>> 0; + H[1] = (H[1] + b) >>> 0; + H[2] = (H[2] + c) >>> 0; + H[3] = (H[3] + d) >>> 0; + H[4] = (H[4] + e) >>> 0; + } + + return [ + H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, + H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, + H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, + H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, + H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff + ]; +} + +module.exports = sha1; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/sha1.js b/node_modules/@actions/cache/node_modules/uuid/lib/sha1.js new file mode 100644 index 0000000..0b54b25 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/sha1.js @@ -0,0 +1,25 @@ +'use strict'; + +var crypto = require('crypto'); + +function sha1(bytes) { + if (typeof Buffer.from === 'function') { + // Modern 
Buffer API + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + } else { + // Pre-v4 Buffer API + if (Array.isArray(bytes)) { + bytes = new Buffer(bytes); + } else if (typeof bytes === 'string') { + bytes = new Buffer(bytes, 'utf8'); + } + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +module.exports = sha1; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/v35.js b/node_modules/@actions/cache/node_modules/uuid/lib/v35.js new file mode 100644 index 0000000..8b066cc --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/v35.js @@ -0,0 +1,57 @@ +var bytesToUuid = require('./bytesToUuid'); + +function uuidToBytes(uuid) { + // Note: We assume we're being passed a valid uuid string + var bytes = []; + uuid.replace(/[a-fA-F0-9]{2}/g, function(hex) { + bytes.push(parseInt(hex, 16)); + }); + + return bytes; +} + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + var bytes = new Array(str.length); + for (var i = 0; i < str.length; i++) { + bytes[i] = str.charCodeAt(i); + } + return bytes; +} + +module.exports = function(name, version, hashfunc) { + var generateUUID = function(value, namespace, buf, offset) { + var off = buf && offset || 0; + + if (typeof(value) == 'string') value = stringToBytes(value); + if (typeof(namespace) == 'string') namespace = uuidToBytes(namespace); + + if (!Array.isArray(value)) throw TypeError('value must be an array of bytes'); + if (!Array.isArray(namespace) || namespace.length !== 16) throw TypeError('namespace must be uuid string or an Array of 16 byte values'); + + // Per 4.3 + var bytes = hashfunc(namespace.concat(value)); + bytes[6] = (bytes[6] & 0x0f) | version; + bytes[8] = (bytes[8] & 0x3f) | 0x80; + + if (buf) { + for (var idx = 0; idx < 16; ++idx) { + buf[off+idx] = bytes[idx]; + } + } + + return buf || bytesToUuid(bytes); + }; + + // Function#name is not settable on some platforms (#270) + try { + generateUUID.name = name; + } catch (err) { + } + + // Pre-defined namespaces, per Appendix C + generateUUID.DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; + generateUUID.URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; + + return generateUUID; +}; diff --git a/node_modules/@actions/cache/node_modules/uuid/package.json b/node_modules/@actions/cache/node_modules/uuid/package.json new file mode 100644 index 0000000..efc07b8 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/package.json @@ -0,0 +1,49 @@ +{ + "name": "uuid", + "version": "3.4.0", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./bin/uuid" + }, + "devDependencies": { + "@commitlint/cli": "~8.2.0", + "@commitlint/config-conventional": "~8.2.0", + "eslint": "~6.4.0", + "husky": "~3.0.5", + "mocha": "6.2.0", + "runmd": "1.2.1", + "standard-version": "7.0.0" + }, + "scripts": { + "lint": "eslint .", + "test": "npm run lint && mocha test/test.js", + "md": "runmd --watch --output=README.md README_js.md", + "release": "standard-version", + "prepare": "runmd --output=README.md README_js.md" + }, + "browser": { + "./lib/rng.js": "./lib/rng-browser.js", + "./lib/sha1.js": "./lib/sha1-browser.js", + "./lib/md5.js": "./lib/md5-browser.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "husky": { + "hooks": 
{ + "commit-msg": "commitlint -E HUSKY_GIT_PARAMS" + } + } +} diff --git a/node_modules/@actions/cache/node_modules/uuid/v1.js b/node_modules/@actions/cache/node_modules/uuid/v1.js new file mode 100644 index 0000000..8c245de --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/v1.js @@ -0,0 +1,109 @@ +var rng = require('./lib/rng'); +var bytesToUuid = require('./lib/bytesToUuid'); + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; +var _clockseq; + +// Previous uuid creation time +var _lastMSecs = 0; +var _lastNSecs = 0; + +// See https://github.com/uuidjs/uuid for API details +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; + + // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + if (node == null || clockseq == null) { + var seedBytes = rng(); + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [ + seedBytes[0] | 0x01, + seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] + ]; + } + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } + + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); + + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; + + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; + + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } + + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } + + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; + + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; + + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; + + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; + + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; + + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; + + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf ? buf : bytesToUuid(b); +} + +module.exports = v1; diff --git a/node_modules/@actions/cache/node_modules/uuid/v3.js b/node_modules/@actions/cache/node_modules/uuid/v3.js new file mode 100644 index 0000000..ee7e14c --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/v3.js @@ -0,0 +1,4 @@ +var v35 = require('./lib/v35.js'); +var md5 = require('./lib/md5'); + +module.exports = v35('v3', 0x30, md5); \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/uuid/v4.js b/node_modules/@actions/cache/node_modules/uuid/v4.js new file mode 100644 index 0000000..1f07be1 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/v4.js @@ -0,0 +1,29 @@ +var rng = require('./lib/rng'); +var bytesToUuid = require('./lib/bytesToUuid'); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? 
new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; diff --git a/node_modules/@actions/cache/node_modules/uuid/v5.js b/node_modules/@actions/cache/node_modules/uuid/v5.js new file mode 100644 index 0000000..4945baf --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/v5.js @@ -0,0 +1,3 @@ +var v35 = require('./lib/v35.js'); +var sha1 = require('./lib/sha1'); +module.exports = v35('v5', 0x50, sha1); diff --git a/node_modules/@actions/cache/package.json b/node_modules/@actions/cache/package.json new file mode 100644 index 0000000..d325108 --- /dev/null +++ b/node_modules/@actions/cache/package.json @@ -0,0 +1,56 @@ +{ + "name": "@actions/cache", + "version": "3.2.4", + "preview": true, + "description": "Actions cache lib", + "keywords": [ + "github", + "actions", + "cache" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/cache", + "license": "MIT", + "main": "lib/cache.js", + "types": "lib/cache.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/cache" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.10.0", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.1.1", + "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.13.0", + "semver": "^6.3.1", + "uuid": "^3.3.3" + }, + "devDependencies": { + "@types/semver": "^6.0.0", + "@types/uuid": "^3.4.5", + "typescript": "^5.2.2" + } +} diff --git a/node_modules/@actions/glob/LICENSE.md b/node_modules/@actions/glob/LICENSE.md new file mode 100644 index 0000000..dbae2ed --- /dev/null +++ b/node_modules/@actions/glob/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/glob/README.md b/node_modules/@actions/glob/README.md new file mode 100644 index 0000000..9575bea --- /dev/null +++ b/node_modules/@actions/glob/README.md @@ -0,0 +1,113 @@ +# `@actions/glob` + +## Usage + +### Basic + +You can use this package to search for files matching glob patterns. + +Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory. + +```js +const glob = require('@actions/glob'); + +const patterns = ['**/tar.gz', '**/tar.bz'] +const globber = await glob.create(patterns.join('\n')) +const files = await globber.glob() +``` + +### Opt out of following symbolic links + +```js +const glob = require('@actions/glob'); + +const globber = await glob.create('**', {followSymbolicLinks: false}) +const files = await globber.glob() +``` + +### Iterator + +When dealing with a large number of results, consider iterating the results as they are returned: + +```js +const glob = require('@actions/glob'); + +const globber = await glob.create('**') +for await (const file of globber.globGenerator()) { + console.log(file) +} +``` + +## Recommended action inputs + +Glob follows symbolic links by default. Following is often appropriate unless deleting files. + +Users may want to opt-out from following symbolic links for other reasons. For example, +excessive amounts of symbolic links can create the appearance of very, very many files +and slow the search. + +When an action allows a user to specify input patterns, it is generally recommended to +allow users to opt-out from following symbolic links. + +Snippet from `action.yml`: + +```yaml +inputs: + files: + description: 'Files to print' + required: true + follow-symbolic-links: + description: 'Indicates whether to follow symbolic links' + default: true +``` + +And corresponding toolkit consumption: + +```js +const core = require('@actions/core') +const glob = require('@actions/glob') + +const globOptions = { + followSymbolicLinks: core.getInput('follow-symbolic-links').toUpperCase() !== 'FALSE' +} +const globber = await glob.create(core.getInput('files'), globOptions) +for await (const file of globber.globGenerator()) { + console.log(file) +} +``` + +## Patterns + +### Glob behavior + +Patterns `*`, `?`, `[...]`, `**` (globstar) are supported. + +With the following behaviors: +- File names that begin with `.` may be included in the results +- Case insensitive on Windows +- Directory separator `/` and `\` both supported on Windows + +### Tilde expansion + +Supports basic tilde expansion, for current user HOME replacement only. + +Example: +- `~` may expand to /Users/johndoe +- `~/foo` may expand to /Users/johndoe/foo + +### Comments + +Patterns that begin with `#` are treated as comments. + +### Exclude patterns + +Leading `!` changes the meaning of an include pattern to exclude. + +Multiple leading `!` flips the meaning. + +### Escaping + +Wrapping special characters in `[]` can be used to escape literal glob characters +in a file name. For example the literal file name `hello[a-z]` can be escaped as `hello[[]a-z]`. + +On Linux/macOS `\` is also treated as an escape character. 
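To tie the pattern rules above together, here is a minimal sketch in the style of the README examples. The `dist/**` patterns, the option values, and the `<cwd>` placeholder are made up for illustration; the `create`, `glob`, and `getSearchPaths` calls and the three options come from the `@actions/glob` declaration files later in this diff.

```js
const glob = require('@actions/glob')

// Patterns are newline-separated; a '#' line is a comment and a leading '!' excludes.
const patterns = [
  '# archives produced by the build',
  'dist/**/*.tar.gz',
  '!dist/**/*-debug.tar.gz'
].join('\n')

const globber = await glob.create(patterns, {
  followSymbolicLinks: false,    // do not traverse symbolic links while searching
  implicitDescendants: true,     // a matched directory also matches everything beneath it
  omitBrokenSymbolicLinks: true  // skip broken links instead of throwing (only relevant when following)
})

console.log(globber.getSearchPaths()) // search root(s) preceding the first glob segment, e.g. ['<cwd>/dist']
for (const file of await globber.glob()) {
  console.log(file)
}
```

Since `glob()` buffers the entire result list, `globGenerator()` remains the better choice when a pattern can match a very large number of files, as noted in the Iterator section above.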
diff --git a/node_modules/@actions/glob/lib/glob.d.ts b/node_modules/@actions/glob/lib/glob.d.ts new file mode 100644 index 0000000..56424fc --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.d.ts @@ -0,0 +1,10 @@ +import { Globber } from './internal-globber'; +import { GlobOptions } from './internal-glob-options'; +export { Globber, GlobOptions }; +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +export declare function create(patterns: string, options?: GlobOptions): Promise; diff --git a/node_modules/@actions/glob/lib/glob.js b/node_modules/@actions/glob/lib/glob.js new file mode 100644 index 0000000..58cb683 --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.js @@ -0,0 +1,26 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.create = void 0; +const internal_globber_1 = require("./internal-globber"); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/glob.js.map b/node_modules/@actions/glob/lib/glob.js.map new file mode 100644 index 0000000..b27e7e4 --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"glob.js","sourceRoot":"","sources":["../src/glob.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,yDAA0D;AAK1D;;;;;GAKG;AACH,SAAsB,MAAM,CAC1B,QAAgB,EAChB,OAAqB;;QAErB,OAAO,MAAM,iCAAc,CAAC,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAA;IACvD,CAAC;CAAA;AALD,wBAKC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts b/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts new file mode 100644 index 0000000..8a78960 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts @@ -0,0 +1,5 @@ +import { GlobOptions } from './internal-glob-options'; +/** + * Returns a copy with defaults filled in. + */ +export declare function getOptions(copy?: GlobOptions): GlobOptions; diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.js b/node_modules/@actions/glob/lib/internal-glob-options-helper.js new file mode 100644 index 0000000..974dfd0 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.js @@ -0,0 +1,50 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOptions = void 0; +const core = __importStar(require("@actions/core")); +/** + * Returns a copy with defaults filled in. + */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map b/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map new file mode 100644 index 0000000..93376dd --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-glob-options-helper.js","sourceRoot":"","sources":["../src/internal-glob-options-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAGrC;;GAEG;AACH,SAAgB,UAAU,CAAC,IAAkB;IAC3C,MAAM,MAAM,GAAgB;QAC1B,mBAAmB,EAAE,IAAI;QACzB,mBAAmB,EAAE,IAAI;QACzB,uBAAuB,EAAE,IAAI;KAC9B,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,SAAS,EAAE;YACjD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;YACrD,IAAI,CAAC,KAAK,CAAC,wBAAwB,MAAM,CAAC,mBAAmB,GAAG,CAAC,CAAA;SAClE;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,SAAS,EAAE;YACjD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;YACrD,IAAI,CAAC,KAAK,CAAC,wBAAwB,MAAM,CAAC,mBAAmB,GAAG,CAAC,CAAA;SAClE;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;YAC7D,IAAI,CAAC,KAAK,CAAC,4BAA4B,MAAM,CAAC,uBAAuB,GAAG,CAAC,CAAA;SAC1E;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAzBD,gCAyBC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options.d.ts b/node_modules/@actions/glob/lib/internal-glob-options.d.ts new file mode 100644 index 0000000..c1cac0f --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.d.ts @@ -0,0 +1,29 @@ +/** + * Options to control globbing behavior + */ +export interface GlobOptions { + /** + * Indicates whether to follow symbolic links. Generally should set to false + * when deleting files. 
+ * + * @default true + */ + followSymbolicLinks?: boolean; + /** + * Indicates whether directories that match a glob pattern, should implicitly + * cause all descendant paths to be matched. + * + * For example, given the directory `my-dir`, the following glob patterns + * would produce the same results: `my-dir/**`, `my-dir/`, `my-dir` + * + * @default true + */ + implicitDescendants?: boolean; + /** + * Indicates whether broken symbolic should be ignored and omitted from the + * result set. Otherwise an error will be thrown. + * + * @default true + */ + omitBrokenSymbolicLinks?: boolean; +} diff --git a/node_modules/@actions/glob/lib/internal-glob-options.js b/node_modules/@actions/glob/lib/internal-glob-options.js new file mode 100644 index 0000000..8960c70 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=internal-glob-options.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options.js.map b/node_modules/@actions/glob/lib/internal-glob-options.js.map new file mode 100644 index 0000000..dec7953 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-glob-options.js","sourceRoot":"","sources":["../src/internal-glob-options.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-globber.d.ts b/node_modules/@actions/glob/lib/internal-globber.d.ts new file mode 100644 index 0000000..6d0036b --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.d.ts @@ -0,0 +1,42 @@ +import { GlobOptions } from './internal-glob-options'; +export { GlobOptions }; +/** + * Used to match files and directories + */ +export interface Globber { + /** + * Returns the search path preceding the first glob segment, from each pattern. + * Duplicates and descendants of other paths are filtered out. + * + * Example 1: The patterns `/foo/*` and `/bar/*` returns `/foo` and `/bar`. + * + * Example 2: The patterns `/foo/*` and `/foo/bar/*` returns `/foo`. + */ + getSearchPaths(): string[]; + /** + * Returns files and directories matching the glob patterns. + * + * Order of the results is not guaranteed. + */ + glob(): Promise; + /** + * Returns files and directories matching the glob patterns. + * + * Order of the results is not guaranteed. + */ + globGenerator(): AsyncGenerator; +} +export declare class DefaultGlobber implements Globber { + private readonly options; + private readonly patterns; + private readonly searchPaths; + private constructor(); + getSearchPaths(): string[]; + glob(): Promise; + globGenerator(): AsyncGenerator; + /** + * Constructs a DefaultGlobber + */ + static create(patterns: string, options?: GlobOptions): Promise; + private static stat; +} diff --git a/node_modules/@actions/glob/lib/internal-globber.js b/node_modules/@actions/glob/lib/internal-globber.js new file mode 100644 index 0000000..a6695b5 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.js @@ -0,0 +1,235 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultGlobber = void 0; +const core = __importStar(require("@actions/core")); +const fs = __importStar(require("fs")); +const globOptionsHelper = __importStar(require("./internal-glob-options-helper")); +const path = __importStar(require("path")); +const patternHelper = __importStar(require("./internal-pattern-helper")); +const internal_match_kind_1 = require("./internal-match-kind"); +const internal_pattern_1 = require("./internal-pattern"); +const internal_search_state_1 = require("./internal-search-state"); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory) { + yield yield __await(item.path); + } + // Descend? 
+ else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-globber.js.map b/node_modules/@actions/glob/lib/internal-globber.js.map new file mode 100644 index 0000000..cd1f05d --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-globber.js","sourceRoot":"","sources":["../src/internal-globber.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,uCAAwB;AACxB,kFAAmE;AACnE,2CAA4B;AAC5B,yEAA0D;AAE1D,+DAA+C;AAC/C,yDAA0C;AAC1C,mEAAmD;AAEnD,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAiC/C,MAAa,cAAc;IAKzB,YAAoB,OAAqB;QAHxB,aAAQ,GAAc,EAAE,CAAA;QACxB,gBAAW,GAAa,EAAE,CAAA;QAGzC,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtD,CAAC;IAED,cAAc;QACZ,gBAAgB;QAChB,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,CAAA;IACjC,CAAC;IAEK,IAAI;;;YACR,MAAM,MAAM,GAAa,EAAE,CAAA;;gBAC3B,KAA6B,IAAA,KAAA,cAAA,IAAI,CAAC,aAAa,EAAE,CAAA,IAAA;oBAAtC,MAAM,QAAQ,WAAA,CAAA;oBACvB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;iBACtB;;;;;;;;;YACD,OAAO,MAAM,CAAA;;KACd;IAEM,aAAa;;YAClB,2BAA2B;YAC3B,MAAM,OAAO,GAAG,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;YAC1D,wBAAwB;YACxB,MAAM,QAAQ,GAAc,EAAE,CAAA;YAC9B,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;gBACtB,IACE,OAAO,CAAC,mBAAmB;oBAC3B,CAAC,OAAO,CAAC,iBAAiB;wBACxB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,EACzD;oBACA,QAAQ,CAAC,IAAI,CACX,IAAI,0BAAO,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CACjE,CAAA;iBACF;aACF;YAED,wBAAwB;YAExB,MAAM,KAAK,GAAkB,EAAE,CAAA;YAC/B,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,cAAc,CAAC,QAAQ,CAAC,EAAE;gBAC/D,IAAI,CAAC,KAAK,CAAC,gBAAgB,UAAU,GAAG,CAAC,CAAA;gBAEzC,UAAU;gBACV,IAAI;oBACF,0DAA0D;oBAC1D,mDAAmD;oBACnD,cAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,CAAA,CAAA;iBACpC;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;wBACzB,SAAQ;qBACT;oBACD,MAAM,GAAG,CAAA;iBACV;gBAED,KAAK,CAAC,OAAO,CAAC,IAAI,mCAAW,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;aAC9C;YAED,SAAS;YACT,MAAM,cAAc,GAAa,EAAE,CAAA,CAAC,wBAAwB;YAC5D,OAAO,KAAK,CAAC,MAAM,EAAE;gBACnB,MAAM;gBACN,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAiB,CAAA;gBAEvC,SAAS;gBACT,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,CAAA;gBACtD,MAAM,YAAY,GAChB,CAAC,CAAC,KAAK,IAAI,aAAa,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,CAAA;gBAC5D,IAAI,CAAC,KAAK,IAAI,CAAC,YAAY,EAAE;oBAC3B,SAAQ;iBACT;gBAED,OAAO;gBACP,MAAM,KAAK,GAAyB,cAAM,cAAc,CAAC,IAAI,CAC3D,IAAI,EACJ,OAAO,EACP,cAAc,CACf;gBAED,iEAAiE;iBAFhE,CAAA;gBAED,iEAAiE;gBACjE,IAAI,CAAC,KAAK,EAAE;oBACV,SAAQ;iBACT;gBAED,YAAY;gBACZ,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;oBACvB,UAAU;oBACV,IAAI,KAAK,GAAG,+BAAS,CAAC,SAAS,EAAE;wBAC/B,oBAAM,IAAI,CAAC,IAAI,CAAA,CAAA;qBAChB;oBACD,WAAW;yBACN,IAAI,CAAC,YAAY,EAAE;wBACtB,SAAQ;qBACT;oBAED,kCAAkC;oBAClC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;oBACjC,MAAM,UAAU,GAAG,CAAC,cAAM,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA,CAAC,CAAC,GAAG,CAC3D,CAAC,CAAC,EAAE,CAAC,IAAI,mCAAW,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,UAAU,CAAC,CAC1D,CAAA;oBACD,KAAK,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,OAAO,EAAE,CAAC,CAAA;iBACpC;gBACD,OAAO;qBACF,IAAI,KAAK,GAAG,+BAAS,CAAC,IAAI,EAAE;oBAC/B,oBAAM,IAAI,CAAC,IAAI,CAAA,CAAA;iBAChB;aACF;QACH,CAAC;KAAA;IAED;;OAEG;IACH,MAAM,CAAO,MAAM,CACjB,QAAgB,EAChB,OAAqB;;YAErB,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC,OAAO,CAAC,CAAA;YAE1C,IAAI,UAAU,EAAE;gBACd,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;gBAC1C,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;aACzC;YAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;YACrD,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;gBACxB,mBAAmB;gBACnB,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACjC,SAAQ;iBACT;gBACD,UAAU;qBACL;oBACH,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,0BAAO,CAAC,IA
AI,CAAC,CAAC,CAAA;iBACxC;aACF;YAED,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAA;YAEzE,OAAO,MAAM,CAAA;QACf,CAAC;KAAA;IAEO,MAAM,CAAO,IAAI,CACvB,IAAiB,EACjB,OAAoB,EACpB,cAAwB;;YAExB,QAAQ;YACR,uEAAuE;YACvE,8CAA8C;YAC9C,IAAI,KAAe,CAAA;YACnB,IAAI,OAAO,CAAC,mBAAmB,EAAE;gBAC/B,IAAI;oBACF,kCAAkC;oBAClC,KAAK,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;iBAC1C;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;wBACzB,IAAI,OAAO,CAAC,uBAAuB,EAAE;4BACnC,IAAI,CAAC,KAAK,CAAC,mBAAmB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAA;4BAC3C,OAAO,SAAS,CAAA;yBACjB;wBAED,MAAM,IAAI,KAAK,CACb,sCAAsC,IAAI,CAAC,IAAI,8CAA8C,CAC9F,CAAA;qBACF;oBAED,MAAM,GAAG,CAAA;iBACV;aACF;iBAAM;gBACL,uCAAuC;gBACvC,KAAK,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;aAC3C;YAED,+DAA+D;YAC/D,IAAI,KAAK,CAAC,WAAW,EAAE,IAAI,OAAO,CAAC,mBAAmB,EAAE;gBACtD,mBAAmB;gBACnB,MAAM,QAAQ,GAAW,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;gBAE9D,oDAAoD;gBACpD,OAAO,cAAc,CAAC,MAAM,IAAI,IAAI,CAAC,KAAK,EAAE;oBAC1C,cAAc,CAAC,GAAG,EAAE,CAAA;iBACrB;gBAED,mBAAmB;gBACnB,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,EAAE;oBACtD,IAAI,CAAC,KAAK,CACR,oCAAoC,IAAI,CAAC,IAAI,mBAAmB,QAAQ,GAAG,CAC5E,CAAA;oBACD,OAAO,SAAS,CAAA;iBACjB;gBAED,6BAA6B;gBAC7B,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;aAC9B;YAED,OAAO,KAAK,CAAA;QACd,CAAC;KAAA;CACF;AAvMD,wCAuMC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-match-kind.d.ts b/node_modules/@actions/glob/lib/internal-match-kind.d.ts new file mode 100644 index 0000000..a7e60aa --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.d.ts @@ -0,0 +1,13 @@ +/** + * Indicates whether a pattern matches a path + */ +export declare enum MatchKind { + /** Not matched */ + None = 0, + /** Matched if the path is a directory */ + Directory = 1, + /** Matched if the path is a regular file */ + File = 2, + /** Matched */ + All = 3 +} diff --git a/node_modules/@actions/glob/lib/internal-match-kind.js b/node_modules/@actions/glob/lib/internal-match-kind.js new file mode 100644 index 0000000..37146ae --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MatchKind = void 0; +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-match-kind.js.map b/node_modules/@actions/glob/lib/internal-match-kind.js.map new file mode 100644 index 0000000..6fa3c9f --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-match-kind.js","sourceRoot":"","sources":["../src/internal-match-kind.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,IAAY,SAYX;AAZD,WAAY,SAAS;IACnB,kBAAkB;IAClB,yCAAQ,CAAA;IAER,yCAAyC;IACzC,mDAAa,CAAA;IAEb,4CAA4C;IAC5C,yCAAQ,CAAA;IAER,cAAc;IACd,uCAAsB,CAAA;AACxB,CAAC,EAZW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QAYpB"} \ No newline at end of file diff --git 
a/node_modules/@actions/glob/lib/internal-path-helper.d.ts b/node_modules/@actions/glob/lib/internal-path-helper.d.ts new file mode 100644 index 0000000..0375c36 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.d.ts @@ -0,0 +1,42 @@ +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. + * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +export declare function dirname(p: string): string; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +export declare function ensureAbsoluteRoot(root: string, itemPath: string): string; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +export declare function hasAbsoluteRoot(itemPath: string): boolean; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +export declare function hasRoot(itemPath: string): boolean; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +export declare function normalizeSeparators(p: string): string; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +export declare function safeTrimTrailingSeparator(p: string): string; diff --git a/node_modules/@actions/glob/lib/internal-path-helper.js b/node_modules/@actions/glob/lib/internal-path-helper.js new file mode 100644 index 0000000..5057add --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.js @@ -0,0 +1,198 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; +const path = __importStar(require("path")); +const assert_1 = __importDefault(require("assert")); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. 
+ * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. \\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; + } + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); + } + return result; +} +exports.dirname = dirname; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +function ensureAbsoluteRoot(root, itemPath) { + assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; + } + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } + // Drive + path, e.g. C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; + } + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } + // Different drive + else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty + } + else { + // Append separator + root += path.sep; + } + return root + itemPath; +} +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +function hasAbsoluteRoot(itemPath) { + assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasAbsoluteRoot = hasAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. 
On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +function hasRoot(itemPath) { + assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasRoot = hasRoot; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports.normalizeSeparators = normalizeSeparators; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); +} +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path-helper.js.map b/node_modules/@actions/glob/lib/internal-path-helper.js.map new file mode 100644 index 0000000..1dc7ef9 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-path-helper.js","sourceRoot":"","sources":["../src/internal-path-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAC5B,oDAA2B;AAE3B,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;;;;;;;;;;;;;;;GAgBG;AACH,SAAgB,OAAO,CAAC,CAAS;IAC/B,wDAAwD;IACxD,CAAC,GAAG,yBAAyB,CAAC,CAAC,CAAC,CAAA;IAEhC,kDAAkD;IAClD,IAAI,UAAU,IAAI,yBAAyB,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;QACnD,OAAO,CAAC,CAAA;KACT;IAED,cAAc;IACd,IAAI,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;IAE5B,gEAAgE;IAChE,IAAI,UAAU,IAAI,wBAAwB,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QACvD,MAAM,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAA;KAC3C;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAlBD,0BAkBC;AAED;;;GAGG;AACH,SAAgB,kBAAkB,CAAC,IAAY,EAAE,QAAgB;IAC/D,gBAAM,CAAC,IAAI,EAAE,uDAAuD,CAAC,CAAA;IACrE,gBAAM,CAAC,QAAQ,EAAE,2DAA2D,CAAC,CAAA;IAE7E,iBAAiB;IACjB,IAAI,eAAe,CAAC,QAAQ,CAAC,EAAE;QAC7B,OAAO,QAAQ,CAAA;KAChB;IAED,UAAU;IACV,IAAI,UAAU,EAAE;QACd,sCAAsC;QACtC,IAAI,QAAQ,CAAC,KAAK,CAAC,yBAAyB,CAAC,EAAE;YAC7C,IAAI,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;YACvB,gBAAM,CACJ,GAAG,CAAC,KAAK,CAAC,YAAY,CAAC,EACvB,4EAA4E,GAAG,GAAG,CACnF,CAAA;YAED,0CAA0C;YAC1C,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,EAAE;gBACtD,sBAAsB;gBACtB,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;oBACzB,wDAAwD;oBACxD,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;iBAC3C;gBACD,2BAA2B;qBACtB;oBACH,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;wBACvB,GAAG,IAAI,IAAI,CAAA;qBACZ;oBACD,wDAAwD;oBACxD,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;iBAChE;aACF;YACD,kBAAkB;iBACb;gBACH,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;aAChD;SACF;QACD,oCAAoC;aAC/B,IAAI,mBAAmB,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,eAAe,CAAC,EAAE;YAC7D,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;YACzB,gBAAM,CACJ,GAAG,CAAC,KAAK,CAAC,YAAY,CAAC,EACvB,4EAA4E,GAAG,GAAG,CACnF,CAAA;YAED,OAAO,GAAG,GAAG,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;SAC3C;KACF;IAED,gBAAM,CACJ,eAAe,CAAC,IAAI,CAAC,EACrB,gEAAgE,CACjE,CAAA;IAED,8CAA8C;IAC9C,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,EAAE;QAC7D,sBAAsB;KACvB;SAAM;QACL,mBAAmB;QACnB,IAAI,IAAI,IAAI,CAAC,GAAG,CAAA;KACjB;IAED,OAAO,IAAI,GAAG,QAAQ,CAAA;AACxB,CAAC;AAlED,gDAkEC;AAED;;;GAGG;AACH,SAAgB,eAAe,CAAC,QAAgB;IAC9C,gBAAM,CAAC,QAAQ,EAAE,wDAAwD,CAAC,CAAA;IAE1E,uBAAuB;IACvB,QAAQ,GAAG,mBAAmB,CAAC,QAAQ,CAAC,CAAA;IAExC,UAAU;IACV,IAAI,UAAU,EAAE;QACd,iCAAiC;QACjC,OAAO,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;KAClE;IAED,cAAc;IACd,OAAO,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAdD,0CAcC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,QAAgB;IACtC,gBAAM,CAAC,QAAQ,EAAE,iDAAiD,CAAC,CAAA;IAEnE,uBAAuB;IACvB,QAAQ,GAAG,mBAAmB,CAAC,QAAQ,CAAC,CAAA;IAExC,UAAU;IACV,IAAI,UAAU,EAAE;QACd,8BAA8B;QAC9B,sBAAsB;QACtB,OAAO,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;KAC9D;IAED,cAAc;IACd,OAAO,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAfD,0BAeC;AAED;;GAEG;AACH,SAAgB,mBAAmB,CAAC,CAAS;IAC3C,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;IAEX,UAAU;IACV,IAAI,UAAU,EAAE;QACd,6BAA6B;QAC7B,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;QAE1B,2BAA2B;QAC3B,MAAM,KAAK,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA,CAAC,eAAe;QACnD,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA,CAAC,8BAA8B;KACtF;IAED,2BAA2B;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;AACjC,CAAC;AAfD,kDAeC;AAED;;;GAGG;AACH,SAAgB,yBAAyB,CAAC,CAAS;IACjD,yBAAyB;IA
CzB,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,EAAE,CAAA;KACV;IAED,uBAAuB;IACvB,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAA;IAE1B,oBAAoB;IACpB,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACzB,OAAO,CAAC,CAAA;KACT;IAED,8CAA8C;IAC9C,IAAI,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;QAClB,OAAO,CAAC,CAAA;KACT;IAED,2CAA2C;IAC3C,IAAI,UAAU,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;QACvC,OAAO,CAAC,CAAA;KACT;IAED,gCAAgC;IAChC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;AAClC,CAAC;AA1BD,8DA0BC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path.d.ts b/node_modules/@actions/glob/lib/internal-path.d.ts new file mode 100644 index 0000000..5e0a264 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.d.ts @@ -0,0 +1,15 @@ +/** + * Helper class for parsing paths into segments + */ +export declare class Path { + segments: string[]; + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath: string | string[]); + /** + * Converts the path to it's string representation + */ + toString(): string; +} diff --git a/node_modules/@actions/glob/lib/internal-path.js b/node_modules/@actions/glob/lib/internal-path.js new file mode 100644 index 0000000..88a8715 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.js @@ -0,0 +1,113 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Path = void 0; +const path = __importStar(require("path")); +const pathHelper = __importStar(require("./internal-path-helper")); +const assert_1 = __importDefault(require("assert")); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } +} +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path.js.map b/node_modules/@actions/glob/lib/internal-path.js.map new file mode 100644 index 0000000..8e8eeb5 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-path.js","sourceRoot":"","sources":["../src/internal-path.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAC5B,mEAAoD;AACpD,oDAA2B;AAE3B,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;GAEG;AACH,MAAa,IAAI;IAGf;;;OAGG;IACH,YAAY,QAA2B;QANvC,aAAQ,GAAa,EAAE,CAAA;QAOrB,SAAS;QACT,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;YAChC,gBAAM,CAAC,QAAQ,EAAE,wCAAwC,CAAC,CAAA;YAE1D,wDAAwD;YACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;YAEzD,aAAa;YACb,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;gBACjC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;aACzC;YACD,SAAS;iBACJ;gBACH,0CAA0C;gBAC1C,IAAI,SAAS,GAAG,QAAQ,CAAA;gBACxB,IAAI,GAAG,GAAG,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;gBACvC,OAAO,GAAG,KAAK,SAAS,EAAE;oBACxB,kBAAkB;oBAClB,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAA;oBACzC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;oBAE/B,4BAA4B;oBAC5B,SAAS,GAAG,GAAG,CAAA;oBACf,GAAG,GAAG,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;iBACpC;gBAED,wBAAwB;gBACxB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;aACjC;SACF;QACD,QAAQ;aACH;YACH,oBAAoB;YACpB,gBAAM,CACJ,QAAQ,CAAC,MAAM,GAAG,CAAC,EACnB,iDAAiD,CAClD,CAAA;YAED,eAAe;YACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACxC,IAAI,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAA;gBAEzB,oBAAoB;gBACpB,gBAAM,CACJ,OAAO,EACP,0DAA0D,CAC3D,CAAA;gBAED,oBAAoB;gBACpB,OAAO,GAAG,UAAU,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;gBAErD,eAAe;gBACf,IAAI,CAAC,KAAK,CAAC,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;oBAC1C,OAAO,GAAG,UAAU,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAA;oBACvD,gBAAM,CACJ,OAAO,KAAK,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,EACvC,8EAA8E,CAC/E,CAAA;oBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAC5B;gBACD,qBAAqB;qBAChB;oBACH,yBAAyB;oBACzB,gBAAM,CACJ,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAC3B,0DAA0D,CAC3D,CAAA;oBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAC5B;aACF;SACF;IACH,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,gBAAgB;QAChB,IAAI,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;QAE7B,aAAa;QACb,IAAI,SAAS,GACX,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAA;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,IAAI,SAAS,EAAE;gBACb,SAAS,GAAG,KAAK,CAAA;aAClB;iBAAM;gBACL,MAAM,IAAI,IAAI,CAAC,GAAG,CAAA;aACnB;YAED,MAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;SAC3B;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAvGD,oBAuGC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts b/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts new file mode 100644 index 0000000..c5dcc07 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts @@ -0,0 +1,15 @@ +import { MatchKind } from './internal-match-kind'; +import { Pattern } from './internal-pattern'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. 
+ */ +export declare function getSearchPaths(patterns: Pattern[]): string[]; +/** + * Matches the patterns against the path + */ +export declare function match(patterns: Pattern[], itemPath: string): MatchKind; +/** + * Checks whether to descend further into the directory + */ +export declare function partialMatch(patterns: Pattern[], itemPath: string): boolean; diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.js b/node_modules/@actions/glob/lib/internal-pattern-helper.js new file mode 100644 index 0000000..a7a0440 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.js @@ -0,0 +1,94 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.partialMatch = exports.match = exports.getSearchPaths = void 0; +const pathHelper = __importStar(require("./internal-path-helper")); +const internal_match_kind_1 = require("./internal-match-kind"); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? 
pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.js.map b/node_modules/@actions/glob/lib/internal-pattern-helper.js.map new file mode 100644 index 0000000..ae37285 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-pattern-helper.js","sourceRoot":"","sources":["../src/internal-pattern-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,mEAAoD;AACpD,+DAA+C;AAG/C,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;;GAGG;AACH,SAAgB,cAAc,CAAC,QAAmB;IAChD,yBAAyB;IACzB,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAA;IAE1C,mCAAmC;IACnC,MAAM,aAAa,GAA4B,EAAE,CAAA;IACjD,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,MAAM,GAAG,GAAG,UAAU;YACpB,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE;YAClC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAA;QACtB,aAAa,CAAC,GAAG,CAAC,GAAG,WAAW,CAAA;KACjC;IAED,MAAM,MAAM,GAAa,EAAE,CAAA;IAE3B,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,4BAA4B;QAC5B,MAAM,GAAG,GAAG,UAAU;YACpB,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE;YAClC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAA;QACtB,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,UAAU,EAAE;YACrC,SAAQ;SACT;QAED,oCAAoC;QACpC,IAAI,aAAa,GAAG,KAAK,CAAA;QACzB,IAAI,OAAO,GAAG,GAAG,CAAA;QACjB,IAAI,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;QACxC,OAAO,MAAM,KAAK,OAAO,EAAE;YACzB,IAAI,aAAa,CAAC,MAAM,CAAC,EAAE;gBACzB,aAAa,GAAG,IAAI,CAAA;gBACpB,MAAK;aACN;YAED,OAAO,GAAG,MAAM,CAAA;YAChB,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;SACrC;QAED,2CAA2C;QAC3C,IAAI,CAAC,aAAa,EAAE;YAClB,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;YAC/B,aAAa,CAAC,GAAG,CAAC,GAAG,UAAU,CAAA;SAChC;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AA9CD,wCA8CC;AAED;;GAEG;AACH,SAAgB,KAAK,CAAC,QAAmB,EAAE,QAAgB;IACzD,IAAI,MAAM,GAAc,+BAAS,CAAC,IAAI,CAAA;IAEtC,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;SACnC;aAAM;YACL,MAAM,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;SAClC;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAZD,sBAYC;AAED;;GAEG;AACH,SAAgB,YAAY,CAAC,QAAmB,EAAE,QAAgB;IAChE,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,CAAA;AAClE,CAAC;AAFD,oCAEC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern.d.ts 
b/node_modules/@actions/glob/lib/internal-pattern.d.ts new file mode 100644 index 0000000..2c2224c --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.d.ts @@ -0,0 +1,64 @@ +import { MatchKind } from './internal-match-kind'; +export declare class Pattern { + /** + * Indicates whether matches should be excluded from the result set + */ + readonly negate: boolean; + /** + * The directory to search. The literal path prior to the first glob segment. + */ + readonly searchPath: string; + /** + * The path/pattern segments. Note, only the first segment (the root directory) + * may contain a directory separator character. Use the trailingSeparator field + * to determine whether the pattern ended with a trailing slash. + */ + readonly segments: string[]; + /** + * Indicates the pattern should only match directories, not regular files. + */ + readonly trailingSeparator: boolean; + /** + * The Minimatch object used for matching + */ + private readonly minimatch; + /** + * Used to workaround a limitation with Minimatch when determining a partial + * match and the path is a root directory. For example, when the pattern is + * `/foo/**` or `C:\foo\**` and the path is `/` or `C:\`. + */ + private readonly rootRegExp; + /** + * Indicates that the pattern is implicitly added as opposed to user specified. + */ + private readonly isImplicitPattern; + constructor(pattern: string); + constructor(pattern: string, isImplicitPattern: boolean, segments: undefined, homedir: string); + constructor(negate: boolean, isImplicitPattern: boolean, segments: string[], homedir?: string); + /** + * Matches the pattern against the specified path + */ + match(itemPath: string): MatchKind; + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath: string): boolean; + /** + * Escapes glob patterns within a path + */ + static globEscape(s: string): string; + /** + * Normalizes slashes and ensures absolute root + */ + private static fixupPattern; + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + private static getLiteral; + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + private static regExpEscape; +} diff --git a/node_modules/@actions/glob/lib/internal-pattern.js b/node_modules/@actions/glob/lib/internal-pattern.js new file mode 100644 index 0000000..a17ffe9 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.js @@ -0,0 +1,255 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const os = __importStar(require("os")); +const path = __importStar(require("path")); +const pathHelper = __importStar(require("./internal-path-helper")); +const assert_1 = __importDefault(require("assert")); +const minimatch_1 = require("minimatch"); +const internal_match_kind_1 = require("./internal-match-kind"); +const internal_path_1 = require("./internal-path"); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); + } + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert_1.default(segments.length, `Parameter 'segments' must not empty`); + const root = Pattern.getLiteral(segments[0]); + assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern, homedir); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + this.isImplicitPattern = isImplicitPattern; + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); + } + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. Otherwise Minimatch will not match the directory immediately + // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an absolute root. 
+ itemPath = `${itemPath}${path.sep}`; + } + } + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; + } + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + } + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]'); // escape '*' + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + // Empty + assert_1.default(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + homedir = homedir || os.homedir(); + assert_1.default(homedir, 'Unable to determine HOME directory'); + assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. 
pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); + } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; + continue; + } + // Closed + else if (c2 === ']') { + closed = i2; + break; + } + // Otherwise + else { + set += c2; + } + } + // Closed? + if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } + } + // Otherwise fall thru + } + // Append + literal += c; + } + return literal; + } + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern.js.map b/node_modules/@actions/glob/lib/internal-pattern.js.map new file mode 100644 index 0000000..08e172d --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-pattern.js","sourceRoot":"","sources":["../src/internal-pattern.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAC5B,mEAAoD;AACpD,oDAA2B;AAC3B,yCAA8E;AAC9E,+DAA+C;AAC/C,mDAAoC;AAEpC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C,MAAa,OAAO;IAqDlB,YACE,eAAiC,EACjC,iBAAiB,GAAG,KAAK,EACzB,QAAmB,EACnB,OAAgB;QAxDlB;;WAEG;QACM,WAAM,GAAY,KAAK,CAAA;QAuD9B,mBAAmB;QACnB,IAAI,OAAe,CAAA;QACnB,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;YACvC,OAAO,GAAG,eAAe,CAAC,IAAI,EAAE,CAAA;SACjC;QACD,oBAAoB;aACf;YACH,qBAAqB;YACrB,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;YACzB,gBAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,qCAAqC,CAAC,CAAA;YAC9D,MAAM,IAAI,GAAG,OAAO,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;YAC5C,gBAAM,CACJ,IAAI,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,EACxC,wDAAwD,CACzD,CAAA;YACD,OAAO,GAAG,IAAI,oBAAI,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE,CAAA;YAC9C,IAAI,eAAe,EAAE;gBACnB,OAAO,GAAG,IAAI,OAAO,EAAE,CAAA;aACxB;SACF;QAED,SAAS;QACT,OAAO,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YAC9B,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,MAAM,CAAA;YAC1B,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAA;SACnC;QAED,8CAA8C;QAC9C,OAAO,GAAG,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;QAEhD,WAAW;QACX,IAAI,CAAC,QAAQ,GAAG,IAAI,oBAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAA;QAE1C,wFAAwF;QACxF,IAAI,CAAC,iBAAiB,GAAG,UAAU;aAChC,mBAAmB,CAAC,OAAO,CAAC;aAC5B,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;QACrB,OAAO,GAAG,UAAU,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAA;QAEvD,6DAA6D;QAC7D,IAAI,SAAS,GAAG,KAAK,CAAA;QACrB,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ;aACjC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;aAC/B,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,SAAS,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;QACrD,IAAI,CAAC,UAAU,GAAG,IAAI,oBAAI,CAAC,cAAc,CAAC,CAAC,QAAQ,EAAE,CAAA;QAErD,wDAAwD;QACxD,IAAI,CAAC,UAAU,GAAG,IAAI,MAAM,CAC1B,OAAO,CAAC,YAAY,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EACvC,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CACtB,CAAA;QAED,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAA;QAE1C,mBAAmB;QACnB,MAAM,gBAAgB,GAAsB;YAC1C,GAAG,EAAE,IAAI;YACT,OAAO,EAAE,IAAI;YACb,MAAM,EAAE,UAAU;YAClB,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI;YACX,QAAQ,EAAE,IAAI;SACf,CAAA;QACD,OAAO,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAA;QAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,qBAAS,CAAC,OAAO,EAAE,gBAAgB,CAAC,CAAA;IAC3D,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,QAAgB;QACpB,4BAA4B;QAC5B,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;YACpD,oBAAoB;YACpB,QAAQ,GAAG,UAAU,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAA;YAEnD,wFAAwF;YACxF,sFAAsF;YACtF,+FAA+F;YAC/F,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,iBAAiB,KAAK,KAAK,EAAE;gBACpE,uFAAuF;gBACvF,sFAAsF;gBACtF,QAAQ,GAAG,GAAG,QAAQ,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;aACpC;SACF;aAAM;YACL,wDAAwD;YACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;SAC1D;QAED,QAAQ;QACR,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;YAClC,OAAO,IAAI,CAAC,iBAAiB,CAAC,CAAC,CAAC,+BAAS,CAAC,SAAS,CAAC,CAAC,CAAC,+BAAS,CAAC,GAAG,CAAA;SACpE;QAED,OAAO,+BAAS,CAAC,IAAI,CAAA;IACvB,CAAC;IAED;;OAEG;IACH,YAAY,CAAC,QAAgB;QAC3B,wDAAwD;QACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;QAEzD,+CAA+C;QAC/C,IAAI,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,QAAQ,EAAE;YAC7C,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;SACtC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAC5B,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAC1C,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,EACrB,IAAI,CACL,CAAA;IACH,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,UAAU,CAAC,CAAS;QACzB,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,
KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,4BAA4B;aAC5E,OAAO,CAAC,kBAAkB,EAAE,KAAK,CAAC,CAAC,sDAAsD;aACzF,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,aAAa;aACnC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA,CAAC,aAAa;IACxC,CAAC;IAED;;OAEG;IACK,MAAM,CAAC,YAAY,CAAC,OAAe,EAAE,OAAgB;QAC3D,QAAQ;QACR,gBAAM,CAAC,OAAO,EAAE,yBAAyB,CAAC,CAAA;QAE1C,qDAAqD;QACrD,gCAAgC;QAChC,MAAM,eAAe,GAAG,IAAI,oBAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CACzD,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CACtB,CAAA;QACD,gBAAM,CACJ,eAAe,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,EACrE,oBAAoB,OAAO,kDAAkD,CAC9E,CAAA;QAED,kEAAkE;QAClE,gBAAM,CACJ,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,eAAe,CAAC,CAAC,CAAC,EAClD,oBAAoB,OAAO,yCAAyC,CACrE,CAAA;QAED,oBAAoB;QACpB,OAAO,GAAG,UAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,CAAA;QAEjD,8BAA8B;QAC9B,IAAI,OAAO,KAAK,GAAG,IAAI,OAAO,CAAC,UAAU,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC,EAAE;YACzD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAChE;QACD,8BAA8B;aACzB,IAAI,OAAO,KAAK,GAAG,IAAI,OAAO,CAAC,UAAU,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC,EAAE;YAC9D,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC,OAAO,EAAE,CAAA;YACjC,gBAAM,CAAC,OAAO,EAAE,oCAAoC,CAAC,CAAA;YACrD,gBAAM,CACJ,UAAU,CAAC,eAAe,CAAC,OAAO,CAAC,EACnC,wDAAwD,OAAO,GAAG,CACnE,CAAA;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAC1D;QACD,2DAA2D;aACtD,IACH,UAAU;YACV,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,OAAO,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,EAC9D;YACA,IAAI,IAAI,GAAG,UAAU,CAAC,kBAAkB,CACtC,gBAAgB,EAChB,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CACrB,CAAA;YACD,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBAC9C,IAAI,IAAI,IAAI,CAAA;aACb;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACvD;QACD,mDAAmD;aAC9C,IAAI,UAAU,IAAI,CAAC,OAAO,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,EAAE;YACtE,IAAI,IAAI,GAAG,UAAU,CAAC,kBAAkB,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAA;YAChE,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBACxB,IAAI,IAAI,IAAI,CAAA;aACb;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACvD;QACD,iCAAiC;aAC5B;YACH,OAAO,GAAG,UAAU,CAAC,kBAAkB,CACrC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EACjC,OAAO,CACR,CAAA;SACF;QAED,OAAO,UAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,CAAA;IAChD,CAAC;IAED;;;OAGG;IACK,MAAM,CAAC,UAAU,CAAC,OAAe;QACvC,IAAI,OAAO,GAAG,EAAE,CAAA;QAChB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACvC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAA;YACpB,SAAS;YACT,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;gBACvD,OAAO,IAAI,OAAO,CAAC,EAAE,CAAC,CAAC,CAAA;gBACvB,SAAQ;aACT;YACD,WAAW;iBACN,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,EAAE;gBAC/B,OAAO,EAAE,CAAA;aACV;YACD,gBAAgB;iBACX,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;gBAC5C,IAAI,GAAG,GAAG,EAAE,CAAA;gBACZ,IAAI,MAAM,GAAG,CAAC,CAAC,CAAA;gBACf,KAAK,IAAI,EAAE,GAAG,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,EAAE,EAAE;oBAC9C,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAAA;oBACtB,SAAS;oBACT,IAAI,EAAE,KAAK,IAAI,IAAI,CAAC,UAAU,IAAI,EAAE,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;wBACzD,GAAG,IAAI,OAAO,CAAC,EAAE,EAAE,CAAC,CAAA;wBACpB,SAAQ;qBACT;oBACD,SAAS;yBACJ,IAAI,EAAE,KAAK,GAAG,EAAE;wBACnB,MAAM,GAAG,EAAE,CAAA;wBACX,MAAK;qBACN;oBACD,YAAY;yBACP;wBACH,GAAG,IAAI,EAAE,CAAA;qBACV;iBACF;gBAED,UAAU;gBACV,IAAI,MAAM,IAAI,CAAC,EAAE;oBACf,iBAAiB;oBACjB,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE;wBAClB,OAAO,EAAE,CAAA;qBACV;oBAED,qBAAqB;oBACrB,IAAI,GAAG,EAAE;wBAC
P,OAAO,IAAI,GAAG,CAAA;wBACd,CAAC,GAAG,MAAM,CAAA;wBACV,SAAQ;qBACT;iBACF;gBAED,sBAAsB;aACvB;YAED,SAAS;YACT,OAAO,IAAI,CAAC,CAAA;SACb;QAED,OAAO,OAAO,CAAA;IAChB,CAAC;IAED;;;OAGG;IACK,MAAM,CAAC,YAAY,CAAC,CAAS;QACnC,OAAO,CAAC,CAAC,OAAO,CAAC,iBAAiB,EAAE,MAAM,CAAC,CAAA;IAC7C,CAAC;CACF;AAzUD,0BAyUC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-search-state.d.ts b/node_modules/@actions/glob/lib/internal-search-state.d.ts new file mode 100644 index 0000000..aac344e --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.d.ts @@ -0,0 +1,5 @@ +export declare class SearchState { + readonly path: string; + readonly level: number; + constructor(path: string, level: number); +} diff --git a/node_modules/@actions/glob/lib/internal-search-state.js b/node_modules/@actions/glob/lib/internal-search-state.js new file mode 100644 index 0000000..5d266ce --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SearchState = void 0; +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-search-state.js.map b/node_modules/@actions/glob/lib/internal-search-state.js.map new file mode 100644 index 0000000..4421012 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-search-state.js","sourceRoot":"","sources":["../src/internal-search-state.ts"],"names":[],"mappings":";;;AAAA,MAAa,WAAW;IAItB,YAAY,IAAY,EAAE,KAAa;QACrC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;CACF;AARD,kCAQC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/package.json b/node_modules/@actions/glob/package.json new file mode 100644 index 0000000..4dfd76a --- /dev/null +++ b/node_modules/@actions/glob/package.json @@ -0,0 +1,43 @@ +{ + "name": "@actions/glob", + "version": "0.1.2", + "preview": true, + "description": "Actions glob lib", + "keywords": [ + "github", + "actions", + "glob" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/glob", + "license": "MIT", + "main": "lib/glob.js", + "types": "lib/glob.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/glob" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } +} diff --git a/node_modules/@actions/http-client/lib/auth.js.map b/node_modules/@actions/http-client/lib/auth.js.map index 7d3a18a..62cc16b 100644 --- a/node_modules/@actions/http-client/lib/auth.js.map +++ b/node_modules/@actions/http-client/lib/auth.js.map @@ -1 +1 @@ 
-{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAI/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA3BD,oFA2BC"} \ No newline at end of file +{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAK/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA5BD,oFA4BC"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/index.d.ts b/node_modules/@actions/http-client/lib/index.d.ts index fe733d1..38db700 100644 --- a/node_modules/@actions/http-client/lib/index.d.ts +++ b/node_modules/@actions/http-client/lib/index.d.ts @@ -1,6 +1,9 @@ /// +/// +/// import * as http from 'http'; import * as ifm from 
'./interfaces'; +import { ProxyAgent } from 'undici'; export declare enum HttpCodes { OK = 200, MultipleChoices = 300, @@ -51,6 +54,7 @@ export declare class HttpClientResponse { constructor(message: http.IncomingMessage); message: http.IncomingMessage; readBody(): Promise; + readBodyBuffer?(): Promise; } export declare function isHttps(requestUrl: string): boolean; export declare class HttpClient { @@ -66,6 +70,7 @@ export declare class HttpClient { private _maxRetries; private _agent; private _proxyAgent; + private _proxyAgentDispatcher; private _keepAlive; private _disposed; constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions); @@ -114,10 +119,12 @@ export declare class HttpClient { * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ getAgent(serverUrl: string): http.Agent; + getAgentDispatcher(serverUrl: string): ProxyAgent | undefined; private _prepareRequest; private _mergeHeaders; private _getExistingOrDefaultHeader; private _getAgent; + private _getProxyAgentDispatcher; private _performExponentialBackoff; private _processResponse; } diff --git a/node_modules/@actions/http-client/lib/index.js b/node_modules/@actions/http-client/lib/index.js index a1b7d03..f627c6a 100644 --- a/node_modules/@actions/http-client/lib/index.js +++ b/node_modules/@actions/http-client/lib/index.js @@ -2,7 +2,11 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -15,7 +19,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -34,6 +38,7 @@ const http = __importStar(require("http")); const https = __importStar(require("https")); const pm = __importStar(require("./proxy")); const tunnel = __importStar(require("tunnel")); +const undici_1 = require("undici"); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -63,16 +68,16 @@ var HttpCodes; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); var Headers; (function (Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); +})(Headers || (exports.Headers = Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com @@ -123,6 +128,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -428,6 +446,15 @@ class HttpClient { const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; @@ -475,7 +502,7 @@ class HttpClient { if (this._keepAlive && useProxy) { agent = this._proxyAgent; } - if (this._keepAlive && !useProxy) { + if (!useProxy) { agent = this._agent; } // if agent is already assigned use that agent. @@ -507,16 +534,12 @@ class HttpClient { agent = tunnelAgent(agentOptions); this._proxyAgent = agent; } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { + // if tunneling agent isn't assigned create a new agent + if (!agent) { const options = { keepAlive: this._keepAlive, maxSockets }; agent = usingSsl ? new https.Agent(options) : new http.Agent(options); this._agent = agent; } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } if (usingSsl && this._ignoreSslError) { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options @@ -527,6 +550,30 @@ class HttpClient { } return agent; } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } _performExponentialBackoff(retryNumber) { return __awaiter(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); diff --git a/node_modules/@actions/http-client/lib/index.js.map b/node_modules/@actions/http-client/lib/index.js.map index ca8ea41..1fce6fc 100644 --- a/node_modules/@actions/http-client/lib/index.js.map +++ b/node_modules/@actions/http-client/lib/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA,uDAAuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEvD,2CAA4B;AAC5B,6CAA8B;AAG9B,4CAA6B;AAC7B,+CAAgC;AAEhC,IAAY,SA4BX;AA5BD,WAAY,SAAS;IACnB,uCAAQ,CAAA;IACR,iEAAqB,CAAA;IACrB,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,qEAAuB,CAAA;IACvB,qEAAuB,CAAA;IACvB,uDAAgB,CAAA;IAChB,2DAAkB,CAAA;IAClB,iEAAqB,CAAA;IACrB,qDAAe,CAAA;IACf,mDAAc,CAAA;IACd,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,yFAAiC,CAAA;IACjC,+DAAoB,CAAA;IACpB,mDAAc,CAAA;IACd,2CAAU,CAAA;IACV,iEAAqB,CAAA;IACrB,yEAAyB,CAAA;IACzB,+DAAoB,CAAA;IACpB,uDAAgB,CAAA;IAChB,uEAAwB,CAAA;IACxB,+DAAoB,CAAA;AACtB,CAAC,EA5BW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QA4BpB;AAED,IAAY,OAGX;AAHD,WAAY,OAAO;IACjB,4BAAiB,CAAA;IACjB,uCAA4B,CAAA;AAC9B,CAAC,EAHW,OAAO,GAAP,eAAO,KAAP,eAAO,QAGlB;AAED,IAAY,UAEX;AAFD,WAAY,UAAU;IACpB,kDAAoC,CAAA;AACtC,CAAC,EAFW,UAAU,GAAV,kBAAU,KAAV,kBAAU,QAErB;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,SAAiB;IAC3C,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAA;IACnD,OAAO,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;AACtC,CAAC;AAHD,kCAGC;AAED,MAAM,iBAAiB,GAAa;IAClC,SAAS,CAAC,gBAAgB;IAC1B,SAAS,CAAC,aAAa;IACvB,SAAS,CAAC,QAAQ;IAClB,SAAS,CAAC,iBAAiB;IAC3B,SAAS,CAAC,iBAAiB;CAC5B,CAAA;AACD,MAAM,sBAAsB,GAAa;IACvC,SAAS,CAAC,UAAU;IACpB,SAAS,CAAC,kBAAkB;IAC5B,SAAS,CAAC,cAAc;CACzB,CAAA;AACD,MAAM,kBAAkB,GAAa,CAAC,SAAS,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAA;AACzE,MAAM,yBAAyB,GAAG,EAAE,CAAA;AACpC,MAAM,2BAA2B,GAAG,CAAC,CAAA;AAErC,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAIF;AAVD,0CAUC;AAED,MAAa,kBAAkB;I
AC7B,YAAY,OAA6B;QACvC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAGK,QAAQ;;YACZ,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBAE5B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAA;gBACzC,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAA;gBAC5B,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAnBD,gDAmBC;AAED,SAAgB,OAAO,CAAC,UAAkB;IACxC,MAAM,SAAS,GAAQ,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;IAC1C,OAAO,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;AACxC,CAAC;AAHD,0BAGC;AAED,MAAa,UAAU;IAiBrB,YACE,SAAkB,EAClB,QAA+B,EAC/B,cAAmC;QAf7B,oBAAe,GAAG,KAAK,CAAA;QAEvB,oBAAe,GAAG,IAAI,CAAA;QACtB,4BAAuB,GAAG,KAAK,CAAA;QAC/B,kBAAa,GAAG,EAAE,CAAA;QAClB,kBAAa,GAAG,KAAK,CAAA;QACrB,gBAAW,GAAG,CAAC,CAAA;QAGf,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QAOvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;QAC9B,IAAI,CAAC,cAAc,GAAG,cAAc,CAAA;QACpC,IAAI,cAAc,EAAE;YAClB,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC,aAAa,CAAA;YAElD,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,cAAc,CAAC,sBAAsB,IAAI,IAAI,EAAE;gBACjD,IAAI,CAAC,uBAAuB,GAAG,cAAc,CAAC,sBAAsB,CAAA;aACrE;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,YAAY,EAAE,CAAC,CAAC,CAAA;aAC9D;YAED,IAAI,cAAc,CAAC,SAAS,IAAI,IAAI,EAAE;gBACpC,IAAI,CAAC,UAAU,GAAG,cAAc,CAAC,SAAS,CAAA;aAC3C;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,cAAc,CAAC,YAAY,CAAA;aACjD;YAED,IAAI,cAAc,CAAC,UAAU,IAAI,IAAI,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,cAAc,CAAC,UAAU,CAAA;aAC7C;SACF;IACH,CAAC;IAEK,OAAO,CACX,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC3E,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC1E,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,KAAK,CACT,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACzE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,UAAU,CACd,IAAY,EACZ,UAAkB,EAClB,MAA6B,EAC7B,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,iBAAiB,CAAC,CAAA;QAClE,CAAC;KAAA;IAED;;;OAGG;IACG,OAAO,CACX,UAAkB,EAClB,oBAA8C,EAAE;;YAEhD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,QAAQ,CACZ,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,C
AC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,IAAI,CAC7C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,OAAO,CACX,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,SAAS,CACb,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,KAAK,CAC9C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAED;;;;OAIG;IACG,OAAO,CACX,IAAY,EACZ,UAAkB,EAClB,IAA2C,EAC3C,OAAkC;;YAElC,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;aACrD;YAED,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;YACrC,IAAI,IAAI,GAAoB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,CAAA;YAE1E,oEAAoE;YACpE,MAAM,QAAQ,GACZ,IAAI,CAAC,aAAa,IAAI,kBAAkB,CAAC,QAAQ,CAAC,IAAI,CAAC;gBACrD,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC;gBACtB,CAAC,CAAC,CAAC,CAAA;YACP,IAAI,QAAQ,GAAG,CAAC,CAAA;YAEhB,IAAI,QAAwC,CAAA;YAC5C,GAAG;gBACD,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;gBAE5C,4CAA4C;gBAC5C,IACE,QAAQ;oBACR,QAAQ,CAAC,OAAO;oBAChB,QAAQ,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,CAAC,YAAY,EACtD;oBACA,IAAI,qBAAqD,CAAA;oBAEzD,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC,IAAI,OAAO,CAAC,uBAAuB,CAAC,QAAQ,CAAC,EAAE;4BAC7C,qBAAqB,GAAG,OAAO,CAAA;4BAC/B,MAAK;yBACN;qBACF;oBAED,IAAI,qBAAqB,EAAE;wBACzB,OAAO,qBAAqB,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;qBACpE;yBAAM;wBACL,+EAA+E;wBAC/E,yCAAyC;wBACzC,OAAO,QAAQ,CAAA;qBAChB;iBACF;gBAED,IAAI,kBAAkB,GAAW,IAAI,CAAC,aAAa,CAAA;gBACnD,OACE,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC3B,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;oBACvD,IAAI,CAAC,eAAe;oBACpB,kBAAkB,GAAG,CAAC,EACtB;oBACA,MAAM,WAAW,GACf,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;oBACtC,IAAI,CAAC,WAAW,EAAE;wBAChB,kDAAkD;wBAClD,MAAK;qBACN;oBACD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAAC,WAAW,CAAC,CAAA;oBAC9C,IACE,SAAS,CAAC,QAAQ,KAAK,QAAQ;wBAC/B,SAAS,CAAC,QAAQ,KAAK,iBAAiB,CAAC,QAAQ;wBACjD,CAAC,IAAI,CAAC,uBAAuB,EAC7B;wBACA,MAAM,IAAI,KAAK,CACb,8KAA8K,CAC/K,CAAA;qBACF;oBAED,qEAAqE;oBACrE,mCAAmC;oBACnC,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBAEzB,mEAAmE;oBACnE,IAAI,iBAAiB,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,EAAE;wBACrD,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;4BAC5B,oCAAoC;4BACpC,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,eAAe,EAAE;gCAC5C,OAAO,OAAO,CAAC,MAAM,CAAC,CAAA;6BACvB;yBACF;qBACF;oBAED,kDAAkD;oBAClD,IAAI,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,CAAA;oBAC7D,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;oBAC5C,kBAAkB,EAAE,CAAA;iBACrB;gBAED,IACE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC5B,CAAC,sBAAsB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAC7D;oBACA,8DAA8D;oBAC9D,OAAO,QAAQ,CAAA;
iBAChB;gBAED,QAAQ,IAAI,CAAC,CAAA;gBAEb,IAAI,QAAQ,GAAG,QAAQ,EAAE;oBACvB,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACzB,MAAM,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,CAAA;iBAChD;aACF,QAAQ,QAAQ,GAAG,QAAQ,EAAC;YAE7B,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;IAED;;OAEG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;SACtB;QAED,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACvB,CAAC;IAED;;;;OAIG;IACG,UAAU,CACd,IAAqB,EACrB,IAA2C;;YAE3C,OAAO,IAAI,OAAO,CAAqB,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACzD,SAAS,iBAAiB,CAAC,GAAW,EAAE,GAAwB;oBAC9D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAA;qBACZ;yBAAM,IAAI,CAAC,GAAG,EAAE;wBACf,qDAAqD;wBACrD,MAAM,CAAC,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAA;qBACnC;yBAAM;wBACL,OAAO,CAAC,GAAG,CAAC,CAAA;qBACb;gBACH,CAAC;gBAED,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,IAAI,EAAE,iBAAiB,CAAC,CAAA;YAC5D,CAAC,CAAC,CAAA;QACJ,CAAC;KAAA;IAED;;;;;OAKG;IACH,sBAAsB,CACpB,IAAqB,EACrB,IAA2C,EAC3C,QAAyD;QAEzD,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACzB,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,EAAE,CAAA;aAC1B;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACzE;QAED,IAAI,cAAc,GAAG,KAAK,CAAA;QAC1B,SAAS,YAAY,CAAC,GAAW,EAAE,GAAwB;YACzD,IAAI,CAAC,cAAc,EAAE;gBACnB,cAAc,GAAG,IAAI,CAAA;gBACrB,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;aACnB;QACH,CAAC;QAED,MAAM,GAAG,GAAuB,IAAI,CAAC,UAAU,CAAC,OAAO,CACrD,IAAI,CAAC,OAAO,EACZ,CAAC,GAAyB,EAAE,EAAE;YAC5B,MAAM,GAAG,GAAuB,IAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA;YAC3D,YAAY,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;QAC9B,CAAC,CACF,CAAA;QAED,IAAI,MAAkB,CAAA;QACtB,GAAG,CAAC,EAAE,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE;YACtB,MAAM,GAAG,IAAI,CAAA;QACf,CAAC,CAAC,CAAA;QAEF,wEAAwE;QACxE,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,GAAG,KAAK,EAAE,GAAG,EAAE;YACpD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,EAAE,CAAA;aACb;YACD,YAAY,CAAC,IAAI,KAAK,CAAC,oBAAoB,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;QAClE,CAAC,CAAC,CAAA;QAEF,GAAG,CAAC,EAAE,CAAC,OAAO,EAAE,UAAS,GAAG;YAC1B,8BAA8B;YAC9B,0BAA0B;YAC1B,YAAY,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC,CAAC,CAAA;QAEF,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACxB;QAED,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE;gBACf,GAAG,CAAC,GAAG,EAAE,CAAA;YACX,CAAC,CAAC,CAAA;YAEF,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACf;aAAM;YACL,GAAG,CAAC,GAAG,EAAE,CAAA;SACV;IACH,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,SAAiB;QACxB,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;IAClC,CAAC;IAEO,eAAe,CACrB,MAAc,EACd,UAAe,EACf,OAAkC;QAElC,MAAM,IAAI,GAAqC,EAAE,CAAA;QAEjD,IAAI,CAAC,SAAS,GAAG,UAAU,CAAA;QAC3B,MAAM,QAAQ,GAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAC9D,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAA;QACzC,MAAM,WAAW,GAAW,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAE/C,IAAI,CAAC,OAAO,GAAwB,EAAE,CAAA;QACtC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAA;QAC3C,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;YACrC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC/B,CAAC,CAAC,WAAW,CAAA;QACf,IAAI,CAAC,OAAO,CAAC,IAAI;YACf,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,IAAI,EAAE,CAAC,CAAA;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;QAC5B,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;YAC1B,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;SACpD;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAEnD,+CAA+C;QAC/C,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,KAAK,MAAM,OAAO,IAAI,I
AAI,CAAC,QAAQ,EAAE;gBACnC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACrC;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,aAAa,CACnB,OAAkC;QAElC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,OAAO,MAAM,CAAC,MAAM,CAClB,EAAE,EACF,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,EAC1C,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAC7B,CAAA;SACF;QAED,OAAO,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAAA;IACrC,CAAC;IAEO,2BAA2B,CACjC,iBAA2C,EAC3C,MAAc,EACd,QAAgB;QAEhB,IAAI,YAAgC,CAAA;QACpC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAA;SAClE;QACD,OAAO,iBAAiB,CAAC,MAAM,CAAC,IAAI,YAAY,IAAI,QAAQ,CAAA;IAC9D,CAAC;IAEO,SAAS,CAAC,SAAc;QAC9B,IAAI,KAAK,CAAA;QACT,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAE9C,IAAI,IAAI,CAAC,UAAU,IAAI,QAAQ,EAAE;YAC/B,KAAK,GAAG,IAAI,CAAC,WAAW,CAAA;SACzB;QAED,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,QAAQ,EAAE;YAChC,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;SACpB;QAED,+CAA+C;QAC/C,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAA;SACb;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,IAAI,UAAU,GAAG,GAAG,CAAA;QACpB,IAAI,IAAI,CAAC,cAAc,EAAE;YACvB,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC,UAAU,CAAA;SAC3E;QAED,sGAAsG;QACtG,IAAI,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;YACjC,MAAM,YAAY,GAAG;gBACnB,UAAU;gBACV,SAAS,EAAE,IAAI,CAAC,UAAU;gBAC1B,KAAK,kCACA,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;oBAC9C,SAAS,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;iBACvD,CAAC,KACF,IAAI,EAAE,QAAQ,CAAC,QAAQ,EACvB,IAAI,EAAE,QAAQ,CAAC,IAAI,GACpB;aACF,CAAA;YAED,IAAI,WAAqB,CAAA;YACzB,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAAA;YAChD,IAAI,QAAQ,EAAE;gBACZ,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAA;aACvE;iBAAM;gBACL,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAA;aACrE;YAED,KAAK,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;YACjC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAA;SACzB;QAED,wFAAwF;QACxF,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,KAAK,EAAE;YAC7B,MAAM,OAAO,GAAG,EAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,UAAU,EAAC,CAAA;YACxD,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACrE,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;SACpB;QAED,gFAAgF;QAChF,IAAI,CAAC,KAAK,EAAE;YACV,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;SACxD;QAED,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,KAAK,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE;gBACjD,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,KAAK,CAAA;IACd,CAAC;IAEa,0BAA0B,CAAC,WAAmB;;YAC1D,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAA;YAC9D,MAAM,EAAE,GAAW,2BAA2B,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAA;YACzE,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;QAChE,CAAC;KAAA;IAEa,gBAAgB,CAC5B,GAAuB,EACvB,OAA4B;;YAE5B,OAAO,IAAI,OAAO,CAAuB,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;gBACjE,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,UAAU,IAAI,CAAC,CAAA;gBAE9C,MAAM,QAAQ,GAAyB;oBACrC,UAAU;oBACV,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;iBACZ,CAAA;gBAED,uCAAuC;gBACvC,IAAI,UAAU,KAAK,SAAS,CAAC,QAAQ,EAAE;oBACrC,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;gBAED,+BAA+B;gBAE/B,SAAS,oBAAoB,CAAC,GAAQ,EAAE,KAAU;oBAChD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAA;wBACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,EAAE;4BACvB,OAAO,CAAC,CAAA;yBACT;qBACF;oBAED,OAAO,KAAK,CAAA;gBACd,CAAC;gBAED,IAAI,GAAQ,CAAA;gBACZ,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG
,MAAM,GAAG,CAAC,QAAQ,EAAE,CAAA;oBAC/B,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBACnC,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,oBAAoB,CAAC,CAAA;yBACjD;6BAAM;4BACL,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;yBAC3B;wBAED,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAA;qBACtB;oBAED,QAAQ,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAA;iBACvC;gBAAC,OAAO,GAAG,EAAE;oBACZ,iEAAiE;iBAClE;gBAED,yDAAyD;gBACzD,IAAI,UAAU,GAAG,GAAG,EAAE;oBACpB,IAAI,GAAW,CAAA;oBAEf,0DAA0D;oBAC1D,IAAI,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE;wBACtB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAA;qBAClB;yBAAM,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBAC1C,yEAAyE;wBACzE,GAAG,GAAG,QAAQ,CAAA;qBACf;yBAAM;wBACL,GAAG,GAAG,oBAAoB,UAAU,GAAG,CAAA;qBACxC;oBAED,MAAM,GAAG,GAAG,IAAI,eAAe,CAAC,GAAG,EAAE,UAAU,CAAC,CAAA;oBAChD,GAAG,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAA;oBAE5B,MAAM,CAAC,GAAG,CAAC,CAAA;iBACZ;qBAAM;oBACL,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;YACH,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAlpBD,gCAkpBC;AAED,MAAM,aAAa,GAAG,CAAC,GAA2B,EAAO,EAAE,CACzD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA"} \ No newline at end of file +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA,uDAAuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEvD,2CAA4B;AAC5B,6CAA8B;AAG9B,4CAA6B;AAC7B,+CAAgC;AAChC,mCAAiC;AAEjC,IAAY,SA4BX;AA5BD,WAAY,SAAS;IACnB,uCAAQ,CAAA;IACR,iEAAqB,CAAA;IACrB,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,qEAAuB,CAAA;IACvB,qEAAuB,CAAA;IACvB,uDAAgB,CAAA;IAChB,2DAAkB,CAAA;IAClB,iEAAqB,CAAA;IACrB,qDAAe,CAAA;IACf,mDAAc,CAAA;IACd,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,yFAAiC,CAAA;IACjC,+DAAoB,CAAA;IACpB,mDAAc,CAAA;IACd,2CAAU,CAAA;IACV,iEAAqB,CAAA;IACrB,yEAAyB,CAAA;IACzB,+DAAoB,CAAA;IACpB,uDAAgB,CAAA;IAChB,uEAAwB,CAAA;IACxB,+DAAoB,CAAA;AACtB,CAAC,EA5BW,SAAS,yBAAT,SAAS,QA4BpB;AAED,IAAY,OAGX;AAHD,WAAY,OAAO;IACjB,4BAAiB,CAAA;IACjB,uCAA4B,CAAA;AAC9B,CAAC,EAHW,OAAO,uBAAP,OAAO,QAGlB;AAED,IAAY,UAEX;AAFD,WAAY,UAAU;IACpB,kDAAoC,CAAA;AACtC,CAAC,EAFW,UAAU,0BAAV,UAAU,QAErB;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,SAAiB;IAC3C,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAA;IACnD,OAAO,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;AACtC,CAAC;AAHD,kCAGC;AAED,MAAM,iBAAiB,GAAa;IAClC,SAAS,CAAC,gBAAgB;IAC1B,SAAS,CAAC,aAAa;IACvB,SAAS,CAAC,QAAQ;IAClB,SAAS,CAAC,iBAAiB;IAC3B,SAAS,CAAC,iBAAiB;CAC5B,CAAA;AACD,MAAM,sBAAsB,GAAa;IACvC,SAAS,CAAC,UAAU;IACpB,SAAS,CAAC,kBAAkB;IAC5B,SAAS,CAAC,cAAc;CACzB,CAAA;AACD,MAAM,kBAAkB,GAAa,CAAC,SAAS,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAA;AACzE,MAAM,yBAAyB,GAAG,EAAE,CAAA;AACpC,MAAM,2BAA2B,GAAG,CAAC,CAAA;AAErC,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAIF;AAVD,0CAUC;AAED,MAAa,kBAAkB;IAC7B,YAAY,OAA6B;QACvC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAGK,QAAQ;;YACZ,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBAE5B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAA;gBACzC,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAA;gBAC5B,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;IAEK,cAAc;;Y
AClB,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,MAAM,MAAM,GAAa,EAAE,CAAA;gBAE3B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;gBACpB,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAA;gBAChC,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAjCD,gDAiCC;AAED,SAAgB,OAAO,CAAC,UAAkB;IACxC,MAAM,SAAS,GAAQ,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;IAC1C,OAAO,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;AACxC,CAAC;AAHD,0BAGC;AAED,MAAa,UAAU;IAkBrB,YACE,SAAkB,EAClB,QAA+B,EAC/B,cAAmC;QAhB7B,oBAAe,GAAG,KAAK,CAAA;QAEvB,oBAAe,GAAG,IAAI,CAAA;QACtB,4BAAuB,GAAG,KAAK,CAAA;QAC/B,kBAAa,GAAG,EAAE,CAAA;QAClB,kBAAa,GAAG,KAAK,CAAA;QACrB,gBAAW,GAAG,CAAC,CAAA;QAIf,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QAOvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;QAC9B,IAAI,CAAC,cAAc,GAAG,cAAc,CAAA;QACpC,IAAI,cAAc,EAAE;YAClB,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC,aAAa,CAAA;YAElD,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,cAAc,CAAC,sBAAsB,IAAI,IAAI,EAAE;gBACjD,IAAI,CAAC,uBAAuB,GAAG,cAAc,CAAC,sBAAsB,CAAA;aACrE;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,YAAY,EAAE,CAAC,CAAC,CAAA;aAC9D;YAED,IAAI,cAAc,CAAC,SAAS,IAAI,IAAI,EAAE;gBACpC,IAAI,CAAC,UAAU,GAAG,cAAc,CAAC,SAAS,CAAA;aAC3C;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,cAAc,CAAC,YAAY,CAAA;aACjD;YAED,IAAI,cAAc,CAAC,UAAU,IAAI,IAAI,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,cAAc,CAAC,UAAU,CAAA;aAC7C;SACF;IACH,CAAC;IAEK,OAAO,CACX,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC3E,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC1E,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,KAAK,CACT,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACzE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,UAAU,CACd,IAAY,EACZ,UAAkB,EAClB,MAA6B,EAC7B,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,iBAAiB,CAAC,CAAA;QAClE,CAAC;KAAA;IAED;;;OAGG;IACG,OAAO,CACX,UAAkB,EAClB,oBAA8C,EAAE;;YAEhD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,QAAQ,CACZ,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC
,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,IAAI,CAC7C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,OAAO,CACX,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,SAAS,CACb,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,KAAK,CAC9C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAED;;;;OAIG;IACG,OAAO,CACX,IAAY,EACZ,UAAkB,EAClB,IAA2C,EAC3C,OAAkC;;YAElC,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;aACrD;YAED,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;YACrC,IAAI,IAAI,GAAoB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,CAAA;YAE1E,oEAAoE;YACpE,MAAM,QAAQ,GACZ,IAAI,CAAC,aAAa,IAAI,kBAAkB,CAAC,QAAQ,CAAC,IAAI,CAAC;gBACrD,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC;gBACtB,CAAC,CAAC,CAAC,CAAA;YACP,IAAI,QAAQ,GAAG,CAAC,CAAA;YAEhB,IAAI,QAAwC,CAAA;YAC5C,GAAG;gBACD,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;gBAE5C,4CAA4C;gBAC5C,IACE,QAAQ;oBACR,QAAQ,CAAC,OAAO;oBAChB,QAAQ,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,CAAC,YAAY,EACtD;oBACA,IAAI,qBAAqD,CAAA;oBAEzD,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC,IAAI,OAAO,CAAC,uBAAuB,CAAC,QAAQ,CAAC,EAAE;4BAC7C,qBAAqB,GAAG,OAAO,CAAA;4BAC/B,MAAK;yBACN;qBACF;oBAED,IAAI,qBAAqB,EAAE;wBACzB,OAAO,qBAAqB,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;qBACpE;yBAAM;wBACL,+EAA+E;wBAC/E,yCAAyC;wBACzC,OAAO,QAAQ,CAAA;qBAChB;iBACF;gBAED,IAAI,kBAAkB,GAAW,IAAI,CAAC,aAAa,CAAA;gBACnD,OACE,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC3B,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;oBACvD,IAAI,CAAC,eAAe;oBACpB,kBAAkB,GAAG,CAAC,EACtB;oBACA,MAAM,WAAW,GACf,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;oBACtC,IAAI,CAAC,WAAW,EAAE;wBAChB,kDAAkD;wBAClD,MAAK;qBACN;oBACD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAAC,WAAW,CAAC,CAAA;oBAC9C,IACE,SAAS,CAAC,QAAQ,KAAK,QAAQ;wBAC/B,SAAS,CAAC,QAAQ,KAAK,iBAAiB,CAAC,QAAQ;wBACjD,CAAC,IAAI,CAAC,uBAAuB,EAC7B;wBACA,MAAM,IAAI,KAAK,CACb,8KAA8K,CAC/K,CAAA;qBACF;oBAED,qEAAqE;oBACrE,mCAAmC;oBACnC,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBAEzB,mEAAmE;oBACnE,IAAI,iBAAiB,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,EAAE;wBACrD,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;4BAC5B,oCAAoC;4BACpC,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,eAAe,EAAE;gCAC5C,OAAO,OAAO,CAAC,MAAM,CAAC,CAAA;6BACvB;yBACF;qBACF;oBAED,kDAAkD;oBAClD,IAAI,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,CAAA;oBAC7D,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;oBAC5C,kBAAkB,EAAE,CAAA;iBACrB;gBAED,IACE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC5B,CAAC,sBAAsB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAC7D;oBACA,8DAA8D;oBAC9D,OAAO,QAAQ,CAAA;iBAChB;gBAED,QAAQ,IAAI,CAAC,CAAA;gBAEb,IAAI,QAAQ,GAAG,QAAQ,EAAE;oBACvB,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACzB,MAAM,IAAI,
CAAC,0BAA0B,CAAC,QAAQ,CAAC,CAAA;iBAChD;aACF,QAAQ,QAAQ,GAAG,QAAQ,EAAC;YAE7B,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;IAED;;OAEG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;SACtB;QAED,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACvB,CAAC;IAED;;;;OAIG;IACG,UAAU,CACd,IAAqB,EACrB,IAA2C;;YAE3C,OAAO,IAAI,OAAO,CAAqB,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACzD,SAAS,iBAAiB,CAAC,GAAW,EAAE,GAAwB;oBAC9D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAA;qBACZ;yBAAM,IAAI,CAAC,GAAG,EAAE;wBACf,qDAAqD;wBACrD,MAAM,CAAC,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAA;qBACnC;yBAAM;wBACL,OAAO,CAAC,GAAG,CAAC,CAAA;qBACb;gBACH,CAAC;gBAED,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,IAAI,EAAE,iBAAiB,CAAC,CAAA;YAC5D,CAAC,CAAC,CAAA;QACJ,CAAC;KAAA;IAED;;;;;OAKG;IACH,sBAAsB,CACpB,IAAqB,EACrB,IAA2C,EAC3C,QAAyD;QAEzD,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACzB,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,EAAE,CAAA;aAC1B;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACzE;QAED,IAAI,cAAc,GAAG,KAAK,CAAA;QAC1B,SAAS,YAAY,CAAC,GAAW,EAAE,GAAwB;YACzD,IAAI,CAAC,cAAc,EAAE;gBACnB,cAAc,GAAG,IAAI,CAAA;gBACrB,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;aACnB;QACH,CAAC;QAED,MAAM,GAAG,GAAuB,IAAI,CAAC,UAAU,CAAC,OAAO,CACrD,IAAI,CAAC,OAAO,EACZ,CAAC,GAAyB,EAAE,EAAE;YAC5B,MAAM,GAAG,GAAuB,IAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA;YAC3D,YAAY,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;QAC9B,CAAC,CACF,CAAA;QAED,IAAI,MAAkB,CAAA;QACtB,GAAG,CAAC,EAAE,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE;YACtB,MAAM,GAAG,IAAI,CAAA;QACf,CAAC,CAAC,CAAA;QAEF,wEAAwE;QACxE,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,GAAG,KAAK,EAAE,GAAG,EAAE;YACpD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,EAAE,CAAA;aACb;YACD,YAAY,CAAC,IAAI,KAAK,CAAC,oBAAoB,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;QAClE,CAAC,CAAC,CAAA;QAEF,GAAG,CAAC,EAAE,CAAC,OAAO,EAAE,UAAU,GAAG;YAC3B,8BAA8B;YAC9B,0BAA0B;YAC1B,YAAY,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC,CAAC,CAAA;QAEF,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACxB;QAED,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE;gBACf,GAAG,CAAC,GAAG,EAAE,CAAA;YACX,CAAC,CAAC,CAAA;YAEF,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACf;aAAM;YACL,GAAG,CAAC,GAAG,EAAE,CAAA;SACV;IACH,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,SAAiB;QACxB,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;IAClC,CAAC;IAED,kBAAkB,CAAC,SAAiB;QAClC,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAC9C,IAAI,CAAC,QAAQ,EAAE;YACb,OAAM;SACP;QAED,OAAO,IAAI,CAAC,wBAAwB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAA;IAC3D,CAAC;IAEO,eAAe,CACrB,MAAc,EACd,UAAe,EACf,OAAkC;QAElC,MAAM,IAAI,GAAqC,EAAE,CAAA;QAEjD,IAAI,CAAC,SAAS,GAAG,UAAU,CAAA;QAC3B,MAAM,QAAQ,GAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAC9D,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAA;QACzC,MAAM,WAAW,GAAW,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAE/C,IAAI,CAAC,OAAO,GAAwB,EAAE,CAAA;QACtC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAA;QAC3C,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;YACrC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC/B,CAAC,CAAC,WAAW,CAAA;QACf,IAAI,CAAC,OAAO,CAAC,IAAI;YACf,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,IAAI,EAAE,CAAC,CAAA;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;QAC5B,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;YAC1B,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,
CAAC,SAAS,CAAA;SACpD;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAEnD,+CAA+C;QAC/C,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACrC;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,aAAa,CACnB,OAAkC;QAElC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,OAAO,MAAM,CAAC,MAAM,CAClB,EAAE,EACF,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,EAC1C,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAC7B,CAAA;SACF;QAED,OAAO,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAAA;IACrC,CAAC;IAEO,2BAA2B,CACjC,iBAA2C,EAC3C,MAAc,EACd,QAAgB;QAEhB,IAAI,YAAgC,CAAA;QACpC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAA;SAClE;QACD,OAAO,iBAAiB,CAAC,MAAM,CAAC,IAAI,YAAY,IAAI,QAAQ,CAAA;IAC9D,CAAC;IAEO,SAAS,CAAC,SAAc;QAC9B,IAAI,KAAK,CAAA;QACT,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAE9C,IAAI,IAAI,CAAC,UAAU,IAAI,QAAQ,EAAE;YAC/B,KAAK,GAAG,IAAI,CAAC,WAAW,CAAA;SACzB;QAED,IAAI,CAAC,QAAQ,EAAE;YACb,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;SACpB;QAED,+CAA+C;QAC/C,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAA;SACb;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,IAAI,UAAU,GAAG,GAAG,CAAA;QACpB,IAAI,IAAI,CAAC,cAAc,EAAE;YACvB,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC,UAAU,CAAA;SAC3E;QAED,sGAAsG;QACtG,IAAI,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;YACjC,MAAM,YAAY,GAAG;gBACnB,UAAU;gBACV,SAAS,EAAE,IAAI,CAAC,UAAU;gBAC1B,KAAK,kCACA,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;oBAC9C,SAAS,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;iBACvD,CAAC,KACF,IAAI,EAAE,QAAQ,CAAC,QAAQ,EACvB,IAAI,EAAE,QAAQ,CAAC,IAAI,GACpB;aACF,CAAA;YAED,IAAI,WAAqB,CAAA;YACzB,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAAA;YAChD,IAAI,QAAQ,EAAE;gBACZ,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAA;aACvE;iBAAM;gBACL,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAA;aACrE;YAED,KAAK,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;YACjC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAA;SACzB;QAED,uDAAuD;QACvD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,OAAO,GAAG,EAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,UAAU,EAAC,CAAA;YACxD,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACrE,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;SACpB;QAED,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,KAAK,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE;gBACjD,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,wBAAwB,CAAC,SAAc,EAAE,QAAa;QAC5D,IAAI,UAAU,CAAA;QAEd,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,UAAU,GAAG,IAAI,CAAC,qBAAqB,CAAA;SACxC;QAED,+CAA+C;QAC/C,IAAI,UAAU,EAAE;YACd,OAAO,UAAU,CAAA;SAClB;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,UAAU,GAAG,IAAI,mBAAU,iBACzB,GAAG,EAAE,QAAQ,CAAC,IAAI,EAClB,UAAU,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IACjC,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;YAC9C,KAAK,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;SACnD,CAAC,EACF,CAAA;QACF,IAAI,CAAC,qBAAqB,GAAG,UAAU,CAAA;QAEvC,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,UAAU,IAAI,EAAE,EAAE;gBACtE,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,UAAU,CAAA;IACnB,CAAC;IAEa,0BAA0B,CAAC,WAAmB;;YAC1D,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAA;YAC9D,MAAM,EAAE,GAAW,2BAA2B,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAA;
YACzE,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;QAChE,CAAC;KAAA;IAEa,gBAAgB,CAC5B,GAAuB,EACvB,OAA4B;;YAE5B,OAAO,IAAI,OAAO,CAAuB,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;gBACjE,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,UAAU,IAAI,CAAC,CAAA;gBAE9C,MAAM,QAAQ,GAAyB;oBACrC,UAAU;oBACV,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;iBACZ,CAAA;gBAED,uCAAuC;gBACvC,IAAI,UAAU,KAAK,SAAS,CAAC,QAAQ,EAAE;oBACrC,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;gBAED,+BAA+B;gBAE/B,SAAS,oBAAoB,CAAC,GAAQ,EAAE,KAAU;oBAChD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAA;wBACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,EAAE;4BACvB,OAAO,CAAC,CAAA;yBACT;qBACF;oBAED,OAAO,KAAK,CAAA;gBACd,CAAC;gBAED,IAAI,GAAQ,CAAA;gBACZ,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG,MAAM,GAAG,CAAC,QAAQ,EAAE,CAAA;oBAC/B,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBACnC,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,oBAAoB,CAAC,CAAA;yBACjD;6BAAM;4BACL,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;yBAC3B;wBAED,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAA;qBACtB;oBAED,QAAQ,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAA;iBACvC;gBAAC,OAAO,GAAG,EAAE;oBACZ,iEAAiE;iBAClE;gBAED,yDAAyD;gBACzD,IAAI,UAAU,GAAG,GAAG,EAAE;oBACpB,IAAI,GAAW,CAAA;oBAEf,0DAA0D;oBAC1D,IAAI,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE;wBACtB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAA;qBAClB;yBAAM,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBAC1C,yEAAyE;wBACzE,GAAG,GAAG,QAAQ,CAAA;qBACf;yBAAM;wBACL,GAAG,GAAG,oBAAoB,UAAU,GAAG,CAAA;qBACxC;oBAED,MAAM,GAAG,GAAG,IAAI,eAAe,CAAC,GAAG,EAAE,UAAU,CAAC,CAAA;oBAChD,GAAG,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAA;oBAE5B,MAAM,CAAC,GAAG,CAAC,CAAA;iBACZ;qBAAM;oBACL,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;YACH,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AA3rBD,gCA2rBC;AAED,MAAM,aAAa,GAAG,CAAC,GAA2B,EAAO,EAAE,CACzD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/interfaces.d.ts b/node_modules/@actions/http-client/lib/interfaces.d.ts index 54fd4a8..775ced9 100644 --- a/node_modules/@actions/http-client/lib/interfaces.d.ts +++ b/node_modules/@actions/http-client/lib/interfaces.d.ts @@ -1,4 +1,6 @@ /// +/// +/// import * as http from 'http'; import * as https from 'https'; import { HttpClientResponse } from './index'; diff --git a/node_modules/@actions/http-client/lib/proxy.js b/node_modules/@actions/http-client/lib/proxy.js index 76abb72..d9c43ad 100644 --- a/node_modules/@actions/http-client/lib/proxy.js +++ b/node_modules/@actions/http-client/lib/proxy.js @@ -15,7 +15,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; diff --git a/node_modules/@actions/http-client/lib/proxy.js.map b/node_modules/@actions/http-client/lib/proxy.js.map index b820679..585c17d 100644 --- a/node_modules/@actions/http-client/lib/proxy.js.map +++ b/node_modules/@actions/http-client/lib/proxy.js.map @@ -1 +1 @@ 
-{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;KACzB;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AApBD,kCAoBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AACH,CAAC"} \ No newline at end of file 
+{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,IAAI;YACF,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;SACzB;QAAC,WAAM;YACN,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC;gBACrE,OAAO,IAAI,GAAG,CAAC,UAAU,QAAQ,EAAE,CAAC,CAAA;SACvC;KACF;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AAzBD,kCAyBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/package.json b/node_modules/@actions/http-client/package.json index 7f5c8ec..0ae89c3 100644 --- a/node_modules/@actions/http-client/package.json +++ b/node_modules/@actions/http-client/package.json @@ -1,6 +1,6 @@ { "name": "@actions/http-client", - "version": "2.1.0", + "version": "2.2.1", "description": "Actions Http Client", "keywords": [ "github", @@ -39,10 +39,13 @@ "url": "https://github.com/actions/toolkit/issues" }, "devDependencies": { + "@types/node": "20.7.1", "@types/tunnel": "0.0.3", - "proxy": "^1.0.1" + "proxy": "^2.1.1", + "@types/proxy": "^1.0.1" }, "dependencies": { - "tunnel": "^0.0.6" + "tunnel": "^0.0.6", + "undici": "^5.25.4" } -} +} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/CHANGELOG.md b/node_modules/@azure/abort-controller/CHANGELOG.md new file mode 100644 index 0000000..b764523 --- /dev/null +++ b/node_modules/@azure/abort-controller/CHANGELOG.md @@ -0,0 +1,34 @@ +# Release History + +## 1.1.0 (2022-05-05) + +- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features +- With the dropping of 
support for Node.js versions that are no longer in LTS, the dependency on `@types/node` has been updated to version 12. Read our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +## 1.0.4 (2021-03-04) + +Fixes issue [13985](https://github.com/Azure/azure-sdk-for-js/issues/13985) where abort event listeners that removed themselves when invoked could prevent other event listeners from being invoked. + +## 1.0.3 (2021-02-23) + +Support Typescript version < 3.6 by down-leveling the type definition files. ([PR 12793](https://github.com/Azure/azure-sdk-for-js/pull/12793)) + +## 1.0.2 (2020-01-07) + +Updates the `tslib` dependency to version 2.x. + +## 1.0.1 (2019-12-04) + +Fixes the [bug 6271](https://github.com/Azure/azure-sdk-for-js/issues/6271) that can occur with angular prod builds due to triple-slash directives. +([PR 6344](https://github.com/Azure/azure-sdk-for-js/pull/6344)) + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/abort-controller` package. + +Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. +([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.2 (2019-09-09) + +Listeners attached to an `AbortSignal` now receive an event with the type `abort`. (PR #4756) diff --git a/node_modules/@azure/abort-controller/LICENSE b/node_modules/@azure/abort-controller/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/abort-controller/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/abort-controller/README.md b/node_modules/@azure/abort-controller/README.md new file mode 100644 index 0000000..ce578d9 --- /dev/null +++ b/node_modules/@azure/abort-controller/README.md @@ -0,0 +1,110 @@ +# Azure Abort Controller client library for JavaScript + +The `@azure/abort-controller` package provides `AbortController` and `AbortSignal` classes. These classes are compatible +with the [AbortController](https://developer.mozilla.org/docs/Web/API/AbortController) built into modern browsers +and the `AbortSignal` used by [fetch](https://developer.mozilla.org/docs/Web/API/Fetch_API). +Use the `AbortController` class to create an instance of the `AbortSignal` class that can be used to cancel an operation +in an Azure SDK that accept a parameter of type `AbortSignalLike`. 
+ +Key links: + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller) +- [Package (npm)](https://www.npmjs.com/package/@azure/abort-controller) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/overview/azure/abort-controller-readme) + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/abort-controller +``` + +## Key Concepts + +Use the `AbortController` to create an `AbortSignal` which can then be passed to Azure SDK operations to cancel +pending work. The `AbortSignal` can be accessed via the `signal` property on an instantiated `AbortController`. +An `AbortSignal` can also be returned directly from a static method, e.g. `AbortController.timeout(100)`. +that is cancelled after 100 milliseconds. + +Calling `abort()` on the instantiated `AbortController` invokes the registered `abort` +event listeners on the associated `AbortSignal`. +Any subsequent calls to `abort()` on the same controller will have no effect. + +The `AbortSignal.none` static property returns an `AbortSignal` that can not be aborted. + +Multiple instances of an `AbortSignal` can be linked so that calling `abort()` on the parent signal, +aborts all linked signals. +This linkage is one-way, meaning that a parent signal can affect a linked signal, but not the other way around. +To link `AbortSignals` together, pass in the parent signals to the `AbortController` constructor. + +## Examples + +The below examples assume that `doAsyncWork` is a function that takes a bag of properties, one of which is +of the abort signal. + +### Example 1 - basic usage + +```js +import { AbortController } from "@azure/abort-controller"; + +const controller = new AbortController(); +doAsyncWork({ abortSignal: controller.signal }); + +// at some point later +controller.abort(); +``` + +### Example 2 - Aborting with timeout + +```js +import { AbortController } from "@azure/abort-controller"; + +const signal = AbortController.timeout(1000); +doAsyncWork({ abortSignal: signal }); +``` + +### Example 3 - Aborting sub-tasks + +```js +import { AbortController } from "@azure/abort-controller"; + +const allTasksController = new AbortController(); + +const subTask1 = new AbortController(allTasksController.signal); +const subtask2 = new AbortController(allTasksController.signal); + +allTasksController.abort(); // aborts allTasksSignal, subTask1, subTask2 +subTask1.abort(); // aborts only subTask1 +``` + +### Example 4 - Aborting with parent signal or timeout + +```js +import { AbortController } from "@azure/abort-controller"; + +const allTasksController = new AbortController(); + +// create a subtask controller that can be aborted manually, +// or when either the parent task aborts or the timeout is reached. +const subTask = new AbortController(allTasksController.signal, AbortController.timeout(100)); + +allTasksController.abort(); // aborts allTasksSignal, subTask +subTask.abort(); // aborts only subTask +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). 
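
A common point of confusion when troubleshooting is telling cancellation apart from a genuine failure. When an operation is cancelled through one of these signals, it rejects with an error whose `name` property is `"AbortError"`, so checking that property distinguishes an intentional abort from other errors. The sketch below is a minimal illustration only; it assumes the same hypothetical `doAsyncWork` helper used in the examples above, which takes an options bag containing the abort signal.

```js
import { AbortController } from "@azure/abort-controller";

async function run() {
  const controller = new AbortController();

  // Cancel the pending work after one second.
  setTimeout(() => controller.abort(), 1000);

  try {
    await doAsyncWork({ abortSignal: controller.signal });
  } catch (e) {
    if (e.name === "AbortError") {
      // The operation was cancelled via the abort signal; treat it as expected.
      return;
    }
    // Anything else is a real failure and should surface as usual.
    throw e;
  }
}
```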
+ +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fabort-controller%2FREADME.png) diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js new file mode 100644 index 0000000..0d260f3 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortSignal, abortSignal } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. + * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export class AbortController { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal(); + if (!parentSignals) { + return; + } + // coerce parentSignals into an array + if (!Array.isArray(parentSignals)) { + // eslint-disable-next-line prefer-rest-params + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + // if the parent signal has already had abort() called, + // then call abort on this signal as well. + if (parentSignal.aborted) { + this.abort(); + } + else { + // when the parent signal aborts, this signal should as well. + parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } + } + } + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. 
+ * + * @readonly + */ + get signal() { + return this._signal; + } + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort() { + abortSignal(this._signal); + } + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); + // Prevent the active Timer from keeping the Node.js event loop active. + if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; + } +} +//# sourceMappingURL=AbortController.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map new file mode 100644 index 0000000..8a6f7d0 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortController.js","sourceRoot":"","sources":["../../src/AbortController.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAmB,WAAW,EAAE,MAAM,eAAe,CAAC;AAE1E;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,MAAM,OAAO,eAAe;IAW1B,6EAA6E;IAC7E,YAAY,aAAmB;QAC7B,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAEjC,IAAI,CAAC,aAAa,EAAE;YAClB,OAAO;SACR;QACD,qCAAqC;QACrC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YACjC,8CAA8C;YAC9C,aAAa,GAAG,SAAS,CAAC;SAC3B;QACD,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,uDAAuD;YACvD,0CAA0C;YAC1C,IAAI,YAAY,CAAC,OAAO,EAAE;gBACxB,IAAI,CAAC,KAAK,EAAE,CAAC;aACd;iBAAM;gBACL,6DAA6D;gBAC7D,YAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;oBAC1C,IAAI,CAAC,KAAK,EAAE,CAAC;gBACf,CAAC,CAAC,CAAC;aACJ;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACH,KAAK;QACH,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC5B,CAAC;IAED;;;OAGG;IACI,MAAM,CAAC,OAAO,CAAC,EAAU;QAC9B,MAAM,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;QACjC,MAAM,KAAK,GAAG,UAAU,CAAC,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,CAAC;QAClD,uEAAuE;QACvE,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACrC,KAAK,CAAC,KAAK,EAAE,CAAC;SACf;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignal, AbortSignalLike, abortSignal } from \"./AbortSignal\";\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n\n/**\n * An AbortController provides an AbortSignal and the associated controls to signal\n * that an asynchronous operation should be aborted.\n *\n * @example\n * Abort an operation when another event fires\n * ```ts\n * const controller = new AbortController();\n * const signal = controller.signal;\n * doAsyncWork(signal);\n * button.addEventListener('click', () => 
controller.abort());\n * ```\n *\n * @example\n * Share aborter cross multiple operations in 30s\n * ```ts\n * // Upload the same data to 2 different data centers at the same time,\n * // abort another when any of them is finished\n * const controller = AbortController.withTimeout(30 * 1000);\n * doAsyncWork(controller.signal).then(controller.abort);\n * doAsyncWork(controller.signal).then(controller.abort);\n *```\n *\n * @example\n * Cascaded aborting\n * ```ts\n * // All operations can't take more than 30 seconds\n * const aborter = Aborter.timeout(30 * 1000);\n *\n * // Following 2 operations can't take more than 25 seconds\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * ```\n */\nexport class AbortController {\n private _signal: AbortSignal;\n\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(parentSignals?: AbortSignalLike[]);\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(...parentSignals: AbortSignalLike[]);\n // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types\n constructor(parentSignals?: any) {\n this._signal = new AbortSignal();\n\n if (!parentSignals) {\n return;\n }\n // coerce parentSignals into an array\n if (!Array.isArray(parentSignals)) {\n // eslint-disable-next-line prefer-rest-params\n parentSignals = arguments;\n }\n for (const parentSignal of parentSignals) {\n // if the parent signal has already had abort() called,\n // then call abort on this signal as well.\n if (parentSignal.aborted) {\n this.abort();\n } else {\n // when the parent signal aborts, this signal should as well.\n parentSignal.addEventListener(\"abort\", () => {\n this.abort();\n });\n }\n }\n }\n\n /**\n * The AbortSignal associated with this controller that will signal aborted\n * when the abort method is called on this controller.\n *\n * @readonly\n */\n public get signal(): AbortSignal {\n return this._signal;\n }\n\n /**\n * Signal that any operations passed this controller's associated abort signal\n * to cancel any remaining work and throw an `AbortError`.\n */\n abort(): void {\n abortSignal(this._signal);\n }\n\n /**\n * Creates a new AbortSignal instance that will abort after the provided ms.\n * @param ms - Elapsed time in milliseconds to trigger an abort.\n */\n public static timeout(ms: number): AbortSignal {\n const signal = new AbortSignal();\n const timer = setTimeout(abortSignal, ms, signal);\n // Prevent the active Timer from keeping the Node.js event loop active.\n if (typeof timer.unref === \"function\") {\n timer.unref();\n }\n return signal;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js new file mode 100644 index 0000000..e97336a --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. 
+ * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export class AbortSignal { + constructor() { + /** + * onabort event listener. + */ + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); + } + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); + } + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } + } + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +export function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + // Create a copy of listeners so mutations to the array + // (e.g. via removeListener calls) don't affect the listeners + // we invoke. 
+ listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); +} +//# sourceMappingURL=AbortSignal.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map new file mode 100644 index 0000000..1ca33bd --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignal.js","sourceRoot":"","sources":["../../src/AbortSignal.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,6CAA6C;AAI7C,MAAM,YAAY,GAAG,IAAI,OAAO,EAAqC,CAAC;AACtE,MAAM,UAAU,GAAG,IAAI,OAAO,EAAwB,CAAC;AA6BvD;;;;;;;;;;;;GAYG;AACH,MAAM,OAAO,WAAW;IACtB;QA2BA;;WAEG;QACI,YAAO,GAAiC,IAAI,CAAC;QA7BlD,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAC3B,UAAU,CAAC,GAAG,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC;IAED;;;;OAIG;IACH,IAAW,OAAO;QAChB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YACzB,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,OAAO,UAAU,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;IAC/B,CAAC;IAED;;;;OAIG;IACI,MAAM,KAAK,IAAI;QACpB,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;IAOD;;;;;OAKG;IACI,gBAAgB;IACrB,yCAAyC;IACzC,KAAc,EACd,QAAiD;QAEjD,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YAC3B,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAC1C,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;OAKG;IACI,mBAAmB;IACxB,yCAAyC;IACzC,KAAc,EACd,QAAiD;QAEjD,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YAC3B,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAE1C,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1C,IAAI,KAAK,GAAG,CAAC,CAAC,EAAE;YACd,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAEG;IACH,aAAa,CAAC,MAAa;QACzB,MAAM,IAAI,KAAK,CACb,kHAAkH,CACnH,CAAC;IACJ,CAAC;CACF;AAED;;;;;;;;GAQG;AACH,wEAAwE;AACxE,MAAM,UAAU,WAAW,CAAC,MAAmB;IAC7C,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,OAAO;KACR;IAED,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KAC7B;IAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,MAAM,CAAE,CAAC;IAC5C,IAAI,SAAS,EAAE;QACb,uDAAuD;QACvD,6DAA6D;QAC7D,aAAa;QACb,SAAS,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE,EAAE;YACrC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;KACJ;IAED,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// eslint-disable-next-line @typescript-eslint/triple-slash-reference\n/// \n\ntype AbortEventListener = (this: AbortSignalLike, ev?: any) => any;\n\nconst listenersMap = new WeakMap();\nconst abortedMap = new WeakMap();\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n}\n\n/**\n * An aborter instance implements AbortSignal interface, can abort HTTP 
requests.\n *\n * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.\n * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation\n * cannot or will not ever be cancelled.\n *\n * @example\n * Abort without timeout\n * ```ts\n * await doAsyncWork(AbortSignal.none);\n * ```\n */\nexport class AbortSignal implements AbortSignalLike {\n constructor() {\n listenersMap.set(this, []);\n abortedMap.set(this, false);\n }\n\n /**\n * Status of whether aborted or not.\n *\n * @readonly\n */\n public get aborted(): boolean {\n if (!abortedMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n return abortedMap.get(this)!;\n }\n\n /**\n * Creates a new AbortSignal instance that will never be aborted.\n *\n * @readonly\n */\n public static get none(): AbortSignal {\n return new AbortSignal();\n }\n\n /**\n * onabort event listener.\n */\n public onabort: ((ev?: Event) => any) | null = null;\n\n /**\n * Added new \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be added\n */\n public addEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n listeners.push(listener);\n }\n\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be removed\n */\n public removeEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n\n const index = listeners.indexOf(listener);\n if (index > -1) {\n listeners.splice(index, 1);\n }\n }\n\n /**\n * Dispatches a synthetic event to the AbortSignal.\n */\n dispatchEvent(_event: Event): boolean {\n throw new Error(\n \"This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.\"\n );\n }\n}\n\n/**\n * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.\n * Will try to trigger abort event for all linked AbortSignal nodes.\n *\n * - If there is a timeout, the timer will be cancelled.\n * - If aborted is true, nothing will happen.\n *\n * @internal\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters\nexport function abortSignal(signal: AbortSignal): void {\n if (signal.aborted) {\n return;\n }\n\n if (signal.onabort) {\n signal.onabort.call(signal);\n }\n\n const listeners = listenersMap.get(signal)!;\n if (listeners) {\n // Create a copy of listeners so mutations to the array\n // (e.g. 
via removeListener calls) don't affect the listeners\n // we invoke.\n listeners.slice().forEach((listener) => {\n listener.call(signal, { type: \"abort\" });\n });\n }\n\n abortedMap.set(signal, true);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/index.js b/node_modules/@azure/abort-controller/dist-esm/src/index.js new file mode 100644 index 0000000..ddbf505 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/index.js @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// Changes to Aborter +// * Rename Aborter to AbortSignal +// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation +// * Remove withTimeout, it's moved to the controller +// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller. +// Potential changes to align with DOM Spec +// * dispatchEvent on Signal +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal } from "./AbortSignal"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/index.js.map b/node_modules/@azure/abort-controller/dist-esm/src/index.js.map new file mode 100644 index 0000000..def556e --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,qBAAqB;AACrB,kCAAkC;AAClC,uHAAuH;AACvH,qDAAqD;AACrD,2GAA2G;AAE3G,2CAA2C;AAC3C,4BAA4B;AAE5B,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAChE,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// Changes to Aborter\n// * Rename Aborter to AbortSignal\n// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation\n// * Remove withTimeout, it's moved to the controller\n// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller.\n\n// Potential changes to align with DOM Spec\n// * dispatchEvent on Signal\n\nexport { AbortController, AbortError } from \"./AbortController\";\nexport { AbortSignal, AbortSignalLike } from \"./AbortSignal\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist/index.js b/node_modules/@azure/abort-controller/dist/index.js new file mode 100644 index 0000000..650dd5f --- /dev/null +++ b/node_modules/@azure/abort-controller/dist/index.js @@ -0,0 +1,239 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +class AbortSignal { + constructor() { + /** + * onabort event listener. 
+ */ + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); + } + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); + } + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } + } + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + // Create a copy of listeners so mutations to the array + // (e.g. via removeListener calls) don't affect the listeners + // we invoke. + listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); +} + +// Copyright (c) Microsoft Corporation. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. 
+ * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +class AbortController { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal(); + if (!parentSignals) { + return; + } + // coerce parentSignals into an array + if (!Array.isArray(parentSignals)) { + // eslint-disable-next-line prefer-rest-params + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + // if the parent signal has already had abort() called, + // then call abort on this signal as well. + if (parentSignal.aborted) { + this.abort(); + } + else { + // when the parent signal aborts, this signal should as well. + parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } + } + } + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort() { + abortSignal(this._signal); + } + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); + // Prevent the active Timer from keeping the Node.js event loop active. 
+ if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; + } +} + +exports.AbortController = AbortController; +exports.AbortError = AbortError; +exports.AbortSignal = AbortSignal; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/abort-controller/dist/index.js.map b/node_modules/@azure/abort-controller/dist/index.js.map new file mode 100644 index 0000000..148b005 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/AbortSignal.ts","../src/AbortController.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// eslint-disable-next-line @typescript-eslint/triple-slash-reference\n/// \n\ntype AbortEventListener = (this: AbortSignalLike, ev?: any) => any;\n\nconst listenersMap = new WeakMap();\nconst abortedMap = new WeakMap();\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n}\n\n/**\n * An aborter instance implements AbortSignal interface, can abort HTTP requests.\n *\n * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.\n * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation\n * cannot or will not ever be cancelled.\n *\n * @example\n * Abort without timeout\n * ```ts\n * await doAsyncWork(AbortSignal.none);\n * ```\n */\nexport class AbortSignal implements AbortSignalLike {\n constructor() {\n listenersMap.set(this, []);\n abortedMap.set(this, false);\n }\n\n /**\n * Status of whether aborted or not.\n *\n * @readonly\n */\n public get aborted(): boolean {\n if (!abortedMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n return abortedMap.get(this)!;\n }\n\n /**\n * Creates a new AbortSignal instance that will never be aborted.\n *\n * @readonly\n */\n public static get none(): AbortSignal {\n return new AbortSignal();\n }\n\n /**\n * onabort event listener.\n */\n public onabort: ((ev?: Event) => any) | null = null;\n\n /**\n * Added new \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be added\n */\n public addEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n listeners.push(listener);\n }\n\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be removed\n */\n public removeEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n 
): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n\n const index = listeners.indexOf(listener);\n if (index > -1) {\n listeners.splice(index, 1);\n }\n }\n\n /**\n * Dispatches a synthetic event to the AbortSignal.\n */\n dispatchEvent(_event: Event): boolean {\n throw new Error(\n \"This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.\"\n );\n }\n}\n\n/**\n * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.\n * Will try to trigger abort event for all linked AbortSignal nodes.\n *\n * - If there is a timeout, the timer will be cancelled.\n * - If aborted is true, nothing will happen.\n *\n * @internal\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters\nexport function abortSignal(signal: AbortSignal): void {\n if (signal.aborted) {\n return;\n }\n\n if (signal.onabort) {\n signal.onabort.call(signal);\n }\n\n const listeners = listenersMap.get(signal)!;\n if (listeners) {\n // Create a copy of listeners so mutations to the array\n // (e.g. via removeListener calls) don't affect the listeners\n // we invoke.\n listeners.slice().forEach((listener) => {\n listener.call(signal, { type: \"abort\" });\n });\n }\n\n abortedMap.set(signal, true);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignal, AbortSignalLike, abortSignal } from \"./AbortSignal\";\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n\n/**\n * An AbortController provides an AbortSignal and the associated controls to signal\n * that an asynchronous operation should be aborted.\n *\n * @example\n * Abort an operation when another event fires\n * ```ts\n * const controller = new AbortController();\n * const signal = controller.signal;\n * doAsyncWork(signal);\n * button.addEventListener('click', () => controller.abort());\n * ```\n *\n * @example\n * Share aborter cross multiple operations in 30s\n * ```ts\n * // Upload the same data to 2 different data centers at the same time,\n * // abort another when any of them is finished\n * const controller = AbortController.withTimeout(30 * 1000);\n * doAsyncWork(controller.signal).then(controller.abort);\n * doAsyncWork(controller.signal).then(controller.abort);\n *```\n *\n * @example\n * Cascaded aborting\n * ```ts\n * // All operations can't take more than 30 seconds\n * const aborter = Aborter.timeout(30 * 1000);\n *\n * // Following 2 operations can't take more than 25 seconds\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * ```\n */\nexport class AbortController {\n private _signal: AbortSignal;\n\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(parentSignals?: AbortSignalLike[]);\n /**\n * 
@param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(...parentSignals: AbortSignalLike[]);\n // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types\n constructor(parentSignals?: any) {\n this._signal = new AbortSignal();\n\n if (!parentSignals) {\n return;\n }\n // coerce parentSignals into an array\n if (!Array.isArray(parentSignals)) {\n // eslint-disable-next-line prefer-rest-params\n parentSignals = arguments;\n }\n for (const parentSignal of parentSignals) {\n // if the parent signal has already had abort() called,\n // then call abort on this signal as well.\n if (parentSignal.aborted) {\n this.abort();\n } else {\n // when the parent signal aborts, this signal should as well.\n parentSignal.addEventListener(\"abort\", () => {\n this.abort();\n });\n }\n }\n }\n\n /**\n * The AbortSignal associated with this controller that will signal aborted\n * when the abort method is called on this controller.\n *\n * @readonly\n */\n public get signal(): AbortSignal {\n return this._signal;\n }\n\n /**\n * Signal that any operations passed this controller's associated abort signal\n * to cancel any remaining work and throw an `AbortError`.\n */\n abort(): void {\n abortSignal(this._signal);\n }\n\n /**\n * Creates a new AbortSignal instance that will abort after the provided ms.\n * @param ms - Elapsed time in milliseconds to trigger an abort.\n */\n public static timeout(ms: number): AbortSignal {\n const signal = new AbortSignal();\n const timer = setTimeout(abortSignal, ms, signal);\n // Prevent the active Timer from keeping the Node.js event loop active.\n if (typeof timer.unref === \"function\") {\n timer.unref();\n }\n return signal;\n }\n}\n"],"names":[],"mappings":";;;;AAAA;AACA;AAGA;AAIA,MAAM,YAAY,GAAG,IAAI,OAAO,EAAqC,CAAC;AACtE,MAAM,UAAU,GAAG,IAAI,OAAO,EAAwB,CAAC;AA6BvD;;;;;;;;;;;;AAYG;MACU,WAAW,CAAA;AACtB,IAAA,WAAA,GAAA;AA2BA;;AAEG;QACI,IAAO,CAAA,OAAA,GAAiC,IAAI,CAAC;AA7BlD,QAAA,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;AAC3B,QAAA,UAAU,CAAC,GAAG,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;KAC7B;AAED;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AACzB,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;AAED,QAAA,OAAO,UAAU,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;KAC9B;AAED;;;;AAIG;AACI,IAAA,WAAW,IAAI,GAAA;QACpB,OAAO,IAAI,WAAW,EAAE,CAAC;KAC1B;AAOD;;;;;AAKG;IACI,gBAAgB;;AAErB,IAAA,KAAc,EACd,QAAiD,EAAA;AAEjD,QAAA,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;AAC1C,QAAA,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC1B;AAED;;;;;AAKG;IACI,mBAAmB;;AAExB,IAAA,KAAc,EACd,QAAiD,EAAA;AAEjD,QAAA,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAE1C,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AAC1C,QAAA,IAAI,KAAK,GAAG,CAAC,CAAC,EAAE;AACd,YAAA,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AAC5B,SAAA;KACF;AAED;;AAEG;AACH,IAAA,aAAa,CAAC,MAAa,EAAA;AACzB,QAAA,MAAM,IAAI,KAAK,CACb,kHAAkH,CACnH,CAAC;KACH;AACF,CAAA;AAED;;;;;;;;AAQG;AACH;AACM,SAAU,WAAW,CAAC,MAAmB,EAAA;IAC7C,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,OAAO;AACR,KAAA;IAED,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,QAAA,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAC7B,KAAA;IAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,MAAM,CAAE,CAAC;AAC5C,IAAA,IAAI,SAAS,EAAE;;;;QAIb,SAAS,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,KAAI;YACrC,QAAQ,CAAC,
IAAI,CAAC,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;AACJ,KAAA;AAED,IAAA,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAC/B;;ACtKA;AAKA;;;;;;;;;;;;;;;;;AAiBG;AACG,MAAO,UAAW,SAAQ,KAAK,CAAA;AACnC,IAAA,WAAA,CAAY,OAAgB,EAAA;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;KAC1B;AACF,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCG;MACU,eAAe,CAAA;;AAY1B,IAAA,WAAA,CAAY,aAAmB,EAAA;AAC7B,QAAA,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAEjC,IAAI,CAAC,aAAa,EAAE;YAClB,OAAO;AACR,SAAA;;AAED,QAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;;YAEjC,aAAa,GAAG,SAAS,CAAC;AAC3B,SAAA;AACD,QAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;;;YAGxC,IAAI,YAAY,CAAC,OAAO,EAAE;gBACxB,IAAI,CAAC,KAAK,EAAE,CAAC;AACd,aAAA;AAAM,iBAAA;;AAEL,gBAAA,YAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAK;oBAC1C,IAAI,CAAC,KAAK,EAAE,CAAC;AACf,iBAAC,CAAC,CAAC;AACJ,aAAA;AACF,SAAA;KACF;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;QACf,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;AAGG;IACH,KAAK,GAAA;AACH,QAAA,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;AAED;;;AAGG;IACI,OAAO,OAAO,CAAC,EAAU,EAAA;AAC9B,QAAA,MAAM,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;QACjC,MAAM,KAAK,GAAG,UAAU,CAAC,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,CAAC;;AAElD,QAAA,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACrC,KAAK,CAAC,KAAK,EAAE,CAAC;AACf,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AACF;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/package.json b/node_modules/@azure/abort-controller/package.json new file mode 100644 index 0000000..6126bc2 --- /dev/null +++ b/node_modules/@azure/abort-controller/package.json @@ -0,0 +1,104 @@ +{ + "name": "@azure/abort-controller", + "sdk-type": "client", + "version": "1.1.0", + "description": "Microsoft Azure SDK for JavaScript - Aborter", + "main": "./dist/index.js", + "module": "dist-esm/src/index.js", + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && dev-tool run bundle", + "build:types": "downlevel-dts types/src types/3.1", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . 
&& npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "types": "./types/src/index.d.ts", + "typesVersions": { + "<3.6": { + "types/src/*": [ + "types/3.1/*" + ] + } + }, + "files": [ + "dist/", + "dist-esm/src/", + "shims-public.d.ts", + "types/src", + "types/3.1", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=12.0.0" + }, + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "aborter", + "abortsignal", + "cancellation", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md", + "sideEffects": false, + "dependencies": { + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "downlevel-dts": "^0.8.0", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "rimraf": "^3.0.0", + "ts-node": "^10.0.0", + "typescript": "~4.6.0" + } +} diff --git a/node_modules/@azure/abort-controller/shims-public.d.ts b/node_modules/@azure/abort-controller/shims-public.d.ts new file mode 100644 index 0000000..002bec3 --- /dev/null +++ b/node_modules/@azure/abort-controller/shims-public.d.ts @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// forward declaration of Event in case DOM libs are not present. +interface Event {} diff --git a/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts b/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts new file mode 100644 index 0000000..6f935db --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts @@ -0,0 +1,85 @@ +import { AbortSignal, AbortSignalLike } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. + * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export declare class AbortController { + private _signal; + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(parentSignals?: AbortSignalLike[]); + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(...parentSignals: AbortSignalLike[]); + /* + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + readonly signal: AbortSignal; + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort(): void; + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms: number): AbortSignal; +} +//# sourceMappingURL=AbortController.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts b/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts new file mode 100644 index 0000000..53d6a77 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts @@ -0,0 +1,80 @@ +/// +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. 
+ * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export declare class AbortSignal implements AbortSignalLike { + constructor(); + /* + * Status of whether aborted or not. + * + * @readonly + */ + readonly aborted: boolean; + /* + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static readonly none: AbortSignal; + /** + * onabort event listener. + */ + onabort: ((ev?: Event) => any) | null; + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event: Event): boolean; +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +export declare function abortSignal(signal: AbortSignal): void; +//# sourceMappingURL=AbortSignal.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/3.1/index.d.ts b/node_modules/@azure/abort-controller/types/3.1/index.d.ts new file mode 100644 index 0000000..1345f47 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/index.d.ts @@ -0,0 +1,3 @@ +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal, AbortSignalLike } from "./AbortSignal"; +//# sourceMappingURL=index.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/src/AbortController.d.ts b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts new file mode 100644 index 0000000..c07beb9 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts @@ -0,0 +1,85 @@ +import { AbortSignal, AbortSignalLike } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. 
+ * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export declare class AbortController { + private _signal; + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(parentSignals?: AbortSignalLike[]); + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(...parentSignals: AbortSignalLike[]); + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal(): AbortSignal; + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort(): void; + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms: number): AbortSignal; +} +//# sourceMappingURL=AbortController.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map new file mode 100644 index 0000000..cb855af --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortController.d.ts","sourceRoot":"","sources":["../../src/AbortController.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAe,MAAM,eAAe,CAAC;AAE1E;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,qBAAa,eAAe;IAC1B,OAAO,CAAC,OAAO,CAAc;IAE7B;;OAEG;gBACS,aAAa,CAAC,EAAE,eAAe,EAAE;IAC7C;;OAEG;gBACS,GAAG,aAAa,EAAE,eAAe,EAAE;IA2B/C;;;;;OAKG;IACH,IAAW,MAAM,IAAI,WAAW,CAE/B;IAED;;;OAGG;IACH,KAAK,IAAI,IAAI;IAIb;;;OAGG;WACW,OAAO,CAAC,EAAE,EAAE,MAAM,GAAG,WAAW;CAS/C"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts new file mode 100644 index 0000000..7d31cb1 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts @@ -0,0 +1,80 @@ +/// +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. 
+ */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export declare class AbortSignal implements AbortSignalLike { + constructor(); + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted(): boolean; + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none(): AbortSignal; + /** + * onabort event listener. + */ + onabort: ((ev?: Event) => any) | null; + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event: Event): boolean; +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. 
+ * + * @internal + */ +export declare function abortSignal(signal: AbortSignal): void; +//# sourceMappingURL=AbortSignal.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map new file mode 100644 index 0000000..c82bea3 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignal.d.ts","sourceRoot":"","sources":["../../src/AbortSignal.ts"],"names":[],"mappings":";AAWA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT;AAED;;;;;;;;;;;;GAYG;AACH,qBAAa,WAAY,YAAW,eAAe;;IAMjD;;;;OAIG;IACH,IAAW,OAAO,IAAI,OAAO,CAM5B;IAED;;;;OAIG;IACH,WAAkB,IAAI,IAAI,WAAW,CAEpC;IAED;;OAEG;IACI,OAAO,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,CAAQ;IAEpD;;;;;OAKG;IACI,gBAAgB,CAErB,KAAK,EAAE,OAAO,EACd,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,GAChD,IAAI;IASP;;;;;OAKG;IACI,mBAAmB,CAExB,KAAK,EAAE,OAAO,EACd,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,GAChD,IAAI;IAaP;;OAEG;IACH,aAAa,CAAC,MAAM,EAAE,KAAK,GAAG,OAAO;CAKtC;AAED;;;;;;;;GAQG;AAEH,wBAAgB,WAAW,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI,CAoBrD"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/index.d.ts b/node_modules/@azure/abort-controller/types/src/index.d.ts new file mode 100644 index 0000000..e5afaf4 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/index.d.ts @@ -0,0 +1,3 @@ +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal, AbortSignalLike } from "./AbortSignal"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/index.d.ts.map b/node_modules/@azure/abort-controller/types/src/index.d.ts.map new file mode 100644 index 0000000..a062939 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAChE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json b/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json new file mode 100644 index 0000000..7b5aee3 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. 
+{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.18.11" + } + ] +} diff --git a/node_modules/@azure/core-auth/LICENSE b/node_modules/@azure/core-auth/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-auth/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-auth/README.md b/node_modules/@azure/core-auth/README.md new file mode 100644 index 0000000..31ccc7f --- /dev/null +++ b/node_modules/@azure/core-auth/README.md @@ -0,0 +1,78 @@ +# Azure Core Authentication client library for JavaScript + +The `@azure/core-auth` package provides core interfaces and helper methods for authenticating with Azure services using Azure Active Directory and other authentication schemes common across the Azure SDK. As a "core" library, it shouldn't need to be added as a dependency to any user code, only other Azure SDK libraries. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/core-auth +``` + +## Key Concepts + +The `TokenCredential` interface represents a credential capable of providing an authentication token. The `@azure/identity` package contains various credentials that implement the `TokenCredential` interface. + +The `AzureKeyCredential` is a static key-based credential that supports key rotation via the `update` method. Use this when a single secret value is needed for authentication, e.g. when using a shared access key. + +The `AzureNamedKeyCredential` is a static name/key-based credential that supports name and key rotation via the `update` method. Use this when both a secret value and a label are needed, e.g. when using a shared access key and shared access key name. + +The `AzureSASCredential` is a static signature-based credential that supports updating the signature value via the `update` method. Use this when using a shared access signature. 
+ +## Examples + +### AzureKeyCredential + +```js +const { AzureKeyCredential } = require("@azure/core-auth"); + +const credential = new AzureKeyCredential("secret value"); +// prints: "secret value" +console.log(credential.key); +credential.update("other secret value"); +// prints: "other secret value" +console.log(credential.key); +``` + +### AzureNamedKeyCredential + +```js +const { AzureNamedKeyCredential } = require("@azure/core-auth"); + +const credential = new AzureNamedKeyCredential("ManagedPolicy", "secret value"); +// prints: "ManagedPolicy, secret value" +console.log(`${credential.name}, ${credential.key}`); +credential.update("OtherManagedPolicy", "other secret value"); +// prints: "OtherManagedPolicy, other secret value" +console.log(`${credential.name}, ${credential.key}`); +``` + +### AzureSASCredential + +```js +const { AzureSASCredential } = require("@azure/core-auth"); + +const credential = new AzureSASCredential("signature1"); +// prints: "signature1" +console.log(credential.signature); +credential.update("signature2"); +// prints: "signature2" +console.log(credential.signature); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-auth%2FREADME.png) diff --git a/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.d.ts b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.d.ts new file mode 100644 index 0000000..b861382 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.d.ts @@ -0,0 +1,29 @@ +import { KeyCredential } from "./keyCredential.js"; +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + /** + * The value of the key to be used in authentication + */ + get key(): string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} +//# sourceMappingURL=azureKeyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.d.ts.map new file mode 100644 index 0000000..a9b7a6e --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.d.ts","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAEnD;;;GAGG;AACH,qBAAa,kBAAmB,YAAW,aAAa;IACtD,OAAO,CAAC,IAAI,CAAS;IAErB;;OAEG;IACH,IAAW,GAAG,IAAI,MAAM,CAEvB;IAED;;;;;OAKG;gBACS,GAAG,EAAE,MAAM;IAQvB;;;;;;;OAOG;IACI,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;CAGpC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js new file mode 100644 index 0000000..050f0fd --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export class AzureKeyCredential { + _key; + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} +//# sourceMappingURL=azureKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js.map b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js.map new file mode 100644 index 0000000..5bbf703 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.js","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IACrB,IAAI,CAAS;IAErB;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,YAAY,GAAW;QACrB,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;QACpD,CAAC;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,MAAc;QAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { KeyCredential } from \"./keyCredential.js\";\n\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nexport class AzureKeyCredential implements KeyCredential {\n private _key: string;\n\n /**\n * The value of the key to be used in authentication\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n constructor(key: string) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n public update(newKey: string): void {\n this._key = newKey;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.d.ts b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.d.ts new file mode 100644 index 0000000..e1d0f63 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.d.ts @@ -0,0 +1,54 @@ +/** + * Represents a credential defined by a static API name and key. + */ +export interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + /** + * The value of the key to be used in authentication. + */ + get key(): string; + /** + * The value of the name to be used in authentication. + */ + get name(): string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. 
+ * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; +//# sourceMappingURL=azureNamedKeyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.d.ts.map new file mode 100644 index 0000000..f9225a0 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.d.ts","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;CACvB;AAED;;;GAGG;AACH,qBAAa,uBAAwB,YAAW,kBAAkB;IAChE,OAAO,CAAC,IAAI,CAAS;IACrB,OAAO,CAAC,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,GAAG,IAAI,MAAM,CAEvB;IAED;;OAEG;IACH,IAAW,IAAI,IAAI,MAAM,CAExB;IAED;;;;;;OAMG;gBACS,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM;IASrC;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;CAQrD;AAED;;;;GAIG;AACH,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,kBAAkB,CAM1F"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js new file mode 100644 index 0000000..7608850 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "@azure/core-util"; +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export class AzureNamedKeyCredential { + _key; + _name; + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. 
+ */ +export function isNamedKeyCredential(credential) { + return (isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} +//# sourceMappingURL=azureNamedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js.map b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js.map new file mode 100644 index 0000000..ee293c1 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.js","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAgB1D;;;GAGG;AACH,MAAM,OAAO,uBAAuB;IAC1B,IAAI,CAAS;IACb,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;;;;OAMG;IACH,YAAY,IAAY,EAAE,GAAW;QACnC,IAAI,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YAClB,MAAM,IAAI,SAAS,CAAC,wCAAwC,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAED;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAe,EAAE,MAAc;QAC3C,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC;YACxB,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAC;QACtE,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAAC,UAAmB;IACtD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACnD,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;QAClC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CACpC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static API name and key.\n */\nexport interface NamedKeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n /**\n * The value of the API name represented as a string.\n */\n readonly name: string;\n}\n\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nexport class AzureNamedKeyCredential implements NamedKeyCredential {\n private _key: string;\n private _name: string;\n\n /**\n * The value of the key to be used in authentication.\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * The value of the name to be used in authentication.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n constructor(name: string, key: string) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n\n this._name = name;\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n public update(newName: string, newKey: string): void {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n\n this._name = newName;\n this._key = newKey;\n }\n}\n\n/**\n * Tests an object to determine whether 
it implements NamedKeyCredential.\n *\n * @param credential - The assumed NamedKeyCredential to be tested.\n */\nexport function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential {\n return (\n isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureSASCredential.d.ts b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.d.ts new file mode 100644 index 0000000..6e4ebcc --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.d.ts @@ -0,0 +1,43 @@ +/** + * Represents a credential defined by a static shared access signature. + */ +export interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature(): string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; +//# sourceMappingURL=azureSASCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureSASCredential.d.ts.map b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.d.ts.map new file mode 100644 index 0000000..d5b4fc5 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.d.ts","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;CAC5B;AAED;;;GAGG;AACH,qBAAa,kBAAmB,YAAW,aAAa;IACtD,OAAO,CAAC,UAAU,CAAS;IAE3B;;OAEG;IACH,IAAW,SAAS,IAAI,MAAM,CAE7B;IAED;;;;;OAKG;gBACS,SAAS,EAAE,MAAM;IAQ7B;;;;;;;OAOG;IACI,MAAM,CAAC,YAAY,EAAE,MAAM,GAAG,IAAI;CAO1C;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,aAAa,CAIhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js new file mode 100644 index 0000000..a7da3b3 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "@azure/core-util"; +/** + * A static-signature-based credential that supports updating + * the underlying signature value. 
+ */ +export class AzureSASCredential { + _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export function isSASCredential(credential) { + return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} +//# sourceMappingURL=azureSASCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js.map b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js.map new file mode 100644 index 0000000..5e15d1d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.js","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAY1D;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IACrB,UAAU,CAAS;IAE3B;;OAEG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;IAED;;;;;OAKG;IACH,YAAY,SAAiB;QAC3B,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,EAAE,CAAC;YAClB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC;IACjC,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,UAAmB;IACjD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,WAAW,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,SAAS,KAAK,QAAQ,CAC9F,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static shared access signature.\n */\nexport interface SASCredential {\n /**\n * The value of the shared access signature represented as a string\n */\n readonly signature: string;\n}\n\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nexport class AzureSASCredential implements SASCredential {\n private _signature: string;\n\n /**\n * The value of the shared access signature to be used in authentication\n */\n public get signature(): string {\n return this._signature;\n }\n\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n constructor(signature: string) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = signature;\n }\n\n /**\n * 
Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n public update(newSignature: string): void {\n if (!newSignature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = newSignature;\n }\n}\n\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nexport function isSASCredential(credential: unknown): credential is SASCredential {\n return (\n isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/index.d.ts b/node_modules/@azure/core-auth/dist/browser/index.d.ts new file mode 100644 index 0000000..6d05ee8 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/index.d.ts @@ -0,0 +1,7 @@ +export { AzureKeyCredential } from "./azureKeyCredential.js"; +export { KeyCredential, isKeyCredential } from "./keyCredential.js"; +export { AzureNamedKeyCredential, NamedKeyCredential, isNamedKeyCredential, } from "./azureNamedKeyCredential.js"; +export { AzureSASCredential, SASCredential, isSASCredential } from "./azureSASCredential.js"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential, } from "./tokenCredential.js"; +export { TracingContext } from "./tracing.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/index.d.ts.map b/node_modules/@azure/core-auth/dist/browser/index.d.ts.map new file mode 100644 index 0000000..311c49b --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAC7D,OAAO,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACpE,OAAO,EACL,uBAAuB,EACvB,kBAAkB,EAClB,oBAAoB,GACrB,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EACL,eAAe,EACf,eAAe,EACf,WAAW,EACX,iBAAiB,GAClB,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/index.js b/node_modules/@azure/core-auth/dist/browser/index.js new file mode 100644 index 0000000..38b6669 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/index.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AzureKeyCredential } from "./azureKeyCredential.js"; +export { isKeyCredential } from "./keyCredential.js"; +export { AzureNamedKeyCredential, isNamedKeyCredential, } from "./azureNamedKeyCredential.js"; +export { AzureSASCredential, isSASCredential } from "./azureSASCredential.js"; +export { isTokenCredential, } from "./tokenCredential.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/index.js.map b/node_modules/@azure/core-auth/dist/browser/index.js.map new file mode 100644 index 0000000..bcdd705 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAC7D,OAAO,EAAiB,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACpE,OAAO,EACL,uBAAuB,EAEvB,oBAAoB,GACrB,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,kBAAkB,EAAiB,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EAIL,iBAAiB,GAClB,MAAM,sBAAsB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AzureKeyCredential } from \"./azureKeyCredential.js\";\nexport { KeyCredential, isKeyCredential } from \"./keyCredential.js\";\nexport {\n AzureNamedKeyCredential,\n NamedKeyCredential,\n isNamedKeyCredential,\n} from \"./azureNamedKeyCredential.js\";\nexport { AzureSASCredential, SASCredential, isSASCredential } from \"./azureSASCredential.js\";\n\nexport {\n TokenCredential,\n GetTokenOptions,\n AccessToken,\n isTokenCredential,\n} from \"./tokenCredential.js\";\n\nexport { TracingContext } from \"./tracing.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/keyCredential.d.ts b/node_modules/@azure/core-auth/dist/browser/keyCredential.d.ts new file mode 100644 index 0000000..81dbe93 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/keyCredential.d.ts @@ -0,0 +1,16 @@ +/** + * Represents a credential defined by a static API key. + */ +export interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +export declare function isKeyCredential(credential: unknown): credential is KeyCredential; +//# sourceMappingURL=keyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/keyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/browser/keyCredential.d.ts.map new file mode 100644 index 0000000..217502d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/keyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"keyCredential.d.ts","sourceRoot":"","sources":["../../src/keyCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;CACtB;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,aAAa,CAEhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/keyCredential.js b/node_modules/@azure/core-auth/dist/browser/keyCredential.js new file mode 100644 index 0000000..0e1d299 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/keyCredential.js @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { isObjectWithProperties } from "@azure/core-util"; +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +export function isKeyCredential(credential) { + return isObjectWithProperties(credential, ["key"]) && typeof credential.key === "string"; +} +//# sourceMappingURL=keyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/keyCredential.js.map b/node_modules/@azure/core-auth/dist/browser/keyCredential.js.map new file mode 100644 index 0000000..b8fa9f4 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/keyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"keyCredential.js","sourceRoot":"","sources":["../../src/keyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAY1D;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,UAAmB;IACjD,OAAO,sBAAsB,CAAC,UAAU,EAAE,CAAC,KAAK,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ,CAAC;AAC3F,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static API key.\n */\nexport interface KeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n}\n\n/**\n * Tests an object to determine whether it implements KeyCredential.\n *\n * @param credential - The assumed KeyCredential to be tested.\n */\nexport function isKeyCredential(credential: unknown): credential is KeyCredential {\n return isObjectWithProperties(credential, [\"key\"]) && typeof credential.key === \"string\";\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/package.json b/node_modules/@azure/core-auth/dist/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-auth/dist/browser/tokenCredential.d.ts b/node_modules/@azure/core-auth/dist/browser/tokenCredential.d.ts new file mode 100644 index 0000000..bf9a49a --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/tokenCredential.d.ts @@ -0,0 +1,77 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { TracingContext } from "./tracing.js"; +/** + * Represents a credential capable of providing an authentication token. + */ +export interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise; +} +/** + * Defines options for TokenCredential.getToken. + */ +export interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. 
+ */ + tracingOptions?: { + /** + * Tracing Context for the current request. + */ + tracingContext?: TracingContext; + }; + /** + * Claim details to perform the Continuous Access Evaluation authentication flow + */ + claims?: string; + /** + * Indicates whether to enable the Continuous Access Evaluation authentication flow + */ + enableCae?: boolean; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} +/** + * Represents an access token with an expiration time. + */ +export interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; +//# sourceMappingURL=tokenCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/tokenCredential.d.ts.map b/node_modules/@azure/core-auth/dist/browser/tokenCredential.d.ts.map new file mode 100644 index 0000000..d564ffc --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/tokenCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.d.ts","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAE9C;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;;;;;OASG;IACH,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC,CAAC;CAC7F;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,cAAc,CAAC,EAAE;QACf;;WAEG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF;;OAEG;IACH,cAAc,CAAC,EAAE;QACf;;WAEG;QACH,cAAc,CAAC,EAAE,cAAc,CAAC;KACjC,CAAC;IACF;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,kBAAkB,EAAE,MAAM,CAAC;CAC5B;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,eAAe,CAepF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/tokenCredential.js b/node_modules/@azure/core-auth/dist/browser/tokenCredential.js new file mode 100644 index 0000000..5ca7f68 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/tokenCredential.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} +//# sourceMappingURL=tokenCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/tokenCredential.js.map b/node_modules/@azure/core-auth/dist/browser/tokenCredential.js.map new file mode 100644 index 0000000..2f79724 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/tokenCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.js","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA6ElC;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAAC,UAAmB;IACnD,mEAAmE;IACnE,gEAAgE;IAChE,sEAAsE;IACtE,qEAAqE;IACrE,sDAAsD;IACtD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,OAAO,CACL,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;QAC7C,CAAC,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CACjF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TracingContext } from \"./tracing.js\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * Tracing Context for the current request.\n */\n tracingContext?: TracingContext;\n };\n /**\n * Claim details to perform the Continuous Access Evaluation authentication flow\n */\n claims?: string;\n /**\n * Indicates whether to enable the Continuous Access Evaluation authentication flow\n */\n enableCae?: boolean;\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/tracing.d.ts b/node_modules/@azure/core-auth/dist/browser/tracing.d.ts new file mode 100644 index 0000000..c44eeeb --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/tracing.d.ts @@ -0,0 +1,27 @@ +/** + * An interface structurally compatible with OpenTelemetry. + */ +export interface TracingContext { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): TracingContext; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): TracingContext; +} +//# sourceMappingURL=tracing.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/tracing.d.ts.map b/node_modules/@azure/core-auth/dist/browser/tracing.d.ts.map new file mode 100644 index 0000000..ca2b79a --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/tracing.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.d.ts","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAMA;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;;;OAIG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;IAC/B;;;;;;OAMG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,GAAG,cAAc,CAAC;IACtD;;;;;OAKG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,cAAc,CAAC;CAC1C"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/tracing.js b/node_modules/@azure/core-auth/dist/browser/tracing.js new file mode 100644 index 0000000..377718d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/tracing.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/browser/tracing.js.map b/node_modules/@azure/core-auth/dist/browser/tracing.js.map new file mode 100644 index 0000000..fe00d40 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/browser/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// The interfaces in this file should be kept in sync with those\n// found in the `@azure/core-tracing` package.\n\n/**\n * An interface structurally compatible with OpenTelemetry.\n */\nexport interface TracingContext {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): TracingContext;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): TracingContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.d.ts b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.d.ts new file mode 100644 index 0000000..b861382 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.d.ts @@ -0,0 +1,29 @@ +import { KeyCredential } from "./keyCredential.js"; +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + /** + * The value of the key to be used in authentication + */ + get key(): string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} +//# sourceMappingURL=azureKeyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.d.ts.map new file mode 100644 index 0000000..a9b7a6e --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.d.ts","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAEnD;;;GAGG;AACH,qBAAa,kBAAmB,YAAW,aAAa;IACtD,OAAO,CAAC,IAAI,CAAS;IAErB;;OAEG;IACH,IAAW,GAAG,IAAI,MAAM,CAEvB;IAED;;;;;OAKG;gBACS,GAAG,EAAE,MAAM;IAQvB;;;;;;;OAOG;IACI,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;CAGpC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js new file mode 100644 index 0000000..ea21c1d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js @@ -0,0 +1,43 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AzureKeyCredential = void 0; +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +class AzureKeyCredential { + _key; + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} +exports.AzureKeyCredential = AzureKeyCredential; +//# sourceMappingURL=azureKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js.map b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js.map new file mode 100644 index 0000000..bfd7c67 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.js","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAIlC;;;GAGG;AACH,MAAa,kBAAkB;IACrB,IAAI,CAAS;IAErB;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,YAAY,GAAW;QACrB,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;QACpD,CAAC;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,MAAc;QAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF;AAnCD,gDAmCC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { KeyCredential } from \"./keyCredential.js\";\n\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nexport class AzureKeyCredential implements KeyCredential {\n private _key: string;\n\n /**\n * The value of the key to be used in authentication\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n constructor(key: string) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n public update(newKey: string): void {\n this._key = newKey;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.d.ts b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.d.ts new file mode 100644 index 0000000..e1d0f63 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.d.ts @@ -0,0 +1,54 @@ +/** + * Represents a credential defined by a static API name and key. + */ +export interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + /** + * The value of the key to be used in authentication. + */ + get key(): string; + /** + * The value of the name to be used in authentication. + */ + get name(): string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; +//# sourceMappingURL=azureNamedKeyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.d.ts.map new file mode 100644 index 0000000..f9225a0 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.d.ts","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;CACvB;AAED;;;GAGG;AACH,qBAAa,uBAAwB,YAAW,kBAAkB;IAChE,OAAO,CAAC,IAAI,CAAS;IACrB,OAAO,CAAC,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,GAAG,IAAI,MAAM,CAEvB;IAED;;OAEG;IACH,IAAW,IAAI,IAAI,MAAM,CAExB;IAED;;;;;;OAMG;gBACS,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM;IASrC;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;CAQrD;AAED;;;;GAIG;AACH,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,kBAAkB,CAM1F"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js new file mode 100644 index 0000000..e0b73fa --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js @@ -0,0 +1,69 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isNamedKeyCredential = exports.AzureNamedKeyCredential = void 0; +const core_util_1 = require("@azure/core-util"); +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +class AzureNamedKeyCredential { + _key; + _name; + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +exports.AzureNamedKeyCredential = AzureNamedKeyCredential; +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. 
+ */ +function isNamedKeyCredential(credential) { + return ((0, core_util_1.isObjectWithProperties)(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} +exports.isNamedKeyCredential = isNamedKeyCredential; +//# sourceMappingURL=azureNamedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js.map b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js.map new file mode 100644 index 0000000..3a047db --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.js","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,gDAA0D;AAgB1D;;;GAGG;AACH,MAAa,uBAAuB;IAC1B,IAAI,CAAS;IACb,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;;;;OAMG;IACH,YAAY,IAAY,EAAE,GAAW;QACnC,IAAI,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YAClB,MAAM,IAAI,SAAS,CAAC,wCAAwC,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAED;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAe,EAAE,MAAc;QAC3C,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC;YACxB,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAC;QACtE,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF;AAnDD,0DAmDC;AAED;;;;GAIG;AACH,SAAgB,oBAAoB,CAAC,UAAmB;IACtD,OAAO,CACL,IAAA,kCAAsB,EAAC,UAAU,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACnD,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;QAClC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CACpC,CAAC;AACJ,CAAC;AAND,oDAMC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static API name and key.\n */\nexport interface NamedKeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n /**\n * The value of the API name represented as a string.\n */\n readonly name: string;\n}\n\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nexport class AzureNamedKeyCredential implements NamedKeyCredential {\n private _key: string;\n private _name: string;\n\n /**\n * The value of the key to be used in authentication.\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * The value of the name to be used in authentication.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n constructor(name: string, key: string) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n\n this._name = name;\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n public update(newName: string, newKey: string): void {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n\n this._name = newName;\n this._key = 
newKey;\n }\n}\n\n/**\n * Tests an object to determine whether it implements NamedKeyCredential.\n *\n * @param credential - The assumed NamedKeyCredential to be tested.\n */\nexport function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential {\n return (\n isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.d.ts b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.d.ts new file mode 100644 index 0000000..6e4ebcc --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.d.ts @@ -0,0 +1,43 @@ +/** + * Represents a credential defined by a static shared access signature. + */ +export interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature(): string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; +//# sourceMappingURL=azureSASCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.d.ts.map b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.d.ts.map new file mode 100644 index 0000000..d5b4fc5 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.d.ts","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;CAC5B;AAED;;;GAGG;AACH,qBAAa,kBAAmB,YAAW,aAAa;IACtD,OAAO,CAAC,UAAU,CAAS;IAE3B;;OAEG;IACH,IAAW,SAAS,IAAI,MAAM,CAE7B;IAED;;;;;OAKG;gBACS,SAAS,EAAE,MAAM;IAQ7B;;;;;;;OAOG;IACI,MAAM,CAAC,YAAY,EAAE,MAAM,GAAG,IAAI;CAO1C;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,aAAa,CAIhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js new file mode 100644 index 0000000..1c29897 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js @@ -0,0 +1,56 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.isSASCredential = exports.AzureSASCredential = void 0; +const core_util_1 = require("@azure/core-util"); +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +class AzureSASCredential { + _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +exports.AzureSASCredential = AzureSASCredential; +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +function isSASCredential(credential) { + return ((0, core_util_1.isObjectWithProperties)(credential, ["signature"]) && typeof credential.signature === "string"); +} +exports.isSASCredential = isSASCredential; +//# sourceMappingURL=azureSASCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js.map b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js.map new file mode 100644 index 0000000..909f839 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.js","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,gDAA0D;AAY1D;;;GAGG;AACH,MAAa,kBAAkB;IACrB,UAAU,CAAS;IAE3B;;OAEG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;IAED;;;;;OAKG;IACH,YAAY,SAAiB;QAC3B,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,EAAE,CAAC;YAClB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC;IACjC,CAAC;CACF;AAvCD,gDAuCC;AAED;;;;GAIG;AACH,SAAgB,eAAe,CAAC,UAAmB;IACjD,OAAO,CACL,IAAA,kCAAsB,EAAC,UAAU,EAAE,CAAC,WAAW,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,SAAS,KAAK,QAAQ,CAC9F,CAAC;AACJ,CAAC;AAJD,0CAIC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static shared access signature.\n */\nexport interface SASCredential {\n /**\n * The value of the shared access signature represented as a string\n */\n readonly signature: string;\n}\n\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nexport class AzureSASCredential implements SASCredential {\n private _signature: string;\n\n /**\n * The value of the shared access signature to be used in authentication\n */\n public get signature(): string {\n return this._signature;\n }\n\n /**\n * 
Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n constructor(signature: string) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = signature;\n }\n\n /**\n * Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n public update(newSignature: string): void {\n if (!newSignature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = newSignature;\n }\n}\n\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nexport function isSASCredential(credential: unknown): credential is SASCredential {\n return (\n isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/index.d.ts b/node_modules/@azure/core-auth/dist/commonjs/index.d.ts new file mode 100644 index 0000000..6d05ee8 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/index.d.ts @@ -0,0 +1,7 @@ +export { AzureKeyCredential } from "./azureKeyCredential.js"; +export { KeyCredential, isKeyCredential } from "./keyCredential.js"; +export { AzureNamedKeyCredential, NamedKeyCredential, isNamedKeyCredential, } from "./azureNamedKeyCredential.js"; +export { AzureSASCredential, SASCredential, isSASCredential } from "./azureSASCredential.js"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential, } from "./tokenCredential.js"; +export { TracingContext } from "./tracing.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/index.d.ts.map b/node_modules/@azure/core-auth/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000..311c49b --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAC7D,OAAO,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACpE,OAAO,EACL,uBAAuB,EACvB,kBAAkB,EAClB,oBAAoB,GACrB,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EACL,eAAe,EACf,eAAe,EACf,WAAW,EACX,iBAAiB,GAClB,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/index.js b/node_modules/@azure/core-auth/dist/commonjs/index.js new file mode 100644 index 0000000..5e9a66f --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/index.js @@ -0,0 +1,18 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.isTokenCredential = exports.isSASCredential = exports.AzureSASCredential = exports.isNamedKeyCredential = exports.AzureNamedKeyCredential = exports.isKeyCredential = exports.AzureKeyCredential = void 0; +var azureKeyCredential_js_1 = require("./azureKeyCredential.js"); +Object.defineProperty(exports, "AzureKeyCredential", { enumerable: true, get: function () { return azureKeyCredential_js_1.AzureKeyCredential; } }); +var keyCredential_js_1 = require("./keyCredential.js"); +Object.defineProperty(exports, "isKeyCredential", { enumerable: true, get: function () { return keyCredential_js_1.isKeyCredential; } }); +var azureNamedKeyCredential_js_1 = require("./azureNamedKeyCredential.js"); +Object.defineProperty(exports, "AzureNamedKeyCredential", { enumerable: true, get: function () { return azureNamedKeyCredential_js_1.AzureNamedKeyCredential; } }); +Object.defineProperty(exports, "isNamedKeyCredential", { enumerable: true, get: function () { return azureNamedKeyCredential_js_1.isNamedKeyCredential; } }); +var azureSASCredential_js_1 = require("./azureSASCredential.js"); +Object.defineProperty(exports, "AzureSASCredential", { enumerable: true, get: function () { return azureSASCredential_js_1.AzureSASCredential; } }); +Object.defineProperty(exports, "isSASCredential", { enumerable: true, get: function () { return azureSASCredential_js_1.isSASCredential; } }); +var tokenCredential_js_1 = require("./tokenCredential.js"); +Object.defineProperty(exports, "isTokenCredential", { enumerable: true, get: function () { return tokenCredential_js_1.isTokenCredential; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/index.js.map b/node_modules/@azure/core-auth/dist/commonjs/index.js.map new file mode 100644 index 0000000..3279de8 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,iEAA6D;AAApD,2HAAA,kBAAkB,OAAA;AAC3B,uDAAoE;AAA5C,mHAAA,eAAe,OAAA;AACvC,2EAIsC;AAHpC,qIAAA,uBAAuB,OAAA;AAEvB,kIAAA,oBAAoB,OAAA;AAEtB,iEAA6F;AAApF,2HAAA,kBAAkB,OAAA;AAAiB,wHAAA,eAAe,OAAA;AAE3D,2DAK8B;AAD5B,uHAAA,iBAAiB,OAAA","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AzureKeyCredential } from \"./azureKeyCredential.js\";\nexport { KeyCredential, isKeyCredential } from \"./keyCredential.js\";\nexport {\n AzureNamedKeyCredential,\n NamedKeyCredential,\n isNamedKeyCredential,\n} from \"./azureNamedKeyCredential.js\";\nexport { AzureSASCredential, SASCredential, isSASCredential } from \"./azureSASCredential.js\";\n\nexport {\n TokenCredential,\n GetTokenOptions,\n AccessToken,\n isTokenCredential,\n} from \"./tokenCredential.js\";\n\nexport { TracingContext } from \"./tracing.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/keyCredential.d.ts b/node_modules/@azure/core-auth/dist/commonjs/keyCredential.d.ts new file mode 100644 index 0000000..81dbe93 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/keyCredential.d.ts @@ -0,0 +1,16 @@ +/** + * Represents a credential defined by a static API key. 
+ */ +export interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +export declare function isKeyCredential(credential: unknown): credential is KeyCredential; +//# sourceMappingURL=keyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/keyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/commonjs/keyCredential.d.ts.map new file mode 100644 index 0000000..217502d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/keyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"keyCredential.d.ts","sourceRoot":"","sources":["../../src/keyCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;CACtB;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,aAAa,CAEhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/keyCredential.js b/node_modules/@azure/core-auth/dist/commonjs/keyCredential.js new file mode 100644 index 0000000..b21f3d2 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/keyCredential.js @@ -0,0 +1,16 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isKeyCredential = void 0; +const core_util_1 = require("@azure/core-util"); +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +function isKeyCredential(credential) { + return (0, core_util_1.isObjectWithProperties)(credential, ["key"]) && typeof credential.key === "string"; +} +exports.isKeyCredential = isKeyCredential; +//# sourceMappingURL=keyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/keyCredential.js.map b/node_modules/@azure/core-auth/dist/commonjs/keyCredential.js.map new file mode 100644 index 0000000..2f4d926 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/keyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"keyCredential.js","sourceRoot":"","sources":["../../src/keyCredential.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,gDAA0D;AAY1D;;;;GAIG;AACH,SAAgB,eAAe,CAAC,UAAmB;IACjD,OAAO,IAAA,kCAAsB,EAAC,UAAU,EAAE,CAAC,KAAK,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ,CAAC;AAC3F,CAAC;AAFD,0CAEC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static API key.\n */\nexport interface KeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n}\n\n/**\n * Tests an object to determine whether it implements KeyCredential.\n *\n * @param credential - The assumed KeyCredential to be tested.\n */\nexport function isKeyCredential(credential: unknown): credential is KeyCredential {\n return isObjectWithProperties(credential, [\"key\"]) && typeof credential.key === \"string\";\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/package.json b/node_modules/@azure/core-auth/dist/commonjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ 
b/node_modules/@azure/core-auth/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.d.ts b/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.d.ts new file mode 100644 index 0000000..bf9a49a --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.d.ts @@ -0,0 +1,77 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { TracingContext } from "./tracing.js"; +/** + * Represents a credential capable of providing an authentication token. + */ +export interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise; +} +/** + * Defines options for TokenCredential.getToken. + */ +export interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: { + /** + * Tracing Context for the current request. + */ + tracingContext?: TracingContext; + }; + /** + * Claim details to perform the Continuous Access Evaluation authentication flow + */ + claims?: string; + /** + * Indicates whether to enable the Continuous Access Evaluation authentication flow + */ + enableCae?: boolean; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} +/** + * Represents an access token with an expiration time. + */ +export interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. 
+ */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; +//# sourceMappingURL=tokenCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.d.ts.map b/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.d.ts.map new file mode 100644 index 0000000..d564ffc --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.d.ts","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAE9C;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;;;;;OASG;IACH,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC,CAAC;CAC7F;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,cAAc,CAAC,EAAE;QACf;;WAEG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF;;OAEG;IACH,cAAc,CAAC,EAAE;QACf;;WAEG;QACH,cAAc,CAAC,EAAE,cAAc,CAAC;KACjC,CAAC;IACF;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,kBAAkB,EAAE,MAAM,CAAC;CAC5B;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,eAAe,CAepF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js b/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js new file mode 100644 index 0000000..0a16ef9 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js @@ -0,0 +1,23 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isTokenCredential = void 0; +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} +exports.isTokenCredential = isTokenCredential; +//# sourceMappingURL=tokenCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js.map b/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js.map new file mode 100644 index 0000000..86dcbd8 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.js","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AA6ElC;;;;GAIG;AACH,SAAgB,iBAAiB,CAAC,UAAmB;IACnD,mEAAmE;IACnE,gEAAgE;IAChE,sEAAsE;IACtE,qEAAqE;IACrE,sDAAsD;IACtD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,OAAO,CACL,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;QAC7C,CAAC,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CACjF,CAAC;AACJ,CAAC;AAfD,8CAeC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TracingContext } from \"./tracing.js\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * Tracing Context for the current request.\n */\n tracingContext?: TracingContext;\n };\n /**\n * Claim details to perform the Continuous Access Evaluation authentication flow\n */\n claims?: string;\n /**\n * Indicates whether to enable the Continuous Access Evaluation authentication flow\n */\n enableCae?: boolean;\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/tracing.d.ts b/node_modules/@azure/core-auth/dist/commonjs/tracing.d.ts new file mode 100644 index 0000000..c44eeeb --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tracing.d.ts @@ -0,0 +1,27 @@ +/** + * An interface structurally compatible with OpenTelemetry. + */ +export interface TracingContext { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): TracingContext; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): TracingContext; +} +//# sourceMappingURL=tracing.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/tracing.d.ts.map b/node_modules/@azure/core-auth/dist/commonjs/tracing.d.ts.map new file mode 100644 index 0000000..ca2b79a --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tracing.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.d.ts","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAMA;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;;;OAIG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;IAC/B;;;;;;OAMG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,GAAG,cAAc,CAAC;IACtD;;;;;OAKG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,cAAc,CAAC;CAC1C"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/tracing.js b/node_modules/@azure/core-auth/dist/commonjs/tracing.js new file mode 100644 index 0000000..faac44f --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tracing.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/tracing.js.map b/node_modules/@azure/core-auth/dist/commonjs/tracing.js.map new file mode 100644 index 0000000..38dce3b --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// The interfaces in this file should be kept in sync with those\n// found in the `@azure/core-tracing` package.\n\n/**\n * An interface structurally compatible with OpenTelemetry.\n */\nexport interface TracingContext {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): TracingContext;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): TracingContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-auth/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 0000000..22735db --- /dev/null +++ b/node_modules/@azure/core-auth/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. +{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.42.3" + } + ] +} diff --git a/node_modules/@azure/core-auth/dist/core-auth.d.ts b/node_modules/@azure/core-auth/dist/core-auth.d.ts new file mode 100644 index 0000000..7e0b6d8 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/core-auth.d.ts @@ -0,0 +1,254 @@ +import { AbortSignalLike } from '@azure/abort-controller'; + +/** + * Represents an access token with an expiration time. + */ +export declare interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} + +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + /** + * The value of the key to be used in authentication + */ + get key(): string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} + +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + /** + * The value of the key to be used in authentication. + */ + get key(): string; + /** + * The value of the name to be used in authentication. + */ + get name(): string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} + +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature(): string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} + +/** + * Defines options for TokenCredential.getToken. + */ +export declare interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: { + /** + * Tracing Context for the current request. + */ + tracingContext?: TracingContext; + }; + /** + * Claim details to perform the Continuous Access Evaluation authentication flow + */ + claims?: string; + /** + * Indicates whether to enable the Continuous Access Evaluation authentication flow + */ + enableCae?: boolean; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} + +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +export declare function isKeyCredential(credential: unknown): credential is KeyCredential; + +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; + +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. 
+ */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; + +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; + +/** + * Represents a credential defined by a static API key. + */ +export declare interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} + +/** + * Represents a credential defined by a static API name and key. + */ +export declare interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} + +/** + * Represents a credential defined by a static shared access signature. + */ +export declare interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} + +/** + * Represents a credential capable of providing an authentication token. + */ +export declare interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise; +} + +/** + * An interface structurally compatible with OpenTelemetry. + */ +export declare interface TracingContext { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): TracingContext; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): TracingContext; +} + +export { } diff --git a/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.d.ts b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.d.ts new file mode 100644 index 0000000..b861382 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.d.ts @@ -0,0 +1,29 @@ +import { KeyCredential } from "./keyCredential.js"; +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + /** + * The value of the key to be used in authentication + */ + get key(): string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} +//# sourceMappingURL=azureKeyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.d.ts.map new file mode 100644 index 0000000..a9b7a6e --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.d.ts","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAEnD;;;GAGG;AACH,qBAAa,kBAAmB,YAAW,aAAa;IACtD,OAAO,CAAC,IAAI,CAAS;IAErB;;OAEG;IACH,IAAW,GAAG,IAAI,MAAM,CAEvB;IAED;;;;;OAKG;gBACS,GAAG,EAAE,MAAM;IAQvB;;;;;;;OAOG;IACI,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;CAGpC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js new file mode 100644 index 0000000..050f0fd --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export class AzureKeyCredential { + _key; + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} +//# sourceMappingURL=azureKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js.map b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js.map new file mode 100644 index 0000000..5bbf703 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.js","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IACrB,IAAI,CAAS;IAErB;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,YAAY,GAAW;QACrB,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;QACpD,CAAC;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,MAAc;QAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { KeyCredential } from \"./keyCredential.js\";\n\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nexport class AzureKeyCredential implements KeyCredential {\n private _key: string;\n\n /**\n * The value of the key to be used in authentication\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n constructor(key: string) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n public update(newKey: string): void {\n this._key = newKey;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.d.ts b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.d.ts new file mode 100644 index 0000000..e1d0f63 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.d.ts @@ -0,0 +1,54 @@ +/** + * Represents a credential defined by a static API name and key. + */ +export interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + /** + * The value of the key to be used in authentication. + */ + get key(): string; + /** + * The value of the name to be used in authentication. + */ + get name(): string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. 
+ * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; +//# sourceMappingURL=azureNamedKeyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.d.ts.map new file mode 100644 index 0000000..f9225a0 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.d.ts","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;CACvB;AAED;;;GAGG;AACH,qBAAa,uBAAwB,YAAW,kBAAkB;IAChE,OAAO,CAAC,IAAI,CAAS;IACrB,OAAO,CAAC,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,GAAG,IAAI,MAAM,CAEvB;IAED;;OAEG;IACH,IAAW,IAAI,IAAI,MAAM,CAExB;IAED;;;;;;OAMG;gBACS,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM;IASrC;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;CAQrD;AAED;;;;GAIG;AACH,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,kBAAkB,CAM1F"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js new file mode 100644 index 0000000..7608850 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "@azure/core-util"; +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export class AzureNamedKeyCredential { + _key; + _name; + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. 
+ */ +export function isNamedKeyCredential(credential) { + return (isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} +//# sourceMappingURL=azureNamedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js.map b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js.map new file mode 100644 index 0000000..ee293c1 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.js","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAgB1D;;;GAGG;AACH,MAAM,OAAO,uBAAuB;IAC1B,IAAI,CAAS;IACb,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;;;;OAMG;IACH,YAAY,IAAY,EAAE,GAAW;QACnC,IAAI,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YAClB,MAAM,IAAI,SAAS,CAAC,wCAAwC,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAED;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAe,EAAE,MAAc;QAC3C,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC;YACxB,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAC;QACtE,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAAC,UAAmB;IACtD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACnD,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;QAClC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CACpC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static API name and key.\n */\nexport interface NamedKeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n /**\n * The value of the API name represented as a string.\n */\n readonly name: string;\n}\n\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nexport class AzureNamedKeyCredential implements NamedKeyCredential {\n private _key: string;\n private _name: string;\n\n /**\n * The value of the key to be used in authentication.\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * The value of the name to be used in authentication.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n constructor(name: string, key: string) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n\n this._name = name;\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n public update(newName: string, newKey: string): void {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n\n this._name = newName;\n this._key = newKey;\n }\n}\n\n/**\n * Tests an object to determine whether it 
implements NamedKeyCredential.\n *\n * @param credential - The assumed NamedKeyCredential to be tested.\n */\nexport function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential {\n return (\n isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureSASCredential.d.ts b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.d.ts new file mode 100644 index 0000000..6e4ebcc --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.d.ts @@ -0,0 +1,43 @@ +/** + * Represents a credential defined by a static shared access signature. + */ +export interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature(): string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; +//# sourceMappingURL=azureSASCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureSASCredential.d.ts.map b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.d.ts.map new file mode 100644 index 0000000..d5b4fc5 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.d.ts","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;CAC5B;AAED;;;GAGG;AACH,qBAAa,kBAAmB,YAAW,aAAa;IACtD,OAAO,CAAC,UAAU,CAAS;IAE3B;;OAEG;IACH,IAAW,SAAS,IAAI,MAAM,CAE7B;IAED;;;;;OAKG;gBACS,SAAS,EAAE,MAAM;IAQ7B;;;;;;;OAOG;IACI,MAAM,CAAC,YAAY,EAAE,MAAM,GAAG,IAAI;CAO1C;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,aAAa,CAIhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js new file mode 100644 index 0000000..a7da3b3 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "@azure/core-util"; +/** + * A static-signature-based credential that supports updating + * the underlying signature value. 
+ */ +export class AzureSASCredential { + _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export function isSASCredential(credential) { + return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} +//# sourceMappingURL=azureSASCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js.map b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js.map new file mode 100644 index 0000000..5e15d1d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.js","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAY1D;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IACrB,UAAU,CAAS;IAE3B;;OAEG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;IAED;;;;;OAKG;IACH,YAAY,SAAiB;QAC3B,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,EAAE,CAAC;YAClB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC;IACjC,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,UAAmB;IACjD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,WAAW,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,SAAS,KAAK,QAAQ,CAC9F,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static shared access signature.\n */\nexport interface SASCredential {\n /**\n * The value of the shared access signature represented as a string\n */\n readonly signature: string;\n}\n\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nexport class AzureSASCredential implements SASCredential {\n private _signature: string;\n\n /**\n * The value of the shared access signature to be used in authentication\n */\n public get signature(): string {\n return this._signature;\n }\n\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n constructor(signature: string) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = signature;\n }\n\n /**\n * Change the value 
of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n public update(newSignature: string): void {\n if (!newSignature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = newSignature;\n }\n}\n\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nexport function isSASCredential(credential: unknown): credential is SASCredential {\n return (\n isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/index.d.ts b/node_modules/@azure/core-auth/dist/esm/index.d.ts new file mode 100644 index 0000000..6d05ee8 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/index.d.ts @@ -0,0 +1,7 @@ +export { AzureKeyCredential } from "./azureKeyCredential.js"; +export { KeyCredential, isKeyCredential } from "./keyCredential.js"; +export { AzureNamedKeyCredential, NamedKeyCredential, isNamedKeyCredential, } from "./azureNamedKeyCredential.js"; +export { AzureSASCredential, SASCredential, isSASCredential } from "./azureSASCredential.js"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential, } from "./tokenCredential.js"; +export { TracingContext } from "./tracing.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/index.d.ts.map b/node_modules/@azure/core-auth/dist/esm/index.d.ts.map new file mode 100644 index 0000000..311c49b --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAC7D,OAAO,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACpE,OAAO,EACL,uBAAuB,EACvB,kBAAkB,EAClB,oBAAoB,GACrB,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EACL,eAAe,EACf,eAAe,EACf,WAAW,EACX,iBAAiB,GAClB,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/index.js b/node_modules/@azure/core-auth/dist/esm/index.js new file mode 100644 index 0000000..38b6669 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/index.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AzureKeyCredential } from "./azureKeyCredential.js"; +export { isKeyCredential } from "./keyCredential.js"; +export { AzureNamedKeyCredential, isNamedKeyCredential, } from "./azureNamedKeyCredential.js"; +export { AzureSASCredential, isSASCredential } from "./azureSASCredential.js"; +export { isTokenCredential, } from "./tokenCredential.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/index.js.map b/node_modules/@azure/core-auth/dist/esm/index.js.map new file mode 100644 index 0000000..bcdd705 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAC7D,OAAO,EAAiB,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACpE,OAAO,EACL,uBAAuB,EAEvB,oBAAoB,GACrB,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,kBAAkB,EAAiB,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EAIL,iBAAiB,GAClB,MAAM,sBAAsB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AzureKeyCredential } from \"./azureKeyCredential.js\";\nexport { KeyCredential, isKeyCredential } from \"./keyCredential.js\";\nexport {\n AzureNamedKeyCredential,\n NamedKeyCredential,\n isNamedKeyCredential,\n} from \"./azureNamedKeyCredential.js\";\nexport { AzureSASCredential, SASCredential, isSASCredential } from \"./azureSASCredential.js\";\n\nexport {\n TokenCredential,\n GetTokenOptions,\n AccessToken,\n isTokenCredential,\n} from \"./tokenCredential.js\";\n\nexport { TracingContext } from \"./tracing.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/keyCredential.d.ts b/node_modules/@azure/core-auth/dist/esm/keyCredential.d.ts new file mode 100644 index 0000000..81dbe93 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/keyCredential.d.ts @@ -0,0 +1,16 @@ +/** + * Represents a credential defined by a static API key. + */ +export interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +export declare function isKeyCredential(credential: unknown): credential is KeyCredential; +//# sourceMappingURL=keyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/keyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/esm/keyCredential.d.ts.map new file mode 100644 index 0000000..217502d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/keyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"keyCredential.d.ts","sourceRoot":"","sources":["../../src/keyCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;CACtB;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,aAAa,CAEhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/keyCredential.js b/node_modules/@azure/core-auth/dist/esm/keyCredential.js new file mode 100644 index 0000000..0e1d299 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/keyCredential.js @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "@azure/core-util"; +/** + * Tests an object to determine whether it implements KeyCredential. 
+ * + * @param credential - The assumed KeyCredential to be tested. + */ +export function isKeyCredential(credential) { + return isObjectWithProperties(credential, ["key"]) && typeof credential.key === "string"; +} +//# sourceMappingURL=keyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/keyCredential.js.map b/node_modules/@azure/core-auth/dist/esm/keyCredential.js.map new file mode 100644 index 0000000..b8fa9f4 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/keyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"keyCredential.js","sourceRoot":"","sources":["../../src/keyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAY1D;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,UAAmB;IACjD,OAAO,sBAAsB,CAAC,UAAU,EAAE,CAAC,KAAK,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ,CAAC;AAC3F,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static API key.\n */\nexport interface KeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n}\n\n/**\n * Tests an object to determine whether it implements KeyCredential.\n *\n * @param credential - The assumed KeyCredential to be tested.\n */\nexport function isKeyCredential(credential: unknown): credential is KeyCredential {\n return isObjectWithProperties(credential, [\"key\"]) && typeof credential.key === \"string\";\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/package.json b/node_modules/@azure/core-auth/dist/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-auth/dist/esm/tokenCredential.d.ts b/node_modules/@azure/core-auth/dist/esm/tokenCredential.d.ts new file mode 100644 index 0000000..bf9a49a --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/tokenCredential.d.ts @@ -0,0 +1,77 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { TracingContext } from "./tracing.js"; +/** + * Represents a credential capable of providing an authentication token. + */ +export interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise; +} +/** + * Defines options for TokenCredential.getToken. + */ +export interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: { + /** + * Tracing Context for the current request. 
+ */ + tracingContext?: TracingContext; + }; + /** + * Claim details to perform the Continuous Access Evaluation authentication flow + */ + claims?: string; + /** + * Indicates whether to enable the Continuous Access Evaluation authentication flow + */ + enableCae?: boolean; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} +/** + * Represents an access token with an expiration time. + */ +export interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; +//# sourceMappingURL=tokenCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/tokenCredential.d.ts.map b/node_modules/@azure/core-auth/dist/esm/tokenCredential.d.ts.map new file mode 100644 index 0000000..d564ffc --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/tokenCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.d.ts","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAE9C;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;;;;;OASG;IACH,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC,CAAC;CAC7F;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,cAAc,CAAC,EAAE;QACf;;WAEG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF;;OAEG;IACH,cAAc,CAAC,EAAE;QACf;;WAEG;QACH,cAAc,CAAC,EAAE,cAAc,CAAC;KACjC,CAAC;IACF;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,kBAAkB,EAAE,MAAM,CAAC;CAC5B;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,eAAe,CAepF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/tokenCredential.js b/node_modules/@azure/core-auth/dist/esm/tokenCredential.js new file mode 100644 index 0000000..5ca7f68 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/tokenCredential.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} +//# sourceMappingURL=tokenCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/tokenCredential.js.map b/node_modules/@azure/core-auth/dist/esm/tokenCredential.js.map new file mode 100644 index 0000000..2f79724 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/tokenCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.js","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA6ElC;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAAC,UAAmB;IACnD,mEAAmE;IACnE,gEAAgE;IAChE,sEAAsE;IACtE,qEAAqE;IACrE,sDAAsD;IACtD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,OAAO,CACL,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;QAC7C,CAAC,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CACjF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TracingContext } from \"./tracing.js\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * Tracing Context for the current request.\n */\n tracingContext?: TracingContext;\n };\n /**\n * Claim details to perform the Continuous Access Evaluation authentication flow\n */\n claims?: string;\n /**\n * Indicates whether to enable the Continuous Access Evaluation authentication flow\n */\n enableCae?: boolean;\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/tracing.d.ts b/node_modules/@azure/core-auth/dist/esm/tracing.d.ts new file mode 100644 index 0000000..c44eeeb --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/tracing.d.ts @@ -0,0 +1,27 @@ +/** + * An interface structurally compatible with OpenTelemetry. + */ +export interface TracingContext { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): TracingContext; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): TracingContext; +} +//# sourceMappingURL=tracing.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/tracing.d.ts.map b/node_modules/@azure/core-auth/dist/esm/tracing.d.ts.map new file mode 100644 index 0000000..ca2b79a --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/tracing.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.d.ts","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAMA;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;;;OAIG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;IAC/B;;;;;;OAMG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,GAAG,cAAc,CAAC;IACtD;;;;;OAKG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,cAAc,CAAC;CAC1C"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/tracing.js b/node_modules/@azure/core-auth/dist/esm/tracing.js new file mode 100644 index 0000000..377718d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/tracing.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/esm/tracing.js.map b/node_modules/@azure/core-auth/dist/esm/tracing.js.map new file mode 100644 index 0000000..fe00d40 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/esm/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// The interfaces in this file should be kept in sync with those\n// found in the `@azure/core-tracing` package.\n\n/**\n * An interface structurally compatible with OpenTelemetry.\n */\nexport interface TracingContext {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): TracingContext;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): TracingContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.d.ts b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.d.ts new file mode 100644 index 0000000..b861382 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.d.ts @@ -0,0 +1,29 @@ +import { KeyCredential } from "./keyCredential.js"; +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + /** + * The value of the key to be used in authentication + */ + get key(): string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} +//# sourceMappingURL=azureKeyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.d.ts.map new file mode 100644 index 0000000..a9b7a6e --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.d.ts","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAEnD;;;GAGG;AACH,qBAAa,kBAAmB,YAAW,aAAa;IACtD,OAAO,CAAC,IAAI,CAAS;IAErB;;OAEG;IACH,IAAW,GAAG,IAAI,MAAM,CAEvB;IAED;;;;;OAKG;gBACS,GAAG,EAAE,MAAM;IAQvB;;;;;;;OAOG;IACI,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;CAGpC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js new file mode 100644 index 0000000..050f0fd --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export class AzureKeyCredential { + _key; + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} +//# sourceMappingURL=azureKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js.map b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js.map new file mode 100644 index 0000000..5bbf703 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.js","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IACrB,IAAI,CAAS;IAErB;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,YAAY,GAAW;QACrB,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;QACpD,CAAC;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,MAAc;QAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { KeyCredential } from \"./keyCredential.js\";\n\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nexport class AzureKeyCredential implements KeyCredential {\n private _key: string;\n\n /**\n * The value of the key to be used in authentication\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n constructor(key: string) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n public update(newKey: string): void {\n this._key = newKey;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.d.ts b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.d.ts new file mode 100644 index 0000000..e1d0f63 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.d.ts @@ -0,0 +1,54 @@ +/** + * Represents a credential defined by a static API name and key. + */ +export interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + /** + * The value of the key to be used in authentication. + */ + get key(): string; + /** + * The value of the name to be used in authentication. + */ + get name(): string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; +//# sourceMappingURL=azureNamedKeyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.d.ts.map new file mode 100644 index 0000000..f9225a0 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.d.ts","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;CACvB;AAED;;;GAGG;AACH,qBAAa,uBAAwB,YAAW,kBAAkB;IAChE,OAAO,CAAC,IAAI,CAAS;IACrB,OAAO,CAAC,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,GAAG,IAAI,MAAM,CAEvB;IAED;;OAEG;IACH,IAAW,IAAI,IAAI,MAAM,CAExB;IAED;;;;;;OAMG;gBACS,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM;IASrC;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;CAQrD;AAED;;;;GAIG;AACH,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,kBAAkB,CAM1F"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js new file mode 100644 index 0000000..7608850 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "@azure/core-util"; +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export class AzureNamedKeyCredential { + _key; + _name; + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. 
+ */ +export function isNamedKeyCredential(credential) { + return (isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} +//# sourceMappingURL=azureNamedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js.map b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js.map new file mode 100644 index 0000000..ee293c1 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.js","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAgB1D;;;GAGG;AACH,MAAM,OAAO,uBAAuB;IAC1B,IAAI,CAAS;IACb,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;;;;OAMG;IACH,YAAY,IAAY,EAAE,GAAW;QACnC,IAAI,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YAClB,MAAM,IAAI,SAAS,CAAC,wCAAwC,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAED;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAe,EAAE,MAAc;QAC3C,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC;YACxB,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAC;QACtE,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAAC,UAAmB;IACtD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACnD,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;QAClC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CACpC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static API name and key.\n */\nexport interface NamedKeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n /**\n * The value of the API name represented as a string.\n */\n readonly name: string;\n}\n\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nexport class AzureNamedKeyCredential implements NamedKeyCredential {\n private _key: string;\n private _name: string;\n\n /**\n * The value of the key to be used in authentication.\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * The value of the name to be used in authentication.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n constructor(name: string, key: string) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n\n this._name = name;\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n public update(newName: string, newKey: string): void {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n\n this._name = newName;\n this._key = newKey;\n }\n}\n\n/**\n * Tests an object to 
determine whether it implements NamedKeyCredential.\n *\n * @param credential - The assumed NamedKeyCredential to be tested.\n */\nexport function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential {\n return (\n isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.d.ts b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.d.ts new file mode 100644 index 0000000..6e4ebcc --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.d.ts @@ -0,0 +1,43 @@ +/** + * Represents a credential defined by a static shared access signature. + */ +export interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature(): string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; +//# sourceMappingURL=azureSASCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.d.ts.map b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.d.ts.map new file mode 100644 index 0000000..d5b4fc5 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.d.ts","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;CAC5B;AAED;;;GAGG;AACH,qBAAa,kBAAmB,YAAW,aAAa;IACtD,OAAO,CAAC,UAAU,CAAS;IAE3B;;OAEG;IACH,IAAW,SAAS,IAAI,MAAM,CAE7B;IAED;;;;;OAKG;gBACS,SAAS,EAAE,MAAM;IAQ7B;;;;;;;OAOG;IACI,MAAM,CAAC,YAAY,EAAE,MAAM,GAAG,IAAI;CAO1C;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,aAAa,CAIhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js new file mode 100644 index 0000000..a7da3b3 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "@azure/core-util"; +/** + * A static-signature-based credential that supports updating + * the underlying signature value. 
+ */ +export class AzureSASCredential { + _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export function isSASCredential(credential) { + return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} +//# sourceMappingURL=azureSASCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js.map b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js.map new file mode 100644 index 0000000..5e15d1d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.js","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAY1D;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IACrB,UAAU,CAAS;IAE3B;;OAEG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;IAED;;;;;OAKG;IACH,YAAY,SAAiB;QAC3B,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,EAAE,CAAC;YAClB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC;IACjC,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,UAAmB;IACjD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,WAAW,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,SAAS,KAAK,QAAQ,CAC9F,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static shared access signature.\n */\nexport interface SASCredential {\n /**\n * The value of the shared access signature represented as a string\n */\n readonly signature: string;\n}\n\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nexport class AzureSASCredential implements SASCredential {\n private _signature: string;\n\n /**\n * The value of the shared access signature to be used in authentication\n */\n public get signature(): string {\n return this._signature;\n }\n\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n constructor(signature: string) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = signature;\n 
}\n\n /**\n * Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n public update(newSignature: string): void {\n if (!newSignature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = newSignature;\n }\n}\n\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nexport function isSASCredential(credential: unknown): credential is SASCredential {\n return (\n isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/index.d.ts b/node_modules/@azure/core-auth/dist/react-native/index.d.ts new file mode 100644 index 0000000..6d05ee8 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/index.d.ts @@ -0,0 +1,7 @@ +export { AzureKeyCredential } from "./azureKeyCredential.js"; +export { KeyCredential, isKeyCredential } from "./keyCredential.js"; +export { AzureNamedKeyCredential, NamedKeyCredential, isNamedKeyCredential, } from "./azureNamedKeyCredential.js"; +export { AzureSASCredential, SASCredential, isSASCredential } from "./azureSASCredential.js"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential, } from "./tokenCredential.js"; +export { TracingContext } from "./tracing.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/index.d.ts.map b/node_modules/@azure/core-auth/dist/react-native/index.d.ts.map new file mode 100644 index 0000000..311c49b --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAC7D,OAAO,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACpE,OAAO,EACL,uBAAuB,EACvB,kBAAkB,EAClB,oBAAoB,GACrB,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EACL,eAAe,EACf,eAAe,EACf,WAAW,EACX,iBAAiB,GAClB,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/index.js b/node_modules/@azure/core-auth/dist/react-native/index.js new file mode 100644 index 0000000..38b6669 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/index.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AzureKeyCredential } from "./azureKeyCredential.js"; +export { isKeyCredential } from "./keyCredential.js"; +export { AzureNamedKeyCredential, isNamedKeyCredential, } from "./azureNamedKeyCredential.js"; +export { AzureSASCredential, isSASCredential } from "./azureSASCredential.js"; +export { isTokenCredential, } from "./tokenCredential.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/index.js.map b/node_modules/@azure/core-auth/dist/react-native/index.js.map new file mode 100644 index 0000000..bcdd705 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAC7D,OAAO,EAAiB,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACpE,OAAO,EACL,uBAAuB,EAEvB,oBAAoB,GACrB,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,kBAAkB,EAAiB,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EAIL,iBAAiB,GAClB,MAAM,sBAAsB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AzureKeyCredential } from \"./azureKeyCredential.js\";\nexport { KeyCredential, isKeyCredential } from \"./keyCredential.js\";\nexport {\n AzureNamedKeyCredential,\n NamedKeyCredential,\n isNamedKeyCredential,\n} from \"./azureNamedKeyCredential.js\";\nexport { AzureSASCredential, SASCredential, isSASCredential } from \"./azureSASCredential.js\";\n\nexport {\n TokenCredential,\n GetTokenOptions,\n AccessToken,\n isTokenCredential,\n} from \"./tokenCredential.js\";\n\nexport { TracingContext } from \"./tracing.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/keyCredential.d.ts b/node_modules/@azure/core-auth/dist/react-native/keyCredential.d.ts new file mode 100644 index 0000000..81dbe93 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/keyCredential.d.ts @@ -0,0 +1,16 @@ +/** + * Represents a credential defined by a static API key. + */ +export interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +export declare function isKeyCredential(credential: unknown): credential is KeyCredential; +//# sourceMappingURL=keyCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/keyCredential.d.ts.map b/node_modules/@azure/core-auth/dist/react-native/keyCredential.d.ts.map new file mode 100644 index 0000000..217502d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/keyCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"keyCredential.d.ts","sourceRoot":"","sources":["../../src/keyCredential.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;CACtB;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,aAAa,CAEhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/keyCredential.js b/node_modules/@azure/core-auth/dist/react-native/keyCredential.js new file mode 100644 index 0000000..0e1d299 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/keyCredential.js @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { isObjectWithProperties } from "@azure/core-util"; +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +export function isKeyCredential(credential) { + return isObjectWithProperties(credential, ["key"]) && typeof credential.key === "string"; +} +//# sourceMappingURL=keyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/keyCredential.js.map b/node_modules/@azure/core-auth/dist/react-native/keyCredential.js.map new file mode 100644 index 0000000..b8fa9f4 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/keyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"keyCredential.js","sourceRoot":"","sources":["../../src/keyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAY1D;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,UAAmB;IACjD,OAAO,sBAAsB,CAAC,UAAU,EAAE,CAAC,KAAK,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ,CAAC;AAC3F,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"@azure/core-util\";\n\n/**\n * Represents a credential defined by a static API key.\n */\nexport interface KeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n}\n\n/**\n * Tests an object to determine whether it implements KeyCredential.\n *\n * @param credential - The assumed KeyCredential to be tested.\n */\nexport function isKeyCredential(credential: unknown): credential is KeyCredential {\n return isObjectWithProperties(credential, [\"key\"]) && typeof credential.key === \"string\";\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/package.json b/node_modules/@azure/core-auth/dist/react-native/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-auth/dist/react-native/tokenCredential.d.ts b/node_modules/@azure/core-auth/dist/react-native/tokenCredential.d.ts new file mode 100644 index 0000000..bf9a49a --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/tokenCredential.d.ts @@ -0,0 +1,77 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { TracingContext } from "./tracing.js"; +/** + * Represents a credential capable of providing an authentication token. + */ +export interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise; +} +/** + * Defines options for TokenCredential.getToken. + */ +export interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. 
+ */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: { + /** + * Tracing Context for the current request. + */ + tracingContext?: TracingContext; + }; + /** + * Claim details to perform the Continuous Access Evaluation authentication flow + */ + claims?: string; + /** + * Indicates whether to enable the Continuous Access Evaluation authentication flow + */ + enableCae?: boolean; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} +/** + * Represents an access token with an expiration time. + */ +export interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; +//# sourceMappingURL=tokenCredential.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/tokenCredential.d.ts.map b/node_modules/@azure/core-auth/dist/react-native/tokenCredential.d.ts.map new file mode 100644 index 0000000..d564ffc --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/tokenCredential.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.d.ts","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAE9C;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;;;;;OASG;IACH,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC,CAAC;CAC7F;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,cAAc,CAAC,EAAE;QACf;;WAEG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF;;OAEG;IACH,cAAc,CAAC,EAAE;QACf;;WAEG;QACH,cAAc,CAAC,EAAE,cAAc,CAAC;KACjC,CAAC;IACF;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,kBAAkB,EAAE,MAAM,CAAC;CAC5B;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,IAAI,eAAe,CAepF"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/tokenCredential.js b/node_modules/@azure/core-auth/dist/react-native/tokenCredential.js new file mode 100644 index 0000000..5ca7f68 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/tokenCredential.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} +//# sourceMappingURL=tokenCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/tokenCredential.js.map b/node_modules/@azure/core-auth/dist/react-native/tokenCredential.js.map new file mode 100644 index 0000000..2f79724 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/tokenCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.js","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA6ElC;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAAC,UAAmB;IACnD,mEAAmE;IACnE,gEAAgE;IAChE,sEAAsE;IACtE,qEAAqE;IACrE,sDAAsD;IACtD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,OAAO,CACL,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;QAC7C,CAAC,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CACjF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TracingContext } from \"./tracing.js\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * Tracing Context for the current request.\n */\n tracingContext?: TracingContext;\n };\n /**\n * Claim details to perform the Continuous Access Evaluation authentication flow\n */\n claims?: string;\n /**\n * Indicates whether to enable the Continuous Access Evaluation authentication flow\n */\n enableCae?: boolean;\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/tracing.d.ts b/node_modules/@azure/core-auth/dist/react-native/tracing.d.ts new file mode 100644 index 0000000..c44eeeb --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/tracing.d.ts @@ -0,0 +1,27 @@ +/** + * An interface structurally compatible with OpenTelemetry. + */ +export interface TracingContext { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): TracingContext; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): TracingContext; +} +//# sourceMappingURL=tracing.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/tracing.d.ts.map b/node_modules/@azure/core-auth/dist/react-native/tracing.d.ts.map new file mode 100644 index 0000000..ca2b79a --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/tracing.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.d.ts","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAMA;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;;;OAIG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;IAC/B;;;;;;OAMG;IACH,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,GAAG,cAAc,CAAC;IACtD;;;;;OAKG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,cAAc,CAAC;CAC1C"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/tracing.js b/node_modules/@azure/core-auth/dist/react-native/tracing.js new file mode 100644 index 0000000..377718d --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/tracing.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/react-native/tracing.js.map b/node_modules/@azure/core-auth/dist/react-native/tracing.js.map new file mode 100644 index 0000000..fe00d40 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/react-native/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// The interfaces in this file should be kept in sync with those\n// found in the `@azure/core-tracing` package.\n\n/**\n * An interface structurally compatible with OpenTelemetry.\n */\nexport interface TracingContext {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): TracingContext;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): TracingContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/LICENSE b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
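The @azure/core-auth files vendored above declare the static credential classes (AzureKeyCredential, AzureNamedKeyCredential, AzureSASCredential) and the type guards (isTokenCredential, isKeyCredential, isNamedKeyCredential, isSASCredential) that @actions/cache pulls in transitively through @azure/storage-blob. The vendored sources themselves need no changes; the sketch below is illustrative only and is not part of the diff — it simply exercises the exports whose signatures appear in the .d.ts files above, using made-up key and signature values.

```ts
// Illustrative sketch only — not part of the vendored diff.
// Exercises the credential classes and type guards declared in the
// @azure/core-auth files above; the key/signature strings are placeholders.
import {
  AzureKeyCredential,
  AzureSASCredential,
  isTokenCredential,
  isSASCredential,
  type KeyCredential,
  type SASCredential,
  type TokenCredential,
} from "@azure/core-auth";

type AnyCredential = KeyCredential | SASCredential | TokenCredential;

// A static API-key credential; update() rotates the key in place and the
// new value takes effect on the next request that reads `key`.
const keyCredential = new AzureKeyCredential("initial-key");
keyCredential.update("rotated-key");

// A shared-access-signature credential works the same way.
const sasCredential = new AzureSASCredential("sv=2021-08-06&sig=placeholder");

function describeCredential(credential: AnyCredential): string {
  // The type guards narrow an arbitrary credential to the matching interface.
  if (isTokenCredential(credential)) {
    return "token credential (getToken-based)";
  }
  if (isSASCredential(credential)) {
    return `SAS credential (${credential.signature.length} chars)`;
  }
  return "key credential";
}

console.log(describeCredential(keyCredential)); // "key credential"
console.log(describeCredential(sasCredential)); // "SAS credential (…)"
```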
diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/README.md b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/README.md new file mode 100644 index 0000000..2c8a197 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/README.md @@ -0,0 +1,69 @@ +# Azure Abort Controller client library for JavaScript + +The `@azure/abort-controller` package provides `AbortSignalLike` interface and +`AbortError` classes to make it easier to work with the +[AbortController](https://developer.mozilla.org/docs/Web/API/AbortController) + and the `AbortSignal` used by +[fetch](https://developer.mozilla.org/docs/Web/API/Fetch_API) built into modern JavaScript platforms. + +Customers of Azure SDK for JavaScript in general do not need to use this library. Instead they +use `AbortController` and `AbortSignal` provided by their platforms and pass the abort signals to Azure SDK operations. + +Key links: + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller) +- [Package (npm)](https://www.npmjs.com/package/@azure/abort-controller) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/overview/azure/abort-controller-readme) + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/abort-controller +``` + +## Key Concepts + +Use `AbortController` to create an `AbortSignal` which can then be passed to Azure SDK operations to cancel +pending work. The `AbortSignal` can be accessed via the `signal` property on an instantiated `AbortController`. +An `AbortSignal` can also be returned directly from a static method, e.g. `AbortSignal.timeout(100)`. +that is cancelled after 100 milliseconds. + +## Examples + +The below examples assume that `doAsyncWork` is a function that takes a bag of properties, one of which is +of the abort signal. + +### Example 1 - basic usage + +```js +const controller = new AbortController(); +doAsyncWork({ abortSignal: controller.signal }); + +// at some point later +controller.abort(); +``` + +### Example 2 - Aborting with timeout + +```js +const signal = AbortSignal.timeout(1000); +doAsyncWork({ abortSignal: signal }); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fabort-controller%2FREADME.png) diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/abort-controller.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/abort-controller.d.ts new file mode 100644 index 0000000..902fd4d --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/abort-controller.d.ts @@ -0,0 +1,42 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. 
+ * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} + +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export declare interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} + +export { } diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike 
} from "./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/package.json b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js new file mode 100644 index 0000000..c7a0749 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js @@ -0,0 +1,31 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map new file mode 100644 index 0000000..0514343 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAa,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF;AALD,gCAKC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js new file mode 100644 index 0000000..aa54840 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map new file mode 100644 index 0000000..5fea0c4 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { 
+ } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.js new file mode 100644 index 0000000..b589ef0 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AbortError = void 0; +var AbortError_js_1 = require("./AbortError.js"); +Object.defineProperty(exports, "AbortError", { enumerable: true, get: function () { return AbortError_js_1.AbortError; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.js.map new file mode 100644 index 0000000..48ef08c --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAMlC,iDAA6C;AAApC,2GAAA,UAAU,OAAA","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/package.json b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 0000000..22735db --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. 
+{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.42.3" + } + ] +} diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# 
sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/package.json b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts new 
file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.d.ts b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.js b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.js.map b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/package.json b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/package.json b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/package.json new file mode 100644 index 0000000..d8375e5 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/@azure/abort-controller/package.json @@ -0,0 +1,116 @@ +{ + "name": "@azure/abort-controller", + "sdk-type": "client", + "version": "2.1.1", + "description": "Microsoft Azure SDK for JavaScript - Aborter", + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "sideEffects": false, + "type": "module", + "main": "./dist/commonjs/index.js", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=18.0.0" + }, + "keywords": [ + "azure", + "aborter", + "abortsignal", + "cancellation", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,mjs,cjs,json}\"", + "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && 
api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,mjs,cjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.11.1", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-auth/package.json b/node_modules/@azure/core-auth/package.json new file mode 100644 index 0000000..faf8695 --- /dev/null +++ b/node_modules/@azure/core-auth/package.json @@ -0,0 +1,112 @@ +{ + "name": "@azure/core-auth", + "version": "1.7.1", + "description": "Provides low-level interfaces and helper methods for authentication in Azure SDK", + "sdk-type": "client", + "type": "module", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "authentication", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=18.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-auth/README.md", + "sideEffects": false, + "scripts": { + "build:samples": "echo Obsolete", + 
"build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.1.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.0.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.11.1", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-http/LICENSE b/node_modules/@azure/core-http/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-http/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-http/README.md b/node_modules/@azure/core-http/README.md new file mode 100644 index 0000000..207739c --- /dev/null +++ b/node_modules/@azure/core-http/README.md @@ -0,0 +1,72 @@ +# Azure Core HTTP client library for JavaScript + +This is the core HTTP pipeline for Azure SDK JavaScript libraries which work in the browser and Node.js. This library is primarily intended to be used in code generated by [AutoRest](https://github.com/Azure/Autorest) and [`autorest.typescript`](https://github.com/Azure/autorest.typescript). + +## Getting started + +### Currently supported environments + +- [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule) +- Latest versions of Safari, Chrome, Edge, and Firefox. + +See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +### Installation + +This package is primarily used in generated code and not meant to be consumed directly by end users. + +## Key concepts + +You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-http/docs/architectureOverview.md). + +## Examples + +Examples can be found in the `samples` folder. + +## Next steps + +- Build this library (`core-http`). For more information on how to build project in this repo, please refer to the [Contributing Guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md). + +- The code in `samples\node-sample.ts` shows how to create a `ServiceClient` instance with a test `TokenCredential` implementation and use the client instance to perform a `GET` operation from the Azure management service endpoint for subscriptions. To run the code, first obtain an access token to the Azure management service. + +One easy way to get an access token is using [Azure CLI](https://docs.microsoft.com/cli/azure/?view=azure-cli-latest) + +1. Sign in +```shell +az login +``` +2. Select the subscription to use +```shell +az account set -s +``` +3. Obtain an access token +```shell +az account get-access-token --resource=https://management.azure.com +``` + +### NodeJS + +- Set values of `subscriptionId` and `token` variable in `samples/node-sample.ts` + +- Change directory to samples folder, compile the TypeScript code, then run the sample + +``` +cd samples +tsc node-sample.ts +node node-sample.js +``` + +### Browser + +- Set values of `subscriptionId` and `token` variable in `samples/index.js` +- Follow the instructions of [JavaScript Bundling Guide using Parcel](https://github.com/Azure/azure-sdk-for-js/blob/main/documentation/Bundling.md#using-parcel) to build and run the code in the browser. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). 
+ +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-http%2FREADME.png) diff --git a/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js new file mode 100644 index 0000000..5deff57 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// NOTE: we've moved this code into core-tracing but these functions +// were a part of the GA'd library and can't be removed until the next major +// release. They currently get called always, even if tracing is not enabled. +import { createSpanFunction as coreTracingCreateSpanFunction } from "@azure/core-tracing"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +export function createSpanFunction(args) { + return coreTracingCreateSpanFunction(args); +} +//# sourceMappingURL=createSpanLegacy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map new file mode 100644 index 0000000..9380697 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpanLegacy.js","sourceRoot":"","sources":["../../src/createSpanLegacy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,oEAAoE;AACpE,4EAA4E;AAC5E,6EAA6E;AAE7E,OAAO,EAAQ,kBAAkB,IAAI,6BAA6B,EAAE,MAAM,qBAAqB,CAAC;AAoBhG;;;;;;;;GAQG;AACH,MAAM,UAAU,kBAAkB,CAChC,IAAgB;IAKhB,OAAO,6BAA6B,CAAC,IAAI,CAAC,CAAC;AAC7C,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// NOTE: we've moved this code into core-tracing but these functions\n// were a part of the GA'd library and can't be removed until the next major\n// release. They currently get called always, even if tracing is not enabled.\n\nimport { Span, createSpanFunction as coreTracingCreateSpanFunction } from \"@azure/core-tracing\";\nimport { OperationOptions } from \"./operationOptions\";\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. Use core-tracing instead.\n * @hidden\n */\nexport interface SpanConfig {\n /**\n * Package name prefix\n */\n packagePrefix: string;\n /**\n * Service namespace\n */\n namespace: string;\n}\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. 
Use createSpanFunction in core-tracing.\n * @hidden\n\n * @param spanConfig - The name of the operation being performed.\n * @param tracingOptions - The options for the underlying http request.\n */\nexport function createSpanFunction(\n args: SpanConfig\n): (\n operationName: string,\n operationOptions: T\n) => { span: Span; updatedOptions: T } {\n return coreTracingCreateSpanFunction(args);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js new file mode 100644 index 0000000..1ac13dd --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +export const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export class ExpiringAccessTokenCache { + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { + this.cachedToken = undefined; + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} +//# sourceMappingURL=accessTokenCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map new file mode 100644 index 0000000..8786e71 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenCache.js","sourceRoot":"","sources":["../../../src/credentials/accessTokenCache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,CAAC,YAAY;AAqB/D;;;;;;GAMG;AACH,MAAM,OAAO,wBAAwB;IAInC;;;OAGG;IACH,YAAY,uBAA+B,oBAAoB;QANvD,gBAAW,GAAiB,SAAS,CAAC;QAO5C,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;IACnD,CAAC;IAED;;;OAGG;IACH,cAAc,CAAC,WAAoC;QACjD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;IACjC,CAAC;IAED;;OAEG;IACH,cAAc;QACZ,IACE,IAAI,CAAC,WAAW;YAChB,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAC7E;YACA,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;SAC9B;QAED,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken } from \"@azure/core-auth\";\n\n/**\n * Defines the default token refresh buffer duration.\n */\nexport const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes\n\n/**\n * Provides a cache for an AccessToken that was that\n * was returned from a TokenCredential.\n */\nexport interface AccessTokenCache {\n /**\n * Sets the cached token.\n *\n * @param accessToken - The {@link AccessToken} to be cached or null to\n * clear the cached token.\n */\n setCachedToken(accessToken: AccessToken | undefined): void;\n\n /**\n * Returns the cached {@link AccessToken} or undefined if nothing is cached.\n */\n getCachedToken(): AccessToken | undefined;\n}\n\n/**\n * Provides an {@link AccessTokenCache} implementation which clears\n * the cached {@link AccessToken}'s after the expiresOnTimestamp has\n * passed.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class ExpiringAccessTokenCache implements AccessTokenCache {\n private tokenRefreshBufferMs: number;\n private cachedToken?: AccessToken = undefined;\n\n /**\n * Constructs an instance of {@link ExpiringAccessTokenCache} with\n * an optional expiration buffer time.\n */\n constructor(tokenRefreshBufferMs: number = TokenRefreshBufferMs) {\n this.tokenRefreshBufferMs = tokenRefreshBufferMs;\n }\n\n /**\n * Saves an access token into the internal in-memory cache.\n * @param accessToken - Access token or undefined to clear the cache.\n */\n setCachedToken(accessToken: AccessToken | undefined): void {\n this.cachedToken = accessToken;\n }\n\n /**\n * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon.\n */\n getCachedToken(): AccessToken | undefined {\n if (\n this.cachedToken &&\n Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp\n ) {\n this.cachedToken = undefined;\n }\n\n return this.cachedToken;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js new file mode 100644 index 
0000000..a866fb9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export class AccessTokenRefresher { + constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { + this.credential = credential; + this.scopes = scopes; + this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; + this.lastCalled = 0; + } + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady() { + // We're only ready for a new refresh if the required milliseconds have passed. + return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); + } + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + async getToken(options) { + this.lastCalled = Date.now(); + const token = await this.credential.getToken(this.scopes, options); + this.promise = undefined; + return token || undefined; + } + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options) { + if (!this.promise) { + this.promise = this.getToken(options); + } + return this.promise; + } +} +//# sourceMappingURL=accessTokenRefresher.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map new file mode 100644 index 0000000..3388d9b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenRefresher.js","sourceRoot":"","sources":["../../../src/credentials/accessTokenRefresher.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;;GAIG;AACH,MAAM,OAAO,oBAAoB;IAI/B,YACU,UAA2B,EAC3B,MAAyB,EACzB,uCAA+C,KAAK;QAFpD,eAAU,GAAV,UAAU,CAAiB;QAC3B,WAAM,GAAN,MAAM,CAAmB;QACzB,yCAAoC,GAApC,oCAAoC,CAAgB;QALtD,eAAU,GAAG,CAAC,CAAC;IAMpB,CAAC;IAEJ;;;OAGG;IACI,OAAO;QACZ,+EAA+E;QAC/E,OAAO,CACL,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,oCAAoC,CAC7F,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACK,KAAK,CAAC,QAAQ,CAAC,OAAwB;QAC7C,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACnE,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;QACzB,OAAO,KAAK,IAAI,SAAS,CAAC;IAC5B,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,OAAwB;QACrC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\n\n/**\n * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class AccessTokenRefresher {\n private promise: Promise | 
undefined;\n private lastCalled = 0;\n\n constructor(\n private credential: TokenCredential,\n private scopes: string | string[],\n private requiredMillisecondsBeforeNewRefresh: number = 30000\n ) {}\n\n /**\n * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying\n * that we are ready for a new refresh.\n */\n public isReady(): boolean {\n // We're only ready for a new refresh if the required milliseconds have passed.\n return (\n !this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh\n );\n }\n\n /**\n * Stores the time in which it is called,\n * then requests a new token,\n * then sets this.promise to undefined,\n * then returns the token.\n */\n private async getToken(options: GetTokenOptions): Promise {\n this.lastCalled = Date.now();\n const token = await this.credential.getToken(this.scopes, options);\n this.promise = undefined;\n return token || undefined;\n }\n\n /**\n * Requests a new token if we're not currently waiting for a new token.\n * Returns null if the required time between each call hasn't been reached.\n */\n public refresh(options: GetTokenOptions): Promise {\n if (!this.promise) {\n this.promise = this.getToken(options);\n }\n\n return this.promise;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js new file mode 100644 index 0000000..d3f2a4f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "../httpHeaders"; +/** + * Authenticates to a service using an API key. + */ +export class ApiKeyCredentials { + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource) { + if (!webResource) { + return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + return Promise.resolve(webResource); + } +} +//# sourceMappingURL=apiKeyCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map new file mode 100644 index 0000000..db7ff15 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.js","sourceRoot":"","sources":["../../../src/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAkB7C;;GAEG;AACH,MAAM,OAAO,iBAAiB;IAU5B;;OAEG;IACH,YAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,0HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,uEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,MAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAI,GAAG,GAAG,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters 
that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js new file mode 100644 index 0000000..fc49c72 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +import { HttpHeaders } from "../httpHeaders"; +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +export class BasicAuthenticationCredentials { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". 
+ * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`; + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} +//# sourceMappingURL=basicAuthenticationCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map new file mode 100644 index 0000000..c512900 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.js","sourceRoot":"","sources":["../../../src/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,MAAM,MAAM,gBAAgB,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAI7C,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAE7C;;GAEG;AACH,MAAM,OAAO,8BAA8B;IAiBzC;;;;;;OAMG;IACH,YACE,QAAgB,EAChB,QAAgB,EAChB,sBAA8B,4BAA4B;QAhB5D;;;WAGG;QACH,wBAAmB,GAAW,4BAA4B,CAAC;QAczD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAA4B;QACtC,MAAM,WAAW,GAAG,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,kBAAkB,GAAG,GAAG,IAAI,CAAC,mBAAmB,IAAI,MAAM,CAAC,YAAY,CAAC,WAAW,CAAC,EAAE,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\n/**\n * A simple {@link 
ServiceClientCredential} that authenticates with a username and a password.\n */\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n /**\n * Username\n */\n userName: string;\n\n /**\n * Password\n */\n password: string;\n\n /**\n * Authorization scheme. Defaults to \"Basic\".\n * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes\n */\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @param userName - User name.\n * @param password - Password.\n * @param authorizationScheme - The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js new file mode 100644 index 0000000..3cfbd7a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=credentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map new file mode 100644 index 0000000..64678c6 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.js","sourceRoot":"","sources":["../../../src/credentials/credentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A function that receives a challenge and resolves a promise with a string token.\n * @deprecated The Authenticator type is not currently in use.\n */\nexport type Authenticator = (challenge: unknown) => Promise;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js new file mode 100644 index 0000000..f44565f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=serviceClientCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map new file mode 100644 index 0000000..48908f3 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.js","sourceRoot":"","sources":["../../../src/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header).\n */\nexport interface ServiceClientCredentials {\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike/request to be signed.\n * @returns The signed request object;\n */\n signRequest(webResource: WebResourceLike): Promise;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js new file mode 100644 index 0000000..ac50243 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} +//# sourceMappingURL=topicCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map new file mode 100644 index 0000000..b4ff2de --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.js","sourceRoot":"","sources":["../../../src/credentials/topicCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAA2B,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAEjF;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,iBAAiB;IACrD;;;;OAIG;IACH,YAAY,QAAgB;QAC1B,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,MAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,KAAK,CAAC,OAAO,CAAC,CAAC;IACjB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ApiKeyCredentialOptions, ApiKeyCredentials } from \"./apiKeyCredentials\";\n\n/**\n * A {@link TopicCredentials} object used for Azure Event Grid.\n */\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @param topicKey - The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js new file mode 100644 index 0000000..1f5e7fa --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map new file mode 100644 index 0000000..99be6d9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.js","sourceRoot":"","sources":["../../src/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { XhrHttpClient as DefaultHttpClient } from \"./xhrHttpClient\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js new file mode 100644 index 0000000..79177d5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map new file mode 100644 index 0000000..4e2660b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.js","sourceRoot":"","sources":["../../src/defaultHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { NodeFetchHttpClient as DefaultHttpClient } from \"./nodeFetchHttpClient\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClient.js b/node_modules/@azure/core-http/dist-esm/src/httpClient.js new file mode 100644 index 0000000..8908881 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=httpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map new file mode 100644 index 0000000..eeb169e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.js","sourceRoot":"","sources":["../../src/httpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy } from \"./policies/requestPolicy\";\n\n/**\n * An interface that can send HttpRequests and receive promised HttpResponses.\n */\nexport interface HttpClient extends RequestPolicy {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js new file mode 100644 index 0000000..48ec59a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { DefaultHttpClient } from "./defaultHttpClient"; +let cachedHttpClient; +export function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new DefaultHttpClient(); + } + return cachedHttpClient; +} +//# sourceMappingURL=httpClientCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map new file mode 100644 index 0000000..fd7907a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClientCache.js","sourceRoot":"","sources":["../../src/httpClientCache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAGxD,IAAI,gBAAwC,CAAC;AAE7C,MAAM,UAAU,0BAA0B;IACxC,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,IAAI,iBAAiB,EAAE,CAAC;KAC5C;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\n\nlet cachedHttpClient: HttpClient | undefined;\n\nexport function getCachedDefaultHttpClient(): HttpClient {\n if (!cachedHttpClient) {\n cachedHttpClient = new DefaultHttpClient();\n }\n\n return cachedHttpClient;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js new file mode 100644 index 0000000..7f7716e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +export function isHttpHeadersLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.rawHeaders === "function" && + typeof castObject.clone === "function" && + typeof castObject.get === "function" && + typeof castObject.set === "function" && + typeof castObject.contains === "function" && + typeof castObject.remove === "function" && + typeof castObject.headersArray === "function" && + typeof castObject.headerValues === "function" && + typeof castObject.headerNames === "function" && + typeof castObject.toJson === "function") { + return true; + } + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +export class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString().trim(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map new file mode 100644 index 0000000..d3ae618 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.js","sourceRoot":"","sources":["../../src/httpHeaders.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4ED,MAAM,UAAU,iBAAiB,CAAC,MAAgB;IAChD,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAWlB,CAAC;QACF,IACE,OAAO,UAAU,CAAC,UAAU,KAAK,UAAU;YAC3C,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU;YACtC,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;YACpC,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;YACpC,OAAO,UAAU,CAAC,QAAQ,KAAK,UAAU;YACzC,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU;YACvC,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;YAC7C,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;YAC7C,OAAO,UAAU,CAAC,WAAW,KAAK,UAAU;YAC5C,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU,EACvC;YACA,OAAO,IAAI,CAAC;SACb;KACF;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,WAAW;IAGtB,YAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,MAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;IACH,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE;SACrC,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,GAAG,CAAC,UAAkB;QAC3B,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;IAC5C,CAAC;IAED;;OAEG;IACI,QAAQ,CAAC,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACI,MAAM,CAAC,UAAkB;QAC9B,MAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,UAAU;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;IAC7C,CAAC;IAED;;OAEG;IACI,YAAY;QACjB,MAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACI,YAAY;QACjB,MAAM,YAAY,GAAa,EAAE,CAAC;QAClC,
MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,UAAsC,EAAE;QACpD,MAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,IAAI,OAAO,CAAC,YAAY,EAAE;YACxB,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aACpC;SACF;aAAM;YACL,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aAClD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAC7D,CAAC;IAED;;OAEG;IACI,KAAK;QACV,MAAM,sBAAsB,GAAmB,EAAE,CAAC;QAClD,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,sBAAsB,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SACpD;QACD,OAAO,IAAI,WAAW,CAAC,sBAAsB,CAAC,CAAC;IACjD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string): string {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(options?: { preserveCase?: boolean }): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n rawHeaders: unknown;\n clone: unknown;\n get: unknown;\n set: unknown;\n contains: unknown;\n remove: unknown;\n headersArray: unknown;\n headerValues: unknown;\n headerNames: unknown;\n toJson: unknown;\n };\n if (\n typeof castObject.rawHeaders === \"function\" &&\n typeof castObject.clone === \"function\" &&\n typeof castObject.get === \"function\" &&\n typeof castObject.set === \"function\" &&\n typeof castObject.contains === \"function\" &&\n typeof castObject.remove === \"function\" &&\n typeof castObject.headersArray === \"function\" &&\n typeof castObject.headerValues === \"function\" &&\n typeof castObject.headerNames === \"function\" &&\n typeof castObject.toJson === \"function\"\n ) {\n return true;\n }\n }\n\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders implements HttpHeadersLike {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString().trim(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n return this.toJson({ preserveCase: true });\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header values that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(options: { preserveCase?: boolean } = {}): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n if (options.preserveCase) {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name] = header.value;\n }\n } else {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[getHeaderKey(header.name)] = header.value;\n }\n }\n return result;\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson({ preserveCase: true }));\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n const resultPreservingCasing: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n resultPreservingCasing[header.name] = header.value;\n }\n return new HttpHeaders(resultPreservingCasing);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js new file mode 100644 index 0000000..540684d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=httpOperationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map new file mode 100644 index 0000000..abf8659 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.js","sourceRoot":"","sources":["../../src/httpOperationResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { WebResourceLike } from \"./webResource\";\n\n/**\n * The properties on an HTTP response which will always be present.\n */\nexport interface HttpResponse {\n /**\n * The raw request\n */\n request: WebResourceLike;\n\n /**\n * The HTTP response status (e.g. 200)\n */\n status: number;\n\n /**\n * The HTTP response headers.\n */\n headers: HttpHeadersLike;\n}\n\ndeclare global {\n /**\n * Stub declaration of the browser-only Blob type.\n * Full type information can be obtained by including \"lib\": [\"dom\"] in tsconfig.json.\n */\n interface Blob {}\n}\n\n/**\n * Wrapper object for http request and response. Deserialized object is stored in\n * the `parsedBody` property when the response body is received in JSON or XML.\n */\nexport interface HttpOperationResponse extends HttpResponse {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders?: { [key: string]: any };\n\n /**\n * The response body as text (string format)\n */\n bodyAsText?: string | null;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody?: any;\n\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always undefined in node.js.\n */\n blobBody?: Promise;\n\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n}\n\n/**\n * The flattened response to a REST call.\n * Contains the underlying {@link HttpOperationResponse} as well as\n * the merged properties of the `parsedBody`, `parsedHeaders`, etc.\n */\nexport interface RestResponse {\n /**\n * The underlying HTTP response containing both raw and deserialized response data.\n */\n _response: HttpOperationResponse;\n\n /**\n * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body.\n */\n [key: string]: any;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js new file mode 100644 index 0000000..4e70194 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. 
+ */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +//# sourceMappingURL=httpPipelineLogLevel.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map new file mode 100644 index 0000000..168e6dc --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.js","sourceRoot":"","sources":["../../src/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAN,IAAY,oBAoBX;AApBD,WAAY,oBAAoB;IAC9B;;OAEG;IACH,6DAAG,CAAA;IAEH;;OAEG;IACH,iEAAK,CAAA;IAEL;;OAEG;IACH,qEAAO,CAAA;IAEP;;OAEG;IACH,+DAAI,CAAA;AACN,CAAC,EApBW,oBAAoB,KAApB,oBAAoB,QAoB/B","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js new file mode 100644 index 0000000..6f61f81 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export class ConsoleHttpPipelineLogger { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel) { + this.minimumLogLevel = minimumLogLevel; + } + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. 
+ */ + log(logLevel, message) { + const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + } +} +//# sourceMappingURL=httpPipelineLogger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map new file mode 100644 index 0000000..f4119d4 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.js","sourceRoot":"","sources":["../../src/httpPipelineLogger.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAoB9D;;GAEG;AACH,MAAM,OAAO,yBAAyB;IACpC;;;OAGG;IACH,YAAmB,eAAqC;QAArC,oBAAe,GAAf,eAAe,CAAsB;IAAG,CAAC;IAE5D;;;;OAIG;IACH,GAAG,CAAC,QAA8B,EAAE,OAAe;QACjD,MAAM,UAAU,GAAG,GAAG,oBAAoB,CAAC,QAAQ,CAAC,KAAK,OAAO,EAAE,CAAC;QACnE,QAAQ,QAAQ,EAAE;YAChB,KAAK,oBAAoB,CAAC,KAAK;gBAC7B,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC1B,MAAM;YAER,KAAK,oBAAoB,CAAC,OAAO;gBAC/B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;gBACzB,MAAM;YAER,KAAK,oBAAoB,CAAC,IAAI;gBAC5B,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;gBACxB,MAAM;SACT;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpPipelineLogLevel } from \"./httpPipelineLogLevel\";\n\n/**\n * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages\n * that can be used for debugging purposes.\n */\nexport interface HttpPipelineLogger {\n /**\n * The log level threshold for what logs will be logged.\n */\n minimumLogLevel: HttpPipelineLogLevel;\n\n /**\n * Log the provided message.\n * @param logLevel - The HttpLogDetailLevel associated with this message.\n * @param message - The message to log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * A HttpPipelineLogger that will send its logs to the console.\n */\nexport class ConsoleHttpPipelineLogger implements HttpPipelineLogger {\n /**\n * Create a new ConsoleHttpPipelineLogger.\n * @param minimumLogLevel - The log level threshold for what logs will be logged.\n */\n constructor(public minimumLogLevel: HttpPipelineLogLevel) {}\n\n /**\n * Log the provided message.\n * @param logLevel - The HttpLogDetailLevel associated with this message.\n * @param message - The message to log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void {\n const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`;\n switch (logLevel) {\n case HttpPipelineLogLevel.ERROR:\n console.error(logMessage);\n break;\n\n case HttpPipelineLogLevel.WARNING:\n console.warn(logMessage);\n break;\n\n case HttpPipelineLogLevel.INFO:\n console.log(logMessage);\n break;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/index.js b/node_modules/@azure/core-http/dist-esm/src/index.js new file mode 100644 index 0000000..f0a3a05 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/index.js @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+/* eslint-disable-next-line @typescript-eslint/triple-slash-reference */ +/// +export { WebResource, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpHeaders } from "./httpHeaders"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { ServiceClient, flattenResponse, createPipelineFromOptions, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicyOptions, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy } from "./policies/redirectPolicy"; +export { keepAlivePolicy } from "./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { tracingPolicy } from "./policies/tracingPolicy"; +export { MapperType, Serializer, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isDuration, } from "./util/utils"; +export { isNode } from "@azure/core-util"; +export { URLBuilder, URLQuery } from "./url"; +export { delay } from "@azure/core-util"; +// legacy exports. Use core-tracing instead (and remove on next major version update of core-http). 
+export { createSpanFunction } from "./createSpanLegacy"; +// Credentials +export { isTokenCredential } from "@azure/core-auth"; +export { ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials } from "./credentials/apiKeyCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/index.js.map b/node_modules/@azure/core-http/dist-esm/src/index.js.map new file mode 100644 index 0000000..4bdf725 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,wEAAwE;AACxE,yCAAyC;AAEzC,OAAO,EACL,WAAW,GAQZ,MAAM,eAAe,CAAC;AAEvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD,OAAO,EAAc,WAAW,EAAmC,MAAM,eAAe,CAAC;AAGzF,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC,OAAO,EAGL,oCAAoC,GACrC,MAAM,oBAAoB,CAAC;AAS5B,OAAO,EACL,aAAa,EAEb,eAAe,EACf,yBAAyB,GAG1B,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAoB,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EACL,iBAAiB,EAGjB,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAgB,SAAS,EAAE,MAAM,mCAAmC,CAAC;AACpG,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,cAAc,EAAmB,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAoB,MAAM,4BAA4B,CAAC;AAC/E,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,eAAe,EACf,wBAAwB,GAGzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,qBAAqB,EAErB,uBAAuB,GAExB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAwB,MAAM,0BAA0B,CAAC;AAC/E,OAAO,EACL,UAAU,EAcV,UAAU,EAEV,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EAET,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAE7C,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AACzC,mGAAmG;AACnG,OAAO,EAAE,kBAAkB,EAAc,MAAM,oBAAoB,CAAC;AAEpE,cAAc;AACd,OAAO,EAAiD,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpG,OAAO,EAAoB,wBAAwB,EAAE,MAAM,gCAAgC,CAAC;AAC5F,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC1E,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,iCAAiC,CAAC;AAE7F,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAGlE,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,WAAW,EAAqB,MAAM,0BAA0B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/* eslint-disable-next-line @typescript-eslint/triple-slash-reference */\n/// \n\nexport {\n WebResource,\n WebResourceLike,\n HttpRequestBody,\n RequestPrepareOptions,\n HttpMethods,\n ParameterValue,\n RequestOptionsBase,\n TransferProgressEvent,\n} from \"./webResource\";\nexport { CommonResponse, CommonRequestInit, CommonRequestInfo } from \"./nodeFetchHttpClient\";\nexport { DefaultHttpClient } from 
\"./defaultHttpClient\";\nexport { HttpClient } from \"./httpClient\";\nexport { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from \"./httpHeaders\";\nexport { HttpOperationResponse, HttpResponse, RestResponse } from \"./httpOperationResponse\";\nexport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nexport { HttpPipelineLogLevel } from \"./httpPipelineLogLevel\";\nexport { RestError } from \"./restError\";\nexport { OperationArguments } from \"./operationArguments\";\nexport {\n OperationOptions,\n OperationRequestOptions,\n operationOptionsToRequestOptionsBase,\n} from \"./operationOptions\";\nexport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n ParameterPath,\n} from \"./operationParameter\";\nexport { OperationResponse } from \"./operationResponse\";\nexport { OperationSpec } from \"./operationSpec\";\nexport {\n ServiceClient,\n ServiceClientOptions,\n flattenResponse,\n createPipelineFromOptions,\n ProxySettings,\n ProxyOptions,\n} from \"./serviceClient\";\nexport { PipelineOptions, InternalPipelineOptions } from \"./pipelineOptions\";\nexport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nexport { Constants } from \"./util/constants\";\nexport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nexport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nexport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nexport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nexport { exponentialRetryPolicy, RetryOptions, RetryMode } from \"./policies/exponentialRetryPolicy\";\nexport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nexport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nexport { getDefaultProxySettings, proxyPolicy } from \"./policies/proxyPolicy\";\nexport { redirectPolicy, RedirectOptions } from \"./policies/redirectPolicy\";\nexport { keepAlivePolicy, KeepAliveOptions } from \"./policies/keepAlivePolicy\";\nexport { disableResponseDecompressionPolicy } from \"./policies/disableResponseDecompressionPolicy\";\nexport { signingPolicy } from \"./policies/signingPolicy\";\nexport {\n userAgentPolicy,\n getDefaultUserAgentValue,\n UserAgentOptions,\n TelemetryInfo,\n} from \"./policies/userAgentPolicy\";\nexport {\n deserializationPolicy,\n DeserializationOptions,\n deserializeResponseBody,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nexport { tracingPolicy, TracingPolicyOptions } from \"./policies/tracingPolicy\";\nexport {\n MapperType,\n SimpleMapperType,\n CompositeMapperType,\n DictionaryMapperType,\n SequenceMapperType,\n EnumMapperType,\n Mapper,\n BaseMapper,\n CompositeMapper,\n SequenceMapper,\n DictionaryMapper,\n EnumMapper,\n MapperConstraints,\n PolymorphicDiscriminator,\n Serializer,\n UrlParameterValue,\n serializeObject,\n} from \"./serializer\";\nexport {\n stripRequest,\n stripResponse,\n executePromisesSequentially,\n generateUuid,\n encodeUri,\n ServiceCallback,\n promiseToCallback,\n promiseToServiceCallback,\n isValidUuid,\n applyMixins,\n isDuration,\n} from \"./util/utils\";\nexport { isNode } from \"@azure/core-util\";\nexport { URLBuilder, URLQuery } from \"./url\";\nexport { AbortSignalLike } from \"@azure/abort-controller\";\nexport { delay } from \"@azure/core-util\";\n// legacy exports. 
Use core-tracing instead (and remove on next major version update of core-http).\nexport { createSpanFunction, SpanConfig } from \"./createSpanLegacy\";\n\n// Credentials\nexport { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from \"@azure/core-auth\";\nexport { AccessTokenCache, ExpiringAccessTokenCache } from \"./credentials/accessTokenCache\";\nexport { AccessTokenRefresher } from \"./credentials/accessTokenRefresher\";\nexport { BasicAuthenticationCredentials } from \"./credentials/basicAuthenticationCredentials\";\nexport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./credentials/apiKeyCredentials\";\nexport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nexport { TopicCredentials } from \"./credentials/topicCredentials\";\nexport { Authenticator } from \"./credentials/credentials\";\n\nexport { parseXML, stringifyXML } from \"./util/xml\";\nexport { XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from \"./util/serializer.common\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/log.js b/node_modules/@azure/core-http/dist-esm/src/log.js new file mode 100644 index 0000000..10dd422 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-http"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/log.js.map b/node_modules/@azure/core-http/dist-esm/src/log.js.map new file mode 100644 index 0000000..5260798 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AACnD,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,WAAW,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { createClientLogger } from \"@azure/logger\";\nexport const logger = createClientLogger(\"core-http\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js new file mode 100644 index 0000000..8f1d6ce --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js @@ -0,0 +1,288 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as http from "http"; +import * as https from "https"; +import { AbortController, AbortError } from "@azure/abort-controller"; +import { HttpHeaders } from "./httpHeaders"; +import { createProxyAgent, isUrlHttps } from "./proxyAgent"; +import { Transform } from "stream"; +import FormData from "form-data"; +import { RestError } from "./restError"; +import { logger } from "./log"; +import node_fetch from "node-fetch"; +function getCachedAgent(isHttps, agentCache) { + return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent; +} +export class ReportTransform extends Transform { + constructor(progressCallback) { + super(); + this.progressCallback = progressCallback; + this.loadedBytes = 0; + } + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(undefined); + } +} +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +export class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ + async sendRequest(httpRequest) { + var _a; + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); + } + const abortController = new AbortController(); + let abortListener; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(() => { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + const formData = httpRequest.formData; + const requestForm = new FormData(); + const appendFormValue = (key, value) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); + } + else { + // browser will automatically apply a suitable content-type header + 
httpRequest.headers.remove("Content-Type"); + } + } + } + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + const onUploadProgress = httpRequest.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const platformSpecificRequestInit = await this.prepareRequest(httpRequest); + const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, + // the types for RequestInit are from the browser, which expects AbortSignal to + // have `reason` and `throwIfAborted`, but these don't exist on our polyfill + // for Node. + signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + let operationResponse; + try { + const response = await this.fetch(httpRequest.url, requestInit); + const headers = parseHeaders(response.headers); + const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || + httpRequest.streamResponseBody; + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: streaming + ? response.body + : undefined, + bodyAsText: !streaming ? await response.text() : undefined, + }; + const onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + const responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + const length = parseInt(headers.get("Content-Length")) || undefined; + if (length) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length }); + } + } + } + await this.processRequest(operationResponse); + return operationResponse; + } + catch (error) { + const fetchError = error; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new AbortError("The operation was aborted."); + } + throw fetchError; + } + finally { + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + getOrCreateAgent(httpRequest) { + var _a; + const isHttps = isUrlHttps(httpRequest.url); + // At the moment, proxy settings and keepAlive are mutually + // exclusive because the 'tunnel' library currently lacks the + // ability to create a proxy with keepAlive turned on. + if (httpRequest.proxySettings) { + const { host, port, username, password } = httpRequest.proxySettings; + const key = `${host}:${port}:${username}:${password}`; + const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; + let agent = getCachedAgent(isHttps, proxyAgents); + if (agent) { + return agent; + } + const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + agent = tunnel.agent; + if (tunnel.isHttps) { + proxyAgents.httpsAgent = tunnel.agent; + } + else { + proxyAgents.httpAgent = tunnel.agent; + } + this.proxyAgentMap.set(key, proxyAgents); + return agent; + } + else if (httpRequest.keepAlive) { + let agent = getCachedAgent(isHttps, this.keepAliveAgents); + if (agent) { + return agent; + } + const agentOptions = { + keepAlive: httpRequest.keepAlive, + }; + if (isHttps) { + agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions); + } + else { + agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions); + } + return agent; + } + else { + return isHttps ? https.globalAgent : http.globalAgent; + } + } + /** + * Uses `node-fetch` to perform the request. + */ + // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs + async fetch(input, init) { + return node_fetch(input, init); + } + /** + * Prepares a request based on the provided web resource. + */ + async prepareRequest(httpRequest) { + const requestInit = {}; + // Set the http(s) agent + requestInit.agent = this.getOrCreateAgent(httpRequest); + requestInit.compress = httpRequest.decompressResponse; + return requestInit; + } + /** + * Process an HTTP response. 
+ */ + async processRequest(_operationResponse) { + /* no_op */ + } +} +//# sourceMappingURL=nodeFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map new file mode 100644 index 0000000..cfa827c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.js","sourceRoot":"","sources":["../../src/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AACtE,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAc,gBAAgB,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AACxE,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAE7C,OAAO,QAAQ,MAAM,WAAW,CAAC;AAGjC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAC;AAOpC,SAAS,cAAc,CACrB,OAAgB,EAChB,UAAsB;IAEtB,OAAO,OAAO,CAAC,CAAC,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC;AAChE,CAAC;AA+BD,MAAM,OAAO,eAAgB,SAAQ,SAAS;IAS5C,YAAoB,gBAA2D;QAC7E,KAAK,EAAE,CAAC;QADU,qBAAgB,GAAhB,gBAAgB,CAA2C;QARvE,gBAAW,GAAW,CAAC,CAAC;IAUhC,CAAC;IATD,UAAU,CAAC,KAAsB,EAAE,SAAiB,EAAE,QAA4B;QAChF,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACjB,IAAI,CAAC,WAAW,IAAI,KAAK,CAAC,MAAM,CAAC;QACjC,IAAI,CAAC,gBAAiB,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAC1D,QAAQ,CAAC,SAAS,CAAC,CAAC;IACtB,CAAC;CAKF;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB,EAAE,OAAyB;IACnE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;QAC7B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE;YACxB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,KAAK,EAAE,CAAC;YACjB,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;QACH,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAChC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,YAAY,CAAC,OAAgB;IAC3C,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;QAC7B,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,mBAAmB;IAAhC;QAiME,wFAAwF;QAChF,kBAAa,GAA4B,IAAI,GAAG,EAAE,CAAC;QACnD,oBAAe,GAAe,EAAE,CAAC;IAmF3C,CAAC;IArRC;;;;OAIG;IACH,KAAK,CAAC,WAAW,CAAC,WAA4B;;QAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;YACnD,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAC9C,IAAI,aAAiD,CAAC;QACtD,IAAI,WAAW,CAAC,WAAW,EAAE;YAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gBACnC,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,aAAa,GAAG,CAAC,KAAY,EAAE,EAAE;gBAC/B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oBAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iBACzB;YACH,CAAC,CAAC;YACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;SAClE;QAED,IAAI,WAAW,CAAC,OAAO,EAAE;YACvB,UAAU,CAAC,GAAG,EAAE;gBACd,eAAe,CAAC,KAAK,EAAE,CAAC;YAC1B,CAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;SACzB;QAED,IAAI,WAAW,CAAC,QAAQ,EAAE;YACxB,MAAM,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;YAC3C,MAAM,WAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,EAAQ,EAAE;gBACxD,0FAA0F;gBAC1F,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oBAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iBACjB;gBACD,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;oBACA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,WAAW,CAAC,MAAM,CAAC,GAAG,EA
AE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBAC3C,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,WAAW,CAAC,IAAI,GAAG,WAAW,CAAC;YAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;YACjC,MAAM,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oBACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,iCAAiC,WAAW,CAAC,WAAW,EAAE,EAAE,CAC7D,CAAC;iBACH;qBAAM;oBACL,kEAAkE;oBAClE,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iBAC5C;aACF;SACF;QAED,IAAI,IAAI,GAAG,WAAW,CAAC,IAAI;YACzB,CAAC,CAAC,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;gBACtC,CAAC,CAAC,WAAW,CAAC,IAAI,EAAE;gBACpB,CAAC,CAAC,WAAW,CAAC,IAAI;YACpB,CAAC,CAAC,SAAS,CAAC;QACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;YACpD,MAAM,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAC;YACtD,MAAM,kBAAkB,GAAG,IAAI,eAAe,CAAC,gBAAgB,CAAC,CAAC;YACjE,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gBAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;aAC/B;iBAAM;gBACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;aAC9B;YAED,IAAI,GAAG,kBAAkB,CAAC;SAC3B;QAED,MAAM,2BAA2B,GAAyB,MAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,CAAC;QAEF,MAAM,WAAW,mBACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM;YAC1B,+EAA+E;YAC/E,4EAA4E;YAC5E,YAAY;YACZ,MAAM,EAAE,eAAe,CAAC,MAAa,EACrC,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;QAEF,IAAI,iBAAoD,CAAC;QACzD,IAAI;YACF,MAAM,QAAQ,GAAmB,MAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEhF,MAAM,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAE/C,MAAM,SAAS,GACb,CAAA,MAAA,WAAW,CAAC,yBAAyB,0CAAE,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAC3D,WAAW,CAAC,kBAAkB,CAAC;YAEjC,iBAAiB,GAAG;gBAClB,OAAO,EAAE,OAAO;gBAChB,OAAO,EAAE,WAAW;gBACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;gBACvB,kBAAkB,EAAE,SAAS;oBAC3B,CAAC,CAAE,QAAQ,CAAC,IAAyC;oBACrD,CAAC,CAAC,SAAS;gBACb,UAAU,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,SAAS;aAC3D,CAAC;YAEF,MAAM,kBAAkB,GAAG,WAAW,CAAC,kBAAkB,CAAC;YAC1D,IAAI,kBAAkB,EAAE;gBACtB,MAAM,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;gBAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;oBAClC,MAAM,oBAAoB,GAAG,IAAI,eAAe,CAAC,kBAAkB,CAAC,CAAC;oBACrE,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;oBACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;iBAC7D;qBAAM;oBACL,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;oBACrE,IAAI,MAAM,EAAE;wBACV,wEAAwE;wBACxE,kBAAkB,CAAC,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC,CAAC;qBAC7C;iBACF;aACF;YAED,MAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,CAAC;YAE7C,OAAO,iBAAiB,CAAC;SAC1B;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,UAAU,GAAe,KAAK,CAAC;YACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;gBACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;gBACxC,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,MAAM,UAAU,CAAC;SAClB;gBAAS;YACR,0BAA0B;YAC1B,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;gBAC5C,IAAI,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;oBAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;iBAC3C;gBACD,IAAI,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;oBAC3D,kBAAkB,GAAG,gBAAgB,CACnC,iBAAkB,CAAC,kBAAkB,EACrC,eAAe,CAChB,CAAC;iBACH;gBAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;qBACh
D,IAAI,CAAC,GAAG,EAAE;;oBACT,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,CAAC,CAAC;oBACtE,OAAO;gBACT,CAAC,CAAC;qBACD,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;oBACX,MAAM,CAAC,OAAO,CAAC,qDAAqD,EAAE,CAAC,CAAC,CAAC;gBAC3E,CAAC,CAAC,CAAC;aACN;SACF;IACH,CAAC;IAMO,gBAAgB,CAAC,WAA4B;;QACnD,MAAM,OAAO,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;QAE5C,2DAA2D;QAC3D,6DAA6D;QAC7D,sDAAsD;QACtD,IAAI,WAAW,CAAC,aAAa,EAAE;YAC7B,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC,aAAa,CAAC;YACrE,MAAM,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,QAAQ,IAAI,QAAQ,EAAE,CAAC;YACtD,MAAM,WAAW,GAAG,MAAA,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,CAAC,mCAAI,EAAE,CAAC;YAEtD,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;YACjD,IAAI,KAAK,EAAE;gBACT,OAAO,KAAK,CAAC;aACd;YAED,MAAM,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;YAEF,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;YACrB,IAAI,MAAM,CAAC,OAAO,EAAE;gBAClB,WAAW,CAAC,UAAU,GAAG,MAAM,CAAC,KAAoB,CAAC;aACtD;iBAAM;gBACL,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;aACtC;YACD,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEzC,OAAO,KAAK,CAAC;SACd;aAAM,IAAI,WAAW,CAAC,SAAS,EAAE;YAChC,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;YAC1D,IAAI,KAAK,EAAE;gBACT,OAAO,KAAK,CAAC;aACd;YAED,MAAM,YAAY,GAA2C;gBAC3D,SAAS,EAAE,WAAW,CAAC,SAAS;aACjC,CAAC;YAEF,IAAI,OAAO,EAAE;gBACX,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,GAAG,IAAI,KAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACzE;iBAAM;gBACL,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACvE;YAED,OAAO,KAAK,CAAC;SACd;aAAM;YACL,OAAO,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;SACvD;IACH,CAAC;IAED;;OAEG;IACH,6EAA6E;IAC7E,KAAK,CAAC,KAAK,CAAC,KAAwB,EAAE,IAAwB;QAC5D,OAAO,UAAU,CAAC,KAAK,EAAE,IAAI,CAAuC,CAAC;IACvE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,cAAc,CAAC,WAA4B;QAC/C,MAAM,WAAW,GAA+D,EAAE,CAAC;QAEnF,wBAAwB;QACxB,WAAW,CAAC,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;QAEvD,WAAW,CAAC,QAAQ,GAAG,WAAW,CAAC,kBAAkB,CAAC;QAEtD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,cAAc,CAAC,kBAAyC;QAC5D,WAAW;IACb,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport { AbortController, AbortError } from \"@azure/abort-controller\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxyAgent, createProxyAgent, isUrlHttps } from \"./proxyAgent\";\nimport { Readable, Transform } from \"stream\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport FormData from \"form-data\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\nimport { logger } from \"./log\";\nimport node_fetch from \"node-fetch\";\n\ninterface AgentCache {\n httpAgent?: http.Agent;\n httpsAgent?: https.Agent;\n}\n\nfunction getCachedAgent(\n isHttps: boolean,\n agentCache: AgentCache\n): http.Agent | https.Agent | undefined {\n return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent;\n}\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\n/**\n * String URLs used when calling to `fetch()`.\n */\nexport type CommonRequestInfo = string;\n\n/**\n * An object containing information about the outgoing HTTP request.\n */\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\n/**\n * An object containing information about the incoming HTTP response.\n */\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport class ReportTransform extends Transform {\n private loadedBytes: number = 0;\n _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void {\n this.push(chunk);\n this.loadedBytes += chunk.length;\n this.progressCallback!({ loadedBytes: this.loadedBytes });\n callback(undefined);\n }\n\n constructor(private progressCallback: (progress: TransferProgressEvent) => void) {\n super();\n }\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable, aborter?: AbortController): Promise {\n return new Promise((resolve) => {\n stream.once(\"close\", () => {\n aborter?.abort();\n resolve();\n });\n stream.once(\"end\", resolve);\n stream.once(\"error\", resolve);\n });\n}\n\n/**\n * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike}\n */\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n\n/**\n * An HTTP client that uses `node-fetch`.\n */\nexport class NodeFetchHttpClient implements HttpClient {\n /**\n * Provides minimum viable error handling and the logic that executes the abstract methods.\n * @param httpRequest - Object representing the outgoing HTTP request.\n * @returns An object representing the incoming HTTP response.\n */\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if 
(Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n const onUploadProgress = httpRequest.onUploadProgress;\n const uploadReportStream = new ReportTransform(onUploadProgress);\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n // the types for RequestInit are from the browser, which expects AbortSignal to\n // have `reason` and `throwIfAborted`, but these don't exist on our polyfill\n // for Node.\n signal: abortController.signal as any,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n\n const streaming =\n httpRequest.streamResponseStatusCodes?.has(response.status) ||\n httpRequest.streamResponseBody;\n\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: streaming\n ? (response.body as unknown as NodeJS.ReadableStream)\n : undefined,\n bodyAsText: !streaming ? await response.text() : undefined,\n };\n\n const onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n const responseBody: ReadableStream | undefined = response.body || undefined;\n\n if (isReadableStream(responseBody)) {\n const downloadReportStream = new ReportTransform(onDownloadProgress);\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n } else {\n const length = parseInt(headers.get(\"Content-Length\")!) 
|| undefined;\n if (length) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length });\n }\n }\n }\n\n await this.processRequest(operationResponse);\n\n return operationResponse;\n } catch (error: any) {\n const fetchError: FetchError = error;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(\n fetchError.message,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n httpRequest\n );\n } else if (fetchError.type === \"aborted\") {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n throw fetchError;\n } finally {\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n let uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n let downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse?.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(\n operationResponse!.readableStreamBody,\n abortController\n );\n }\n\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(() => {\n httpRequest.abortSignal?.removeEventListener(\"abort\", abortListener!);\n return;\n })\n .catch((e) => {\n logger.warning(\"Error when cleaning up abortListener on httpRequest\", e);\n });\n }\n }\n }\n\n // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent\n private proxyAgentMap: Map = new Map();\n private keepAliveAgents: AgentCache = {};\n\n private getOrCreateAgent(httpRequest: WebResourceLike): http.Agent | https.Agent {\n const isHttps = isUrlHttps(httpRequest.url);\n\n // At the moment, proxy settings and keepAlive are mutually\n // exclusive because the 'tunnel' library currently lacks the\n // ability to create a proxy with keepAlive turned on.\n if (httpRequest.proxySettings) {\n const { host, port, username, password } = httpRequest.proxySettings;\n const key = `${host}:${port}:${username}:${password}`;\n const proxyAgents = this.proxyAgentMap.get(key) ?? {};\n\n let agent = getCachedAgent(isHttps, proxyAgents);\n if (agent) {\n return agent;\n }\n\n const tunnel: ProxyAgent = createProxyAgent(\n httpRequest.url,\n httpRequest.proxySettings,\n httpRequest.headers\n );\n\n agent = tunnel.agent;\n if (tunnel.isHttps) {\n proxyAgents.httpsAgent = tunnel.agent as https.Agent;\n } else {\n proxyAgents.httpAgent = tunnel.agent;\n }\n this.proxyAgentMap.set(key, proxyAgents);\n\n return agent;\n } else if (httpRequest.keepAlive) {\n let agent = getCachedAgent(isHttps, this.keepAliveAgents);\n if (agent) {\n return agent;\n }\n\n const agentOptions: http.AgentOptions | https.AgentOptions = {\n keepAlive: httpRequest.keepAlive,\n };\n\n if (isHttps) {\n agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions);\n } else {\n agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions);\n }\n\n return agent;\n } else {\n return isHttps ? 
https.globalAgent : http.globalAgent;\n }\n }\n\n /**\n * Uses `node-fetch` to perform the request.\n */\n // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs\n async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise {\n return node_fetch(input, init) as unknown as Promise;\n }\n\n /**\n * Prepares a request based on the provided web resource.\n */\n async prepareRequest(httpRequest: WebResourceLike): Promise> {\n const requestInit: Partial = {};\n\n // Set the http(s) agent\n requestInit.agent = this.getOrCreateAgent(httpRequest);\n\n requestInit.compress = httpRequest.decompressResponse;\n\n return requestInit;\n }\n\n /**\n * Process an HTTP response.\n */\n async processRequest(_operationResponse: HttpOperationResponse): Promise {\n /* no_op */\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationArguments.js b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js new file mode 100644 index 0000000..21722f3 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=operationArguments.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationArguments.js.map b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js.map new file mode 100644 index 0000000..388d777 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.js","sourceRoot":"","sources":["../../src/operationArguments.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestOptionsBase } from \"./webResource\";\n\n/**\n * A collection of properties that apply to a single invocation of an operation.\n */\nexport interface OperationArguments {\n /**\n * The parameters that were passed to the operation method.\n */\n [parameterName: string]: any;\n\n /**\n * The optional arugments that are provided to an operation.\n */\n options?: RequestOptionsBase;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationOptions.js b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js new file mode 100644 index 0000000..041c56b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { __rest } from "tslib"; +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +export function operationOptionsToRequestOptionsBase(opts) { + const { requestOptions, tracingOptions } = opts, additionalOptions = __rest(opts, ["requestOptions", "tracingOptions"]); + let result = additionalOptions; + if (requestOptions) { + result = Object.assign(Object.assign({}, result), requestOptions); + } + if (tracingOptions) { + result.tracingContext = tracingOptions.tracingContext; + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? 
void 0 : tracingOptions.spanOptions; + } + return result; +} +//# sourceMappingURL=operationOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationOptions.js.map b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js.map new file mode 100644 index 0000000..293a7ad --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationOptions.js","sourceRoot":"","sources":["../../src/operationOptions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAuDlC;;;;GAIG;AACH,MAAM,UAAU,oCAAoC,CAClD,IAAO;IAEP,MAAM,EAAE,cAAc,EAAE,cAAc,KAA2B,IAAI,EAA1B,iBAAiB,UAAK,IAAI,EAA/D,oCAAwD,CAAO,CAAC;IAEtE,IAAI,MAAM,GAAuB,iBAAiB,CAAC;IAEnD,IAAI,cAAc,EAAE;QAClB,MAAM,mCAAQ,MAAM,GAAK,cAAc,CAAE,CAAC;KAC3C;IAED,IAAI,cAAc,EAAE;QAClB,MAAM,CAAC,cAAc,GAAG,cAAc,CAAC,cAAc,CAAC;QACtD,+HAA+H;QAC/H,MAAM,CAAC,WAAW,GAAI,cAAsB,aAAtB,cAAc,uBAAd,cAAc,CAAU,WAAW,CAAC;KAC3D;IAED,OAAO,MAAM,CAAC;AAChB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestOptionsBase, TransferProgressEvent } from \"./webResource\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * The base options type for all operations.\n */\nexport interface OperationOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: OperationRequestOptions;\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * Options that allow configuring the handling of HTTP requests made by an SDK operation.\n */\nexport interface OperationRequestOptions {\n /**\n * User defined custom request headers that will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n}\n\n/**\n * Converts an OperationOptions to a RequestOptionsBase\n *\n * @param opts - OperationOptions object to convert to RequestOptionsBase\n */\nexport function operationOptionsToRequestOptionsBase(\n opts: T\n): RequestOptionsBase {\n const { requestOptions, tracingOptions, ...additionalOptions } = opts;\n\n let result: RequestOptionsBase = additionalOptions;\n\n if (requestOptions) {\n result = { ...result, ...requestOptions };\n }\n\n if (tracingOptions) {\n result.tracingContext = tracingOptions.tracingContext;\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n result.spanOptions = (tracingOptions as any)?.spanOptions;\n }\n\n return result;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationParameter.js b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js new file mode 100644 index 0000000..1361e29 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +export function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +//# sourceMappingURL=operationParameter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationParameter.js.map b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js.map new file mode 100644 index 0000000..f3a736a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.js","sourceRoot":"","sources":["../../src/operationParameter.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAqDlC;;;;GAIG;AACH,MAAM,UAAU,0BAA0B,CAAC,SAA6B;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAED,MAAM,UAAU,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;IAEd,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;KACxB;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAClC;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;KACjC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Mapper } from \"./serializer\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\n\n/**\n * A path which describes how to access a particular property in a given object data source. 
May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values.\n */\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter - The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationResponse.js b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js new file mode 100644 index 0000000..6781aac --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=operationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationResponse.js.map b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js.map new file mode 100644 index 0000000..bd4a139 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.js","sourceRoot":"","sources":["../../src/operationResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Mapper } from \"./serializer\";\n\n/**\n * An OperationResponse that can be returned from an operation request for a single status code.\n */\nexport interface OperationResponse {\n /**\n * The mapper that will be used to deserialize the response headers.\n */\n headersMapper?: Mapper;\n\n /**\n * The mapper that will be used to deserialize the response body.\n */\n bodyMapper?: Mapper;\n\n /**\n * Indicates if this is an error response\n */\n isError?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationSpec.js b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js new file mode 100644 index 0000000..2ba9e6c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { MapperType } from "./serializer"; +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +export function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} +//# sourceMappingURL=operationSpec.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationSpec.js.map b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js.map new file mode 100644 index 0000000..2843db2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.js","sourceRoot":"","sources":["../../src/operationSpec.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAc,MAAM,cAAc,CAAC;AA8FtD;;;GAGG;AACH,MAAM,UAAU,4BAA4B,CAAC,aAA4B;IACvE,MAAM,MAAM,GAAG,IAAI,GAAG,EAAU,CAAC;IACjC,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,MAAM,iBAAiB,GAAG,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QAC9D,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;SAChC;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { MapperType, Serializer } from \"./serializer\";\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { HttpMethods } from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\n\n/**\n * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the 
{@link ServiceClient}.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The media type of the request body.\n * This value can be used to aide in serialization if it is provided.\n */\n readonly mediaType?:\n | \"json\"\n | \"xml\"\n | \"form\"\n | \"binary\"\n | \"multipart\"\n | \"text\"\n | \"unknown\"\n | string;\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\n/**\n * Gets the list of status codes for streaming responses.\n * @internal\n */\nexport function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set {\n const result = new Set();\n for (const statusCode in operationSpec.responses) {\n const operationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result.add(Number(statusCode));\n }\n }\n return result;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js new file mode 100644 index 0000000..2abe14a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=pipelineOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js.map b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js.map new file mode 100644 index 0000000..d1dc2e2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pipelineOptions.js","sourceRoot":"","sources":["../../src/pipelineOptions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DeserializationOptions } from \"./policies/deserializationPolicy\";\nimport { HttpClient } from \"./httpClient\";\nimport { KeepAliveOptions } from \"./policies/keepAlivePolicy\";\nimport { LogPolicyOptions } from \"./policies/logPolicy\";\nimport { ProxyOptions } from \"./serviceClient\";\nimport { RedirectOptions } from \"./policies/redirectPolicy\";\nimport { RetryOptions } from \"./policies/exponentialRetryPolicy\";\nimport { UserAgentOptions } from \"./policies/userAgentPolicy\";\n\n/**\n * Defines options that are used to configure the HTTP pipeline for\n * an SDK client.\n */\nexport interface PipelineOptions {\n /**\n * The HttpClient implementation to use for outgoing HTTP requests. Defaults\n * to DefaultHttpClient.\n */\n httpClient?: HttpClient;\n\n /**\n * Options that control how to retry failed requests.\n */\n retryOptions?: RetryOptions;\n\n /**\n * Options to configure a proxy for outgoing requests.\n */\n proxyOptions?: ProxyOptions;\n\n /**\n * Options for how HTTP connections should be maintained for future\n * requests.\n */\n keepAliveOptions?: KeepAliveOptions;\n\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n\n /**\n * Options for adding user agent details to outgoing requests.\n */\n userAgentOptions?: UserAgentOptions;\n}\n\n/**\n * Defines options that are used to configure internal options of\n * the HTTP pipeline for an SDK client.\n */\nexport interface InternalPipelineOptions extends PipelineOptions {\n /**\n * Options to configure API response deserialization.\n */\n deserializationOptions?: DeserializationOptions;\n\n /**\n * Options to configure request/response logging.\n */\n loggingOptions?: LogPolicyOptions;\n\n /**\n * Configure whether to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n\n /**\n * Send JSON Array payloads as NDJSON.\n */\n sendStreamingJson?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js new file mode 100644 index 0000000..37d2d9e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, } from "../policies/requestPolicy"; +import { Constants } from "../util/constants"; +import { delay } from "@azure/core-util"; +// Default options for the cycler if none are provided +export const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. 
+ */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. 
+ */ +export function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} +//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js.map new file mode 100644 index 0000000..973bfda --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bearerTokenAuthenticationPolicy.js","sourceRoot":"","sources":["../../../src/policies/bearerTokenAuthenticationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EACL,iBAAiB,GAIlB,MAAM,2BAA2B,CAAC;AACnC,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAiCzC,sDAAsD;AACtD,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,uBAAuB,EAAE,IAAI;IAC7B,iBAAiB,EAAE,IAAI;IACvB,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC,EAAE,oCAAoC;CACvE,CAAC;AAEF;;;;;;;;;;;;GAYG;AACH,KAAK,UAAU,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB;IAEnB,4EAA4E;IAC5E,eAAe;IACf,KAAK,UAAU,iBAAiB;QAC9B,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;aAC/B;YAAC,WAAM;gBACN,OAAO,IAAI,CAAC;aACb;SACF;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;YAE1C,6CAA6C;YAC7C,IAAI,UAAU,KAAK,IAAI,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,OAAO,UAAU,CAAC;SACnB;IACH,CAAC;IAED,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;QACrB,MAAM,KAAK,CAAC,iBAAiB,CAAC,CAAC;QAE/B,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;KACnC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;IAErC,MAAM,OAAO,mCACR,sBAAsB,GACtB,kBAAkB,CACtB,CAAC;IAEF;;;OAGG;IACH,MAAM,MAAM,GAAG;QACb;;WAEG;QACH,IAAI,YAAY;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;QAChC,CAAC;QACD;;;WAGG;QACH,IAAI,aAAa;;YACf,OAAO,CACL,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,CAAC,CAAC,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1E,CAAC;QACJ,CAAC;QACD;;;WAGG;QACH,IAAI,WAAW;YACb,OAAO,CACL,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1F,CAAC;QACJ,CAAC;KACF,CAAC;IAEF;;;OAGG;IACH,SAAS,OAAO,CAAC,eAAgC;;QAC/C,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;YACxB,yDAAyD;YACzD,MAAM,iBAAiB,GAAG,GAAgC,EAAE,CAC1D,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;YAE/C,wEAAwE;YACxE,6CAA6C;YAC7C,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;YACzB,+DAA+D;YAC/D,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,I
AAI,CAAC,GAAG,EAAE,CACxC;iBACE,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;gBACf,OAAO,KAAK,CAAC;YACf,CAAC,CAAC;iBACD,KAAK,CAAC,CAAC,MAAM,EAAE,EAAE;gBAChB,sEAAsE;gBACtE,qEAAqE;gBACrE,mBAAmB;gBACnB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;gBACb,MAAM,MAAM,CAAC;YACf,CAAC,CAAC,CAAC;SACN;QAED,OAAO,aAAqC,CAAC;IAC/C,CAAC;IAED,OAAO,KAAK,EAAE,YAA6B,EAAwB,EAAE;QACnE,EAAE;QACF,gBAAgB;QAChB,+DAA+D;QAC/D,6CAA6C;QAC7C,+DAA+D;QAC/D,yCAAyC;QACzC,6DAA6D;QAC7D,YAAY;QACZ,EAAE;QAEF,IAAI,MAAM,CAAC,WAAW;YAAE,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;SACvB;QAED,OAAO,KAAoB,CAAC;IAC9B,CAAC,CAAC;AACJ,CAAC;AAED,aAAa;AAEb;;;;;;GAMG;AACH,MAAM,UAAU,+BAA+B,CAC7C,UAA2B,EAC3B,MAAyB;IAEzB,wFAAwF;IACxF,MAAM,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,eAAe,CAAC,CAAC;IAEvE,MAAM,+BAAgC,SAAQ,iBAAiB;QAC7D,YAAmB,UAAyB,EAAE,OAA6B;YACzE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC7B,CAAC;QAEM,KAAK,CAAC,WAAW,CAAC,WAA4B;YACnD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;gBACzD,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;aACH;YAED,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,QAAQ,CAAC;gBAC/B,WAAW,EAAE,WAAW,CAAC,WAAW;gBACpC,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;iBAC3C;aACF,CAAC,CAAC;YACH,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,UAAU,KAAK,EAAE,CAAC,CAAC;YACpF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;QACnD,CAAC;KACF;IAED,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,+BAA+B,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAClE,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"../policies/requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nexport const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a 
function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 
0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\nexport function bearerTokenAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n const getToken = createTokenCycler(credential, scopes /* , options */);\n\n class BearerTokenAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const { token } = await getToken({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n });\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new BearerTokenAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js 
b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js new file mode 100644 index 0000000..5f9bd8b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { XML_CHARKEY } from "../util/serializer.common"; +import { MapperType } from "../serializer"; +import { RestError } from "../restError"; +import { parseXML } from "../util/xml"; +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export function deserializationPolicy(deserializationContentTypes, parsingOptions) { + return { + create: (nextPolicy, options) => { + return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); + }, + }; +} +export const defaultJsonContentTypes = ["application/json", "text/json"]; +export const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +export const DefaultDeserializationOptions = { + expectedContentTypes: { + json: defaultJsonContentTypes, + xml: defaultXmlContentTypes, + }, +}; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export class DeserializationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { + var _a; + super(nextPolicy, requestPolicyOptions); + this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + } + async sendRequest(request) { + return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { + xmlCharKey: this.xmlCharKey, + })); + } +} +function getOperationResponse(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationSpec = request.operationSpec; + if (operationSpec) { + const operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const shouldDeserialize = parsedResponse.request.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. 
+ * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +export function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationSpec = parsedResponse.request.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponse(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (innerError) { + const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders", options); + } + } + return parsedResponse; + }); +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; + const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(parsedResponse.status)) || + parsedResponse.request.streamResponseBody; + const initialErrorMessage = streaming + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let parsedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedBody === "object" ? parsedBody[defaultBodyMapper.xmlElementName] : []; + } + parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); + } + const internalError = parsedBody.error || parsedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = parsedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { + var _a; + const errorHandler = (err) => { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); + return Promise.reject(e); + }; + const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || + operationResponse.request.streamResponseBody; + if (!streaming && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? 
[] + : contentType.split(";").map((component) => component.toLowerCase()); + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + return new Promise((resolve) => { + operationResponse.parsedBody = JSON.parse(text); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + return parseXML(text, opts) + .then((body) => { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js.map new file mode 100644 index 0000000..ef1be8e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"deserializationPolicy.js","sourceRoot":"","sources":["../../../src/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAqB,WAAW,EAAE,MAAM,2BAA2B,CAAC;AAE3E,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAG3C,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAEzC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AA+BvC;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CACnC,2BAAyD,EACzD,cAAkC;IAElC,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,qBAAqB,CAC9B,UAAU,EACV,OAAO,EACP,2BAA2B,EAC3B,cAAc,CACf,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,MAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,MAAM,CAAC,MAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAElF,MAAM,CAAC,MAAM,6BAA6B,GAA2B;IACnE,oBAAoB,EAAE;QACpB,IAAI,EAAE,uBAAuB;QAC7B,GAAG,EAAE,sBAAsB;KAC5B;CACF,CAAC;AAEF;;;GAGG;AACH,MAAM,OAAO,qBAAsB,SAAQ,iBAAiB;IAK1D,YACE,UAAyB,EACzB,oBAA0C,EAC1C,2BAAyD,EACzD,iBAAoC,EAAE;;QAEtC,KAAK,CAAC,UAAU,EAAE,oBAAoB,CAAC,CAAC;QAExC,IAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,CAAC,IAAI,uBAAuB,CAAC;QAC/F,IAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,CAAC,IAAI,sBAAsB,CAAC;QAC7F,IAAI,CAAC,UAAU,GAAG,MAAA,cAAc,CAAC,UAAU,mCAAI,WAAW,CAAC;IAC7D,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAA+B,EAAE,EAAE,CACpF,uBAAuB,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,eAAe,EAAE,QAAQ,EAAE;YAC7E,UAAU,EAAE,IAAI,CAAC,UAAU;SAC5B,CAAC,CACH,CAAC;IACJ,CAAC;CACF;AAED,SAAS,oBAAoB,CAC3B,cAAqC;IAErC,IAAI,MAAqC,CAAC;IAC1C,MAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;IACxD,MAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;IACvE,IAAI,aAAa,EAAE;QACjB,MAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;SACzD;aAAM;YACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;SACjE;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC;IACtE,MAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAC3C,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;KACf;SAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;KAC5B;SAAM;QACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;KAC5C;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B,EAC/B,UAA6B,EAAE;;IAE/B,MAAM,cAAc,GAAgC;QAClD,QAAQ,EAAE,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;QAChC,WAAW,EAAE,MAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;QACzC,UAAU,EAAE,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;KAC9C,CAAC;IACF
,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,EAAE,cAAc,CAAC,CAAC,IAAI,CAC5E,CAAC,cAAc,EAAE,EAAE;QACjB,IAAI,CAAC,yBAAyB,CAAC,cAAc,CAAC,EAAE;YAC9C,OAAO,cAAc,CAAC;SACvB;QAED,MAAM,aAAa,GAAG,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;QAC3D,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,SAAS,EAAE;YAC9C,OAAO,cAAc,CAAC;SACvB;QAED,MAAM,YAAY,GAAG,oBAAoB,CAAC,cAAc,CAAC,CAAC;QAE1D,MAAM,EAAE,KAAK,EAAE,oBAAoB,EAAE,GAAG,mBAAmB,CACzD,cAAc,EACd,aAAa,EACb,YAAY,CACb,CAAC;QACF,IAAI,KAAK,EAAE;YACT,MAAM,KAAK,CAAC;SACb;aAAM,IAAI,oBAAoB,EAAE;YAC/B,OAAO,cAAc,CAAC;SACvB;QAED,oEAAoE;QACpE,sCAAsC;QACtC,IAAI,YAAY,EAAE;YAChB,IAAI,YAAY,CAAC,UAAU,EAAE;gBAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;gBACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBACpF,kBAAkB;wBAChB,OAAO,kBAAkB,KAAK,QAAQ;4BACpC,CAAC,CAAC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;4BAC7D,CAAC,CAAC,EAAE,CAAC;iBACV;gBACD,IAAI;oBACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,EACzB,OAAO,CACR,CAAC;iBACH;gBAAC,OAAO,UAAe,EAAE;oBACxB,MAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,SAAS,UAAU,iDAAiD,cAAc,CAAC,UAAU,EAAE,EAC/F,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;oBACF,MAAM,SAAS,CAAC;iBACjB;aACF;iBAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;gBAC9C,uGAAuG;gBACvG,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;aAC7E;YAED,IAAI,YAAY,CAAC,aAAa,EAAE;gBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,MAAM,EAAE,EAC/B,4BAA4B,EAC5B,OAAO,CACR,CAAC;aACH;SACF;QAED,OAAO,cAAc,CAAC;IACxB,CAAC,CACF,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAAC,aAA4B;IACxD,MAAM,mBAAmB,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;IACjE,OAAO,CACL,mBAAmB,CAAC,MAAM,KAAK,CAAC;QAChC,CAAC,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAC3E,CAAC;AACJ,CAAC;AAED,SAAS,mBAAmB,CAC1B,cAAqC,EACrC,aAA4B,EAC5B,YAA2C;;IAE3C,MAAM,iBAAiB,GAAG,GAAG,IAAI,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,MAAM,GAAG,GAAG,CAAC;IACtF,MAAM,oBAAoB,GAAY,oBAAoB,CAAC,aAAa,CAAC;QACvE,CAAC,CAAC,iBAAiB;QACnB,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;IAEnB,IAAI,oBAAoB,EAAE;QACxB,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE;gBACzB,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;aACrD;SACF;aAAM;YACL,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;SACrD;KACF;IAED,MAAM,iBAAiB,GAAG,YAAY,aAAZ,YAAY,cAAZ,YAAY,GAAI,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;IAC1E,MAAM,SAAS,GACb,CAAA,MAAA,cAAc,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,cAAc,CAAC,MAAM,CAAC;QAC5E,cAAc,CAAC,OAAO,CAAC,kBAAkB,CAAC;IAC5C,MAAM,mBAAmB,GAAG,SAAS;QACnC,CAAC,CAAC,2BAA2B,cAAc,CAAC,MAAM,EAAE;QACpD,CAAC,CAAE,cAAc,CAAC,UAAqB,CAAC;IAE1C,MAAM,KAAK,GAAG,IAAI,SAAS,CACzB,mBAAmB,EACnB,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;IAEF,yFAAyF;IACzF,sDAAsD;IACtD,IAAI,CAAC,iBAAiB,EAAE;QACtB,MAAM,KAAK,CAAC;KACb;IAED,MAAM,iBAAiB,GAAG,iBAAiB,CAAC,UAAU,CAAC;IACvD,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,aAAa,CAAC;IAE7D,IAAI;QACF,iFAAiF;QACjF,mDAAmD;QACnD,IAAI,cAAc,CAAC,UAAU,EAAE;YAC7B,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;YAC7C,IAAI,WAAW,CAAC;YAChB,IAAI,iBAAiB,EAAE;gBACrB,IAAI,kBAAkB,GAAQ,UAAU,CAAC;gBACzC,IAAI,aAAa,CAAC,KAAK,IAAI,iBAAiB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBAC9E,kBAAkB;wBAChB,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,iBAAiB,CAAC,cAAe,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;iBACvF;gBACD,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAChD,iBAAiB,EACjB,kBAAkB,EAClB,2BAA2B,CAC5B,CAAC;aACH;YAED,MAAM,aAAa,GAAQ,UAAU,CAAC,KAAK,IAAI,WAAW,IAAI,UAAU,CAAC;YACzE,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;YAChC,IAAI,aAAa,CAAC,OAAO,EAAE;gBACzB,KAAK,CAAC,OA
AO,GAAG,aAAa,CAAC,OAAO,CAAC;aACvC;YAED,IAAI,iBAAiB,EAAE;gBACrB,KAAK,CAAC,QAAS,CAAC,UAAU,GAAG,WAAW,CAAC;aAC1C;SACF;QAED,mFAAmF;QACnF,IAAI,cAAc,CAAC,OAAO,IAAI,oBAAoB,EAAE;YAClD,KAAK,CAAC,QAAS,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAClE,oBAAoB,EACpB,cAAc,CAAC,OAAO,CAAC,MAAM,EAAE,EAC/B,4BAA4B,CAC7B,CAAC;SACH;KACF;IAAC,OAAO,YAAiB,EAAE;QAC1B,KAAK,CAAC,OAAO,GAAG,UAAU,YAAY,CAAC,OAAO,mDAAmD,cAAc,CAAC,UAAU,6BAA6B,CAAC;KACzJ;IAED,OAAO,EAAE,KAAK,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AAChD,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC,EACxC,IAAiC;;IAEjC,MAAM,YAAY,GAAG,CAAC,GAA6B,EAAkB,EAAE;QACrE,MAAM,GAAG,GAAG,UAAU,GAAG,gDAAgD,iBAAiB,CAAC,UAAU,GAAG,CAAC;QACzG,MAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;QAClD,MAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,CAClB,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC3B,CAAC,CAAC;IAEF,MAAM,SAAS,GACb,CAAA,MAAA,iBAAiB,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,iBAAiB,CAAC,MAAM,CAAC;QAClF,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,CAAC;IAC/C,IAAI,CAAC,SAAS,IAAI,iBAAiB,CAAC,UAAU,EAAE;QAC9C,MAAM,IAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;QAC1C,MAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,MAAM,iBAAiB,GAAa,CAAC,WAAW;YAC9C,CAAC,CAAC,EAAE;YACJ,CAAC,CAAC,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC;QACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;YAC9B,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACjF;YACA,OAAO,IAAI,OAAO,CAAwB,CAAC,OAAO,EAAE,EAAE;gBACpD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;YAC3F,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC;iBACxB,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE;gBACb,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;gBACpC,OAAO,iBAAiB,CAAC;YAC3B,CAAC,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { SerializerOptions, XML_CHARKEY } from \"../util/serializer.common\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { MapperType } from \"../serializer\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { parseXML } from \"../util/xml\";\n\n/**\n * Options to configure API response deserialization.\n */\nexport interface DeserializationOptions {\n /**\n * Configures the expected content types for the deserialization of\n * JSON and XML response bodies.\n */\n expectedContentTypes: DeserializationContentTypes;\n}\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", 
\"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions?: SerializerOptions\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DeserializationPolicy(\n nextPolicy,\n options,\n deserializationContentTypes,\n parsingOptions\n );\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\nexport const DefaultDeserializationOptions: DeserializationOptions = {\n expectedContentTypes: {\n json: defaultJsonContentTypes,\n xml: defaultXmlContentTypes,\n },\n};\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n public readonly xmlCharKey: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n requestPolicyOptions: RequestPolicyOptions,\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions: SerializerOptions = {}\n ) {\n super(nextPolicy, requestPolicyOptions);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n this.xmlCharKey = parsingOptions.xmlCharKey ?? 
XML_CHARKEY;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, {\n xmlCharKey: this.xmlCharKey,\n })\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\n/**\n * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}.\n * @param jsonContentTypes - Response content types to parse the body as JSON.\n * @param xmlContentTypes - Response content types to parse the body as XML.\n * @param response - HTTP Response from the pipeline.\n * @param options - Options to the serializer, mostly for configuring the XML parser if needed.\n * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}.\n */\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse,\n options: SerializerOptions = {}\n): Promise {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(\n (parsedResponse) => {\n if (!shouldDeserializeResponse(parsedResponse)) {\n return parsedResponse;\n }\n\n const operationSpec = parsedResponse.request.operationSpec;\n if (!operationSpec || !operationSpec.responses) {\n return parsedResponse;\n }\n\n const responseSpec = getOperationResponse(parsedResponse);\n\n const { error, shouldReturnResponse } = handleErrorResponse(\n parsedResponse,\n operationSpec,\n responseSpec\n );\n if (error) {\n throw error;\n } else if (shouldReturnResponse) {\n return parsedResponse;\n }\n\n // An operation response spec does exist for current status code, so\n // use it to deserialize the response.\n if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\",\n options\n );\n } catch (innerError: any) {\n const restError = new RestError(\n `Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n throw restError;\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.toJson(),\n \"operationRes.parsedHeaders\",\n options\n );\n }\n }\n\n return parsedResponse;\n }\n );\n}\n\nfunction isOperationSpecEmpty(operationSpec: OperationSpec): boolean {\n const expectedStatusCodes = Object.keys(operationSpec.responses);\n return (\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\")\n );\n}\n\nfunction handleErrorResponse(\n parsedResponse: HttpOperationResponse,\n operationSpec: OperationSpec,\n responseSpec: OperationResponse | undefined\n): { error: RestError | null; shouldReturnResponse: boolean } {\n const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;\n const isExpectedStatusCode: boolean = isOperationSpecEmpty(operationSpec)\n ? isSuccessByStatus\n : !!responseSpec;\n\n if (isExpectedStatusCode) {\n if (responseSpec) {\n if (!responseSpec.isError) {\n return { error: null, shouldReturnResponse: false };\n }\n } else {\n return { error: null, shouldReturnResponse: false };\n }\n }\n\n const errorResponseSpec = responseSpec ?? operationSpec.responses.default;\n const streaming =\n parsedResponse.request.streamResponseStatusCodes?.has(parsedResponse.status) ||\n parsedResponse.request.streamResponseBody;\n const initialErrorMessage = streaming\n ? `Unexpected status code: ${parsedResponse.status}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(\n initialErrorMessage,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n\n // If the item failed but there's no error spec or default spec to deserialize the error,\n // we should fail so we just throw the parsed response\n if (!errorResponseSpec) {\n throw error;\n }\n\n const defaultBodyMapper = errorResponseSpec.bodyMapper;\n const defaultHeadersMapper = errorResponseSpec.headersMapper;\n\n try {\n // If error response has a body, try to deserialize it using default body mapper.\n // Then try to extract error code & message from it\n if (parsedResponse.parsedBody) {\n const parsedBody = parsedResponse.parsedBody;\n let parsedError;\n if (defaultBodyMapper) {\n let valueToDeserialize: any = parsedBody;\n if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof parsedBody === \"object\" ? parsedBody[defaultBodyMapper.xmlElementName!] 
: [];\n }\n parsedError = operationSpec.serializer.deserialize(\n defaultBodyMapper,\n valueToDeserialize,\n \"error.response.parsedBody\"\n );\n }\n\n const internalError: any = parsedBody.error || parsedError || parsedBody;\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n\n if (defaultBodyMapper) {\n error.response!.parsedBody = parsedError;\n }\n }\n\n // If error response has headers, try to deserialize it using default header mapper\n if (parsedResponse.headers && defaultHeadersMapper) {\n error.response!.parsedHeaders = operationSpec.serializer.deserialize(\n defaultHeadersMapper,\n parsedResponse.headers.toJson(),\n \"operationRes.parsedHeaders\"\n );\n }\n } catch (defaultError: any) {\n error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`;\n }\n\n return { error, shouldReturnResponse: false };\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse,\n opts: Required\n): Promise {\n const errorHandler = (err: Error & { code: string }): Promise => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse\n );\n return Promise.reject(e);\n };\n\n const streaming =\n operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) ||\n operationResponse.request.streamResponseBody;\n if (!streaming && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text, opts)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js new file mode 100644 index 0000000..3bbbec5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* + * NOTE: When moving this file, please update "browser" section in package.json + */ +import { BaseRequestPolicy, } from "./requestPolicy"; +const errorMessage = "DisableResponseDecompressionPolicy is not supported in browser environment"; +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. 
+ */ +export function disableResponseDecompressionPolicy() { + return { + create: (_nextPolicy, _options) => { + throw new Error(errorMessage); + }, + }; +} +export class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + throw new Error(errorMessage); + } + async sendRequest(_request) { + throw new Error(errorMessage); + } +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map new file mode 100644 index 0000000..c4c40b1 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/disableResponseDecompressionPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,YAAY,GAAG,4EAA4E,CAAC;AAElG;;;GAGG;AACH,MAAM,UAAU,kCAAkC;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,WAA0B,EAAE,QAA8B,EAAE,EAAE;YACrE,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChC,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,kCAAmC,SAAQ,iBAAiB;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;IAChC,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,QAAqB;QAC5C,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;IAChC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n */\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\nconst errorMessage = \"DisableResponseDecompressionPolicy is not supported in browser environment\";\n\n/**\n * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting\n * to use it will results in error being thrown.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptions) => {\n throw new Error(errorMessage);\n },\n };\n}\n\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n throw new Error(errorMessage);\n }\n\n public async sendRequest(_request: WebResource): Promise {\n throw new Error(errorMessage);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js new file mode 100644 index 0000000..bf033e0 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. 
+ */ +export function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=disableResponseDecompressionPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map new file mode 100644 index 0000000..5a50cd8 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.js","sourceRoot":"","sources":["../../../src/policies/disableResponseDecompressionPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB;;;GAGG;AACH,MAAM,UAAU,kCAAkC;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,kCAAkC,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,MAAM,OAAO,kCAAmC,SAAQ,iBAAiB;IACvE;;;;;OAKG;IACH,uCAAuC;IACvC,wEAAwE;IACxE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,OAAO,CAAC,kBAAkB,GAAG,KAAK,CAAC;QACnC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\n/**\n * Returns a request policy factory that can be used to create an instance of\n * {@link DisableResponseDecompressionPolicy}.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DisableResponseDecompressionPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * A policy to disable response decompression according to Accept-Encoding header\n * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding\n */\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of DisableResponseDecompressionPolicy.\n *\n * @param nextPolicy -\n * @param options -\n */\n // The parent constructor is protected.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n 
*/\n public async sendRequest(request: WebResource): Promise {\n request.decompressResponse = false;\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js new file mode 100644 index 0000000..0df2f6b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { DEFAULT_CLIENT_MAX_RETRY_INTERVAL, DEFAULT_CLIENT_RETRY_COUNT, DEFAULT_CLIENT_RETRY_INTERVAL, isNumber, shouldRetry, updateRetryData, } from "../util/exponentialBackoffStrategy"; +import { Constants } from "../util/constants"; +import { RestError } from "../restError"; +import { delay } from "@azure/core-util"; +import { logger } from "../log"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); + }, + }; +} +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export var RetryMode; +(function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; +})(RetryMode || (RetryMode = {})); +export const DefaultRetryOptions = { + maxRetries: DEFAULT_CLIENT_RETRY_COUNT, + retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, +}; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ + constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry(this, request, response)) + .catch((error) => retry(this, request, error.response, undefined, error)); + } +} +async function retry(policy, request, response, retryData, requestError) { + function shouldPolicyRetry(responseParam) { + const statusCode = responseParam === null || responseParam === void 0 ? 
void 0 : responseParam.status; + if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { + return false; + } + if (statusCode === undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + return true; + } + retryData = updateRetryData({ + retryInterval: policy.retryInterval, + minRetryInterval: 0, + maxRetryInterval: policy.maxRetryInterval, + }, retryData, requestError); + const isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { + logger.info(`Retrying request in ${retryData.retryInterval}`); + try { + await delay(retryData.retryInterval); + const res = await policy._nextPolicy.sendRequest(request.clone()); + return retry(policy, request, res, retryData); + } + catch (err) { + return retry(policy, request, response, retryData, err); + } + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + throw err; + } + else { + return response; + } +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map new file mode 100644 index 0000000..2fbb358 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"exponentialRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,iCAAiC,EACjC,0BAA0B,EAC1B,6BAA6B,EAG7B,QAAQ,EACR,WAAW,EACX,eAAe,GAChB,MAAM,oCAAoC,CAAC;AAC5C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAEzC,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AACzC,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,CAAN,IAAY,SAMX;AAND,WAAY,SAAS;IACnB;;;OAGG;IACH,uDAAW,CAAA;AACb,CAAC,EANW,SAAS,KAAT,SAAS,QAMpB;AA8BD,MAAM,CAAC,MAAM,mBAAmB,GAAiB;IAC/C,UAAU,EAAE,0BAA0B;IACtC,cAAc,EAAE,6BAA6B;IAC7C,iBAAiB,EAAE,iCAAiC;CACrD,CAAC;AAEF;;GAEG;AACH,MAAM,OAAO,sBAAuB,SAAQ,iBAAiB;IAc3D;;;;;;;OAOG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB;QAEzB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;IACxC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aAClD,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC;IAC9E,CAAC;CACF;AAED,KAAK,UAAU,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,iBAAiB,CAAC,aAAqC;QAC9D,MAAM,UAAU,GAAG,aAAa,aAAb,aAAa,uBAAb,aAAa,CAAE,MAAM,CAAC;QACzC,IAAI,UAAU,KAAK,GAAG,KAAI,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA,EAAE;YACtF,OAAO,KAAK,CAAC;SACd;QAED,IACE,UAAU,KAAK,SAAS;YACxB,CAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;YACxC,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;YACA,OAAO,KAAK,CAAC;SACd;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,SAAS,GAAG,eAAe,CACzB;QACE,aAAa,EAAE,MAAM,CAAC,aAAa;QACnC,gBAAgB,EAAE,CAAC;QACnB,gBAAgB,EAAE,MAAM,CAAC,gBAAgB;KAC1C,EACD,SAAS,EACT,YAAY,CACb,CAAC;IAEF,MAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE;QACxF,MAAM,CAAC,IAAI,CAAC,uBAAuB,SAAS,CAAC,aAAa,EAAE,CAAC,CAAC;QAC9D,IAAI;YACF,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;YAClE,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC;SAC/C;QAAC,OAAO,GAAQ,EAAE;YACjB,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;SACzD;KACF;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;QACjD,qFAAqF;QACrF,MAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;QACJ,MAAM,GAAG,CAAC;KACX;SAAM;QACL,OAAO,QAAQ,CAAC;KACjB;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n 
DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\nimport { logger } from \"../log\";\n\n/**\n * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time.\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - Base time between retries.\n * @param maxRetryInterval - Maximum time to wait between retries.\n */\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * Describes the Retry Mode type. Currently supporting only Exponential.\n */\nexport enum RetryMode {\n /**\n * Currently supported retry mode.\n * Each time a retry happens, it will take exponentially more time than the last time.\n */\n Exponential,\n}\n\n/**\n * Options that control how to retry failed requests.\n */\nexport interface RetryOptions {\n /**\n * The maximum number of retry attempts. Defaults to 3.\n */\n maxRetries?: number;\n\n /**\n * The amount of delay in milliseconds between retry attempts. Defaults to 30000\n * (30 seconds). The delay increases exponentially with each retry up to a maximum\n * specified by maxRetryDelayInMs.\n */\n retryDelayInMs?: number;\n\n /**\n * The maximum delay in milliseconds allowed before retrying an operation. Defaults\n * to 90000 (90 seconds).\n */\n maxRetryDelayInMs?: number;\n\n /**\n * Currently supporting only Exponential mode.\n */\n mode?: RetryMode;\n}\n\nexport const DefaultRetryOptions: RetryOptions = {\n maxRetries: DEFAULT_CLIENT_RETRY_COUNT,\n retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL,\n maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n};\n\n/**\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @param nextPolicy - The next RequestPolicy in the pipeline chain.\n * @param options - The options for this RequestPolicy.\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\nasync function retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n function shouldPolicyRetry(responseParam?: HttpOperationResponse): boolean {\n const statusCode = responseParam?.status;\n if (statusCode === 503 && response?.headers.get(Constants.HeaderConstants.RETRY_AFTER)) {\n return false;\n }\n\n if (\n statusCode === undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n return true;\n }\n\n retryData = updateRetryData(\n {\n retryInterval: policy.retryInterval,\n minRetryInterval: 0,\n maxRetryInterval: policy.maxRetryInterval,\n },\n retryData,\n requestError\n );\n\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) {\n logger.info(`Retrying request in ${retryData.retryInterval}`);\n try {\n await delay(retryData.retryInterval);\n const res = await policy._nextPolicy.sendRequest(request.clone());\n return retry(policy, request, res, retryData);\n } catch (err: any) {\n return retry(policy, request, response, retryData, err);\n }\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n throw err;\n } else {\n return response;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js new file mode 100644 index 0000000..868c1b7 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. 
+ */ +export function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + create: (nextPolicy, options) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +export class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _requestIdHeaderName) { + super(nextPolicy, options); + this._requestIdHeaderName = _requestIdHeaderName; + } + sendRequest(request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, request.requestId); + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=generateClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map new file mode 100644 index 0000000..a05940d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.js","sourceRoot":"","sources":["../../../src/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB;;;GAGG;AACH,MAAM,UAAU,6BAA6B,CAC3C,mBAAmB,GAAG,wBAAwB;IAE9C,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,6BAA8B,SAAQ,iBAAiB;IAClE,YACE,UAAyB,EACzB,OAA6B,EACrB,oBAA4B;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFnB,yBAAoB,GAApB,oBAAoB,CAAQ;IAGtC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;SACnE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that assigns a unique request id to outgoing requests.\n * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request.\n */\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, request.requestId);\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js new file mode 100644 index 0000000..2258180 
--- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * By default, HTTP connections are maintained for future requests. + */ +export const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=keepAlivePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map new file mode 100644 index 0000000..4cec380 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"keepAlivePolicy.js","sourceRoot":"","sources":["../../../src/policies/keepAlivePolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAgBzB;;GAEG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAqB;IACvD,MAAM,EAAE,IAAI;CACb,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,gBAAmC;IACjE,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,gBAAgB,IAAI,uBAAuB,CAAC,CAAC;QAC/F,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IACpD;;;;;;OAMG;IACH,YACE,UAAyB,EACzB,OAA6B,EACZ,gBAAkC;QAEnD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFV,qBAAgB,GAAhB,gBAAgB,CAAkB;IAGrD,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;QACjD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Options for how HTTP connections should be maintained for future\n * requests.\n */\nexport interface KeepAliveOptions {\n /**\n * When true, connections will be kept alive for multiple requests.\n * Defaults to true.\n */\n enable: boolean;\n}\n\n/**\n * By default, HTTP connections are maintained for future requests.\n */\nexport const DefaultKeepAliveOptions: KeepAliveOptions = {\n enable: true,\n};\n\n/**\n * Creates a policy that controls whether 
HTTP connections are maintained on future requests.\n * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests.\n * @returns An instance of the {@link KeepAlivePolicy}\n */\nexport function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);\n },\n };\n}\n\n/**\n * KeepAlivePolicy is a policy used to control keep alive settings for every request.\n */\nexport class KeepAlivePolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param keepAliveOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private readonly keepAliveOptions: KeepAliveOptions\n ) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResourceLike): Promise {\n request.keepAlive = this.keepAliveOptions.enable;\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js new file mode 100644 index 0000000..05d1225 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Sanitizer } from "../util/sanitizer"; +import { logger as coreLogger } from "../log"; +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +export function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger = coreLogger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. 
By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map new file mode 100644 index 0000000..ac9f983 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.js","sourceRoot":"","sources":["../../../src/policies/logPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,MAAM,IAAI,UAAU,EAAE,MAAM,QAAQ,CAAC;AAiC9C;;;;GAIG;AACH,MAAM,UAAU,SAAS,CAAC,iBAAmC,EAAE;IAC7D,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QAC5D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,SAAU,SAAQ,iBAAiB;IA4C9C,YACE,UAAyB,EACzB,OAA6B,EAC7B,EACE,MAAM,GAAG,UAAU,CAAC,IAAI,EACxB,kBAAkB,GAAG,EAAE,EACvB,sBAAsB,GAAG,EAAE,MACP,EAAE;QAExB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,CAAC,CAAC;IACjF,CAAC;IApDD;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB,CAAC,kBAA+B;QAC3D,IAAI,CAAC,SAAS,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;IACzD,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB;QAC/B,OAAO,IAAI,CAAC,SAAS,CAAC,sBAAsB,CAAC;IAC/C,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB,CAAC,sBAAmC;QACnE,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,sBAAsB,CAAC;IACjE,CAAC;IAgBM,WAAW,CAAC,OAAwB;QACzC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;YAAE,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;QAEvE,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QACzB,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC9F,CAAC;IAEO,UAAU,CAAC,OAAwB;QACzC,IAAI,CAAC,MAAM,CAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IAC9D,CAAC;IAEO,WAAW,CAAC,QAA+B;QACjD,IAAI,CAAC,MAAM,CAAC,yBAAyB,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;QACxD,IAAI,CAAC,MAAM,CAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;QACrE,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Debugger } from \"@azure/logger\";\nimport { HttpOperationResponse } from 
\"../httpOperationResponse\";\nimport { Sanitizer } from \"../util/sanitizer\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger as coreLogger } from \"../log\";\n\n/**\n * Options to pass to the {@link logPolicy}.\n * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged.\n */\nexport interface LogPolicyOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to:\n * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id,\n * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials,\n * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers,\n * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding,\n * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match,\n * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent.\n *\n * Any headers specified in this field will be added to that list.\n * Any other values will be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n\n /**\n * The Debugger (logger) instance to use for writing pipeline logs.\n */\n logger?: Debugger;\n}\n\n/**\n * Creates a policy that logs information about the outgoing request and the incoming responses.\n * @param loggingOptions - Logging options.\n * @returns An instance of the {@link LogPolicy}\n */\nexport function logPolicy(loggingOptions: LogPolicyOptions = {}): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new LogPolicy(nextPolicy, options, loggingOptions);\n },\n };\n}\n\n/**\n * A policy that logs information about the outgoing request and the incoming responses.\n */\nexport class LogPolicy extends BaseRequestPolicy {\n logger: Debugger;\n sanitizer: Sanitizer;\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedHeaderNames(): Set {\n return this.sanitizer.allowedHeaderNames;\n }\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedHeaderNames(allowedHeaderNames: Set) {\n this.sanitizer.allowedHeaderNames = allowedHeaderNames;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. 
By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedQueryParameters(): Set {\n return this.sanitizer.allowedQueryParameters;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedQueryParameters(allowedQueryParameters: Set) {\n this.sanitizer.allowedQueryParameters = allowedQueryParameters;\n }\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n {\n logger = coreLogger.info,\n allowedHeaderNames = [],\n allowedQueryParameters = [],\n }: LogPolicyOptions = {}\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters });\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!this.logger.enabled) return this._nextPolicy.sendRequest(request);\n\n this.logRequest(request);\n return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response));\n }\n\n private logRequest(request: WebResourceLike): void {\n this.logger(`Request: ${this.sanitizer.sanitize(request)}`);\n }\n\n private logResponse(response: HttpOperationResponse): HttpOperationResponse {\n this.logger(`Response status code: ${response.status}`);\n this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`);\n return response;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js new file mode 100644 index 0000000..3646799 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export function getDefaultUserAgentKey() { + return "x-ms-useragent"; +} +export function getPlatformSpecificData() { + const navigator = self.navigator; + const osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map new file mode 100644 index 0000000..b6f1956 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAalC,MAAM,UAAU,sBAAsB;IACpC,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;IAChD,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;KAChE,CAAC;IAEF,OAAO,CAAC,MAAM,CAAC,CAAC;AAClB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-useragent\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js new file mode 100644 index 0000000..e1210a9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as os from "os"; +import { Constants } from "../util/constants"; +export function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +export function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os.arch()}-${os.type()}-${os.release()})`, + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map new file mode 100644 index 0000000..738f7c7 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,MAAM,UAAU,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,OAAO,EAAE,GAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as os from \"os\";\nimport { Constants } from \"../util/constants\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js new file mode 100644 index 0000000..fe17cf3 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const { Platform } = require("react-native"); // eslint-disable-line import/no-extraneous-dependencies, @typescript-eslint/no-require-imports +export function getDefaultUserAgentKey() { + return "x-ms-useragent"; +} +export function getPlatformSpecificData() { + const { major, minor, patch } = Platform.constants.reactNativeVersion; + const runtimeInfo = { + key: "react-native", + value: `${major}.${minor}.${patch}`, + }; + const osInfo = { + key: "OS", + value: `${Platform.OS}-${Platform.Version}`, + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.native.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map new file mode 100644 index 0000000..641603b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.native.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.native.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,MAAM,EAAE,QAAQ,EAAE,GAAG,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,+FAA+F;AAE7I,MAAM,UAAU,sBAAsB;IACpC,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAC,SAAS,CAAC,kBAAkB,CAAC;IACtE,MAAM,WAAW,GAAG;QAClB,GAAG,EAAE,cAAc;QACnB,KAAK,EAAE,GAAG,KAAK,IAAI,KAAK,IAAI,KAAK,EAAE;KACpC,CAAC;IAEF,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,GAAG,QAAQ,CAAC,EAAE,IAAI,QAAQ,CAAC,OAAO,EAAE;KAC5C,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"react-native\" section in package.json.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\nconst { Platform } = require(\"react-native\"); // eslint-disable-line import/no-extraneous-dependencies, @typescript-eslint/no-require-imports\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-useragent\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const { major, minor, patch } = Platform.constants.reactNativeVersion;\n const runtimeInfo = {\n key: \"react-native\",\n value: `${major}.${minor}.${patch}`,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `${Platform.OS}-${Platform.Version}`,\n };\n\n return [runtimeInfo, osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js new file mode 100644 index 0000000..19119e5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// BaseRequestPolicy has a protected constructor. +/* eslint-disable @typescript-eslint/no-useless-constructor */ +import { BaseRequestPolicy, } from "./requestPolicy"; +export function ndJsonPolicy() { + return { + create: (nextPolicy, options) => { + return new NdJsonPolicy(nextPolicy, options); + }, + }; +} +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends a request. 
+ */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=ndJsonPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map new file mode 100644 index 0000000..4513fcf --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ndJsonPolicy.js","sourceRoot":"","sources":["../../../src/policies/ndJsonPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,iDAAiD;AACjD,8DAA8D;AAE9D,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,UAAU,YAAY;IAC1B,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC/C,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,YAAa,SAAQ,iBAAiB;IAC1C;;OAEG;IACH,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,4DAA4D;QAC5D,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACpE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;YACtC,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBACvB,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;aACzE;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// BaseRequestPolicy has a protected constructor.\n/* eslint-disable @typescript-eslint/no-useless-constructor */\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nexport function ndJsonPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new NdJsonPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * NdJsonPolicy that formats a JSON array as newline-delimited JSON\n */\nclass NdJsonPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends a request.\n */\n public async sendRequest(request: WebResourceLike): Promise {\n // There currently isn't a good way to bypass the serializer\n if (typeof request.body === \"string\" && request.body.startsWith(\"[\")) {\n const body = JSON.parse(request.body);\n if (Array.isArray(body)) {\n request.body = body.map((item) => JSON.stringify(item) + \"\\n\").join(\"\");\n }\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js new file mode 100644 index 0000000..84490b8 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js @@ -0,0 +1,24 @@ +// Copyright (c) 
Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +const errorMessage = "ProxyPolicy is not supported in browser environment"; +export function getDefaultProxySettings(_proxyUrl) { + return undefined; +} +export function proxyPolicy(_proxySettings) { + return { + create: (_nextPolicy, _options) => { + throw new Error(errorMessage); + }, + }; +} +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + throw new Error(errorMessage); + } + sendRequest(_request) { + throw new Error(errorMessage); + } +} +//# sourceMappingURL=proxyPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map new file mode 100644 index 0000000..c37da19 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAKzB,MAAM,YAAY,GAAG,qDAAqD,CAAC;AAE3E,MAAM,UAAU,uBAAuB,CAAC,SAAkB;IACxD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,CAAC,WAA0B,EAAE,QAA8B,EAAE,EAAE;YACrE,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChC,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,WAAY,SAAQ,iBAAiB;IAChD,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;IAChC,CAAC;IAEM,WAAW,CAAC,QAAyB;QAC1C,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;IAChC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst errorMessage = \"ProxyPolicy is not supported in browser environment\";\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptions) => {\n throw new Error(errorMessage);\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n throw new Error(errorMessage);\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw new Error(errorMessage);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js new file mode 100644 index 0000000..031230b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; +import { getEnvironmentValue } from "../util/utils"; +/** + * Stores the patterns specified in NO_PROXY environment variable. 
+ * @internal + */ +export const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +export function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? 
void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; + } + sendRequest(request) { + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map new file mode 100644 index 0000000..484a8f4 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.js","sourceRoot":"","sources":["../../../src/policies/proxyPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC,OAAO,EAAE,mBAAmB,EAAE,MAAM,eAAe,CAAC;AAEpD;;;GAGG;AACH,MAAM,CAAC,MAAM,iBAAiB,GAAa,EAAE,CAAC;AAC9C,IAAI,iBAAiB,GAAY,KAAK,CAAC;AAEvC,yDAAyD;AACzD,MAAM,iBAAiB,GAAyB,IAAI,GAAG,EAAE,CAAC;AAE1D,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,MAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;;;;GAIG;AACH,SAAS,UAAU,CACjB,GAAW,EACX,WAAqB,EACrB,WAAkC;IAElC,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC9B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YACtB,mEAAmE;YACnE,mDAAmD;YACnD,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,MAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,iBAAiB,GAAG,IAAI,CAAC;IACzB,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC;aAC1B
,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAED,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,cAAc,EAAE,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5E,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,MAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ;QACR,QAAQ;KACT,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CACzB,aAA6B,EAC7B,OAGC;IAED,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,IAAI,CAAC,iBAAiB,EAAE;QACtB,iBAAiB,CAAC,IAAI,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;KAC1C;IACD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,oBAA0C,EAAE,EAAE;YAChF,OAAO,IAAI,WAAW,CACpB,UAAU,EACV,oBAAoB,EACpB,aAAc,EACd,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,CAC3B,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,GAAW;IAKrC,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,MAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3D,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,MAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACpE,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1E,MAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ;QACR,QAAQ;QACR,cAAc;KACf,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,WAAY,SAAQ,iBAAiB;IAChD,YACE,UAAyB,EACzB,OAA6B,EACtB,aAA4B,EAC3B,iBAA4B;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAHpB,kBAAa,GAAb,aAAa,CAAe;QAC3B,sBAAiB,GAAjB,iBAAiB,CAAW;IAGtC,CAAC;IAEM,WAAW,CAAC,OAAwB;;QACzC,IACE,CAAC,OAAO,CAAC,aAAa;YACtB,CAAC,UAAU,CACT,OAAO,CAAC,GAAG,EACX,MAAA,IAAI,CAAC,iBAAiB,mCAAI,iBAAiB,EAC3C,IAAI,CAAC,iBAAiB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,iBAAiB,CACvD,EACD;YACA,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getEnvironmentValue } from \"../util/utils\";\n\n/**\n * Stores the patterns specified in NO_PROXY environment variable.\n * @internal\n */\nexport const globalNoProxyList: string[] = [];\nlet noProxyListLoaded: boolean = false;\n\n/** A cache of whether a host should bypass the proxy. 
*/\nconst globalBypassedMap: Map = new Map();\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n/**\n * Check whether the host of a given `uri` matches any pattern in the no proxy list.\n * If there's a match, any request sent to the same host shouldn't have the proxy settings set.\n * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\n */\nfunction isBypassed(\n uri: string,\n noProxyList: string[],\n bypassedMap?: Map\n): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (bypassedMap?.has(host)) {\n return bypassedMap.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n bypassedMap?.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n noProxyListLoaded = true;\n if (noProxy) {\n return noProxy\n .split(\",\")\n .map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed.\n * @param proxyUrl - URL of the proxy\n * @returns The default proxy settings, or undefined.\n */\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\n/**\n * A policy that allows one to apply proxy settings to all requests.\n * If not passed static settings, they will be retrieved from the HTTPS_PROXY\n * or HTTP_PROXY environment variables.\n * @param proxySettings - ProxySettings to use on each request.\n * @param options - additional settings, for example, custom NO_PROXY patterns\n */\nexport function proxyPolicy(\n proxySettings?: ProxySettings,\n options?: {\n /** a list of patterns to override those loaded from NO_PROXY environment variable. 
*/\n customNoProxyList?: string[];\n }\n): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n if (!noProxyListLoaded) {\n globalNoProxyList.push(...loadNoProxy());\n }\n return {\n create: (nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions) => {\n return new ProxyPolicy(\n nextPolicy,\n requestPolicyOptions,\n proxySettings!,\n options?.customNoProxyList\n );\n },\n };\n}\n\nfunction extractAuthFromUrl(url: string): {\n username?: string;\n password?: string;\n urlWithoutAuth: string;\n} {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public proxySettings: ProxySettings,\n private customNoProxyList?: string[]\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (\n !request.proxySettings &&\n !isBypassed(\n request.url,\n this.customNoProxyList ?? globalNoProxyList,\n this.customNoProxyList ? undefined : globalBypassedMap\n )\n ) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js new file mode 100644 index 0000000..2a8a059 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { URLBuilder } from "../url"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +export const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. 
+ */ +export class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map new file mode 100644 index 0000000..a4c6c8c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"redirectPolicy.js","sourceRoot":"","sources":["../../../src/policies/redirectPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC;;GAEG;AACH,MAAM,eAAe,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AAkBxC,MAAM,CAAC,MAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,cAAc,GAAG,EAAE;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QACjE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,cAAe,SAAQ,iBAAiB;IACnD,YAAY,UAAyB,EAAE,OAA6B,EAAW,aAAa,EAAE;QAC5F,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QADkD,eAAU,GAAV,UAAU,CAAK;IAE9F,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC;IAC3D,CAAC;CACF;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEtB,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC;IACrC,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;QACd,CAAC,MAAM,KAAK,GAAG;YACb,CAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;QACjB,CAAC,CAAC,MAAM,CAAC,UAAU,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,EAC1D;QACA,MAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;QAEjC,+DAA+D;QAC/D,+EAA+E;QAC/E,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,GAAG,E
AAE,cAAc,GAAG,CAAC,CAAC,CAAC,CAAC;KACnE;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Methods that are allowed to follow redirects 301 and 302\n */\nconst allowedRedirect = [\"GET\", \"HEAD\"];\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /**\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /**\n * The maximum number of times the redirect URL will be tried before\n * failing. Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\n/**\n * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n * @param maximumRetries - Maximum number of redirects to follow.\n * @returns An instance of the {@link RedirectPolicy}\n */\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\n/**\n * Resends the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n */\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, readonly maxRetries = 20) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && allowedRedirect.includes(request.method)) ||\n (status === 302 && allowedRedirect.includes(request.method)) ||\n (status === 303 && request.method === \"POST\") ||\n status === 307) &&\n (!policy.maxRetries || currentRetries < policy.maxRetries)\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n if (status === 303) {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1));\n }\n\n return Promise.resolve(response);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js new file mode 100644 index 0000000..639c271 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +/** + * The base class from which all request policies derive. + */ +export class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export class RequestPolicyOptions { + constructor(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return (!!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. 
+ */ + log(logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + } +} +//# sourceMappingURL=requestPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map new file mode 100644 index 0000000..eb09d86 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.js","sourceRoot":"","sources":["../../../src/policies/requestPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAsB/D;;GAEG;AACH,MAAM,OAAgB,iBAAiB;IACrC;;OAEG;IACH;IACE;;OAEG;IACM,WAA0B;IACnC;;OAEG;IACM,QAAkC;QAJlC,gBAAW,GAAX,WAAW,CAAe;QAI1B,aAAQ,GAAR,QAAQ,CAA0B;IAC1C,CAAC;IAQJ;;;;OAIG;IACI,SAAS,CAAC,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;CACF;AAsBD;;GAEG;AACH,MAAM,OAAO,oBAAoB;IAC/B,YAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;IAAG,CAAC;IAEpD;;;;OAIG;IACI,SAAS,CAAC,QAA8B;QAC7C,OAAO,CACL,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAK,oBAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,CACzC,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\n/**\n * The underlying structure of a request policy.\n */\nexport interface RequestPolicy {\n /**\n * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made.\n * @param httpRequest - {@link WebResourceLike} describing the request to be made.\n */\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\n/**\n * The base class from which all request policies derive.\n */\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n /**\n * The main method to implement that manipulates a request/response.\n */\n protected constructor(\n /**\n * The next policy in the pipeline. 
Each policy is responsible for executing the next one if the request is to continue through the pipeline.\n */\n readonly _nextPolicy: RequestPolicy,\n /**\n * The options that can be passed to a given request policy.\n */\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n /**\n * Sends a network request based on the given web resource.\n * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made.\n */\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js new file mode 100644 index 0000000..9713bcc --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { delay } from "@azure/core-util"; +export function rpRegistrationPolicy(retryTimeout = 30) { + return { + create: (nextPolicy, options) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +export class RPRegistrationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _retryTimeout = 30) { + super(nextPolicy, options); + this._retryTimeout = _retryTimeout; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param originalRequest - The original request + * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. + * @returns A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo = false) { + const reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param body - The response body received after making the original request. + * @returns The name of the RP if condition is satisfied else undefined. 
+ */ +function checkRPNotRegisteredError(body) { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param url - The original request url + * @returns The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} +/** + * Registers the given provider. + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param provider - The provider name to be registered. + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + */ +async function registerRP(policy, urlPrefix, provider, originalRequest) { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + const response = await policy._nextPolicy.sendRequest(reqOptions); + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param url - The request url for polling + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns True if RP Registration is successful. 
+ */ +async function getRegistrationStatus(policy, url, originalRequest) { + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + const res = await policy._nextPolicy.sendRequest(reqOptions); + const obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + await delay(policy._retryTimeout * 1000); + return getRegistrationStatus(policy, url, originalRequest); + } +} +//# sourceMappingURL=rpRegistrationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map new file mode 100644 index 0000000..10c06f2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.js","sourceRoot":"","sources":["../../../src/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AACvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC,MAAM,UAAU,oBAAoB,CAAC,YAAY,GAAG,EAAE;IACpD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,oBAAqB,SAAQ,iBAAiB;IACzD,YACE,UAAyB,EACzB,OAA6B,EACpB,gBAAgB,EAAE;QAE3B,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFlB,kBAAa,GAAb,aAAa,CAAK;IAG7B,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;IACnE,CAAC;CACF;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,MAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,OAAO,CACL,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;gBAC5C,wFAAwF;gBACxF,kFAAkF;gBAClF,uFAAuF;iBACtF,KAAK,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC;iBAClB,IAAI,CAAC,CAAC,kBAAkB,EAAE,EAAE;gBAC3B,IAAI,kBAAkB,EAAE;oBACtB,2EAA2E;oBAC3E,0EAA0E;oBAC1E,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC,CACL,CAAC;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;GAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAW,GAAG,KAAK;IAEnB,MAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;IAED,wEAAwE;IACxE,iDAAiD;IACjD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;IAEvE,uCAAuC;IACvC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;GAKG;AACH,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAQ,EAAE;YACjB,cAAc;SACf;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;YACA,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;I
AChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,yDAAyD,GAAG,GAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;GAOG;AACH,KAAK,UAAU,UAAU,CACvB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,MAAM,OAAO,GAAG,GAAG,SAAS,aAAa,QAAQ,kCAAkC,CAAC;IACpF,MAAM,MAAM,GAAG,GAAG,SAAS,aAAa,QAAQ,yBAAyB,CAAC;IAC1E,MAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IAClE,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,IAAI,KAAK,CAAC,uBAAuB,QAAQ,2CAA2C,CAAC,CAAC;KAC7F;IACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;AAChE,CAAC;AAED;;;;;;;;GAQG;AACH,KAAK,UAAU,qBAAqB,CAClC,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,MAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IAC7D,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,CAAC;IAC3B,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;QACrF,OAAO,IAAI,CAAC;KACb;SAAM;QACL,MAAM,KAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,CAAC;QACzC,OAAO,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;KAC5D;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"../util/utils\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. 
We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param originalRequest - The original request\n * @param reuseUrlToo - Should the url from the original request be reused as well. Default false.\n * @returns A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param body - The response body received after making the original request.\n * @returns The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err: any) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param url - The original request url\n * @returns The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param provider - The provider name to be registered.\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n */\nasync function registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n const response 
= await policy._nextPolicy.sendRequest(reqOptions);\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n}\n\n/**\n * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param url - The request url for polling\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns True if RP Registration is successful.\n */\nasync function getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n const res = await policy._nextPolicy.sendRequest(reqOptions);\n const obj = res.parsedBody;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n await delay(policy._retryTimeout * 1000);\n return getRegistrationStatus(policy, url, originalRequest);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js new file mode 100644 index 0000000..c298812 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export function signingPolicy(authenticationProvider) { + return { + create: (nextPolicy, options) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
+ */ +export class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} +//# sourceMappingURL=signingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map new file mode 100644 index 0000000..1fbcc98 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.js","sourceRoot":"","sources":["../../../src/policies/signingPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAKzB;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;QACxE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,iBAAiB;IAClD,YACE,UAAyB,EACzB,OAA6B,EACtB,sBAAgD;QAEvD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFpB,2BAAsB,GAAtB,sBAAsB,CAA0B;IAGzD,CAAC;IAED,WAAW,CAAC,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC1D,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,EAAE,CACpD,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAC1C,CAAC;IACJ,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n * @param authenticationProvider - The authentication provider.\n * @returns An instance of the {@link SigningPolicy}.\n */\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\n/**\n * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n */\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js new file mode 100644 index 0000000..3832f41 --- /dev/null +++ 
b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { DEFAULT_CLIENT_MAX_RETRY_INTERVAL, DEFAULT_CLIENT_MIN_RETRY_INTERVAL, DEFAULT_CLIENT_RETRY_COUNT, DEFAULT_CLIENT_RETRY_INTERVAL, isNumber, shouldRetry, updateRetryData, } from "../util/exponentialBackoffStrategy"; +import { delay } from "@azure/core-util"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export class SystemErrorRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? 
maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} +async function retry(policy, request, operationResponse, err, retryData) { + retryData = updateRetryData(policy, retryData, err); + function shouldPolicyRetry(_response, error) { + if (error && + error.code && + (error.code === "ETIMEDOUT" || + error.code === "ESOCKETTIMEDOUT" || + error.code === "ECONNREFUSED" || + error.code === "ECONNRESET" || + error.code === "ENOENT")) { + return true; + } + return false; + } + if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } + catch (nestedErr) { + return retry(policy, request, operationResponse, nestedErr, retryData); + } + } + else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map new file mode 100644 index 0000000..5575e7b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,iCAAiC,EACjC,iCAAiC,EACjC,0BAA0B,EAC1B,6BAA6B,EAG7B,QAAQ,EACR,WAAW,EACX,eAAe,GAChB,MAAM,oCAAoC,CAAC;AAG5C,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC;;;;;;;GAOG;AACH,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,MAAM,OAAO,sBAAuB,SAAQ,iBAAiB;IAM3D,YACE,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;QACtC,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;IACxC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IACnE,CAAC;CACF;AAED,KAAK,UAAU,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;IAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;IAEpD,SAAS,iBAAiB,CAAC,SAAiC,EAAE,KAAkB;QAC9E,IACE,KAAK;YACL,KAAK,CAAC,IAAI;YACV,CAAC,KAAK,CAAC,IAAI,KAAK,WAAW;gBACzB,KAAK,CAAC,IAAI,KAAK,iBAAiB;gBAChC,KAAK,CAAC,IAAI,KAAK,cAAc;gBAC7B,KAAK,CAAC,IAAI,KAAK,YAAY;gBAC3B,KAAK,CAAC,IAAI,KAAK,QAAQ,CAAC,EAC1B;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,IAAI,WAAW,CAAC,MAAM,CA
AC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,iBAAiB,EAAE,GAAG,CAAC,EAAE;QACxF,mFAAmF;QACnF,IAAI;YACF,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;SACxD;QAAC,OAAO,SAAc,EAAE;YACvB,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;SACxE;KACF;SAAM;QACL,IAAI,GAAG,EAAE;YACP,qFAAqF;YACrF,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;SACxC;QACD,OAAO,iBAAiB,CAAC;KAC1B;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n * @returns An instance of the {@link SystemErrorRetryPolicy}\n */\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n\n function shouldPolicyRetry(_response?: HttpOperationResponse, error?: RetryError): boolean {\n if (\n error &&\n error.code &&\n (error.code === \"ETIMEDOUT\" ||\n error.code === \"ESOCKETTIMEDOUT\" ||\n error.code === \"ECONNREFUSED\" ||\n error.code === \"ECONNRESET\" ||\n error.code === \"ENOENT\")\n ) {\n return true;\n }\n return false;\n }\n\n if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (nestedErr: any) {\n return retry(policy, request, operationResponse, nestedErr, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js new file mode 100644 index 0000000..c9fac19 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { AbortError } from "@azure/abort-controller"; +import { Constants } from "../util/constants"; +import { DEFAULT_CLIENT_MAX_RETRY_COUNT } from "../util/throttlingRetryStrategy"; +import { delay } from "@azure/core-util"; +const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export function throttlingRetryPolicy() { + return { + create: (nextPolicy, options) => { + return new ThrottlingRetryPolicy(nextPolicy, options); + }, + }; +} +const StandardAbortMessage = "The operation was aborted."; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export class ThrottlingRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _handleResponse) { + super(nextPolicy, options); + this.numberOfRetries = 0; + this._handleResponse = _handleResponse || this._defaultResponseHandler; + } + async sendRequest(httpRequest) { + const response = await this._nextPolicy.sendRequest(httpRequest.clone()); + if (response.status !== StatusCodes.TooManyRequests && + response.status !== StatusCodes.ServiceUnavailable) { + return response; + } + else { + return this._handleResponse(httpRequest, response); + } + } + async _defaultResponseHandler(httpRequest, httpResponse) { + var _a; + const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (retryAfterHeader) { + const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (delayInMs) { + this.numberOfRetries += 1; + await delay(delayInMs, { + abortSignal: httpRequest.abortSignal, + abortErrorMsg: StandardAbortMessage, + }); + if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw new AbortError(StandardAbortMessage); + } + if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { + return this.sendRequest(httpRequest); + } + else { + return this._nextPolicy.sendRequest(httpRequest); + } + } + } + return httpResponse; + } + static parseRetryAfterHeader(headerValue) { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + } + static parseDateRetryAfterHeader(headerValue) { + try { + const now = Date.now(); + const date = Date.parse(headerValue); + const diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + } +} +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map new file mode 100644 index 0000000..9d34321 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AACrD,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,8BAA8B,EAAE,MAAM,iCAAiC,CAAC;AAGjF,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAMzC,MAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AAExD;;;;;;;;;GASG;AACH,MAAM,UAAU,qBAAqB;IACnC,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QACxD,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,oBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;GAQG;AACH,MAAM,OAAO,qBAAsB,SAAQ,iBAAiB;IAI1D,YACE,UAAyB,EACzB,OAA6B,EAC7B,eAAiC;QAEjC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAPrB,oBAAe,GAAG,CAAC,CAAC;QAQ1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,IAAI,CAAC,uBAAuB,CAAC;IACzE,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,WAA4B;QACnD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC;QACzE,IACE,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe;YAC/C,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,kBAAkB,EAClD;YACA,OAAO,QAAQ,CAAC;SACjB;aAAM;YACL,OAAO,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;SACpD;IACH,CAAC;IAEO,KAAK,CAAC,uBAAuB,CACnC,WAA4B,EAC5B,YAAmC;;QAEnC,MAAM,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;QAEF,IAAI,gBAAgB,EAAE;YACpB,MAAM,SAAS,GACb,qBAAqB,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,CAAC;YAChE,IAAI,SAAS,EAAE;gBACb,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;gBAE1B,MAAM,KAAK,CAAC,SAAS,EAAE;oBACrB,WAAW,EAAE,WAAW,CAAC,WAAW;oBACpC,aAAa,EAAE,oBAAoB;iBACpC,CAAC,CAAC;gBAEH,IAAI,MAAA,WAAW,CAAC,WAAW,0CAAE,OAAO,EAAE;oBACpC,MAAM,IAAI,UAAU,CAAC,oBAAoB,CAAC,CAAC;iBAC5C;gBAED,IAAI,IAAI,CAAC,eAAe,GAAG,8BAA8B,EAAE;oBACzD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBACtC;qBAAM;oBACL,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBAClD;aACF;SACF;QAED,OAAO,YAAY,CAAC;IACtB,CAAC;IAEM,MAAM,CAAC,qBAAqB,CAAC,WAAmB;QACrD,MAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;IACH,CAAC;IAEM,MAAM,CAAC,yBAAyB,CAAC,WAAmB;QACzD,IAAI;YACF,MAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,MAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,MAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAU,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { Constants } from \"../util/constants\";\nimport { DEFAULT_CLIENT_MAX_RETRY_COUNT } from \"../util/throttlingRetryStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\n\ntype 
ResponseHandler = (\n httpRequest: WebResourceLike,\n response: HttpOperationResponse\n) => Promise;\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n * @returns\n */\nexport function throttlingRetryPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ThrottlingRetryPolicy(nextPolicy, options);\n },\n };\n}\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private _handleResponse: ResponseHandler;\n private numberOfRetries = 0;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n _handleResponse?: ResponseHandler\n ) {\n super(nextPolicy, options);\n this._handleResponse = _handleResponse || this._defaultResponseHandler;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n const response = await this._nextPolicy.sendRequest(httpRequest.clone());\n if (\n response.status !== StatusCodes.TooManyRequests &&\n response.status !== StatusCodes.ServiceUnavailable\n ) {\n return response;\n } else {\n return this._handleResponse(httpRequest, response);\n }\n }\n\n private async _defaultResponseHandler(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse\n ): Promise {\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader) {\n const delayInMs: number | undefined =\n ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);\n if (delayInMs) {\n this.numberOfRetries += 1;\n\n await delay(delayInMs, {\n abortSignal: httpRequest.abortSignal,\n abortErrorMsg: StandardAbortMessage,\n });\n\n if (httpRequest.abortSignal?.aborted) {\n throw new AbortError(StandardAbortMessage);\n }\n\n if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) {\n return this.sendRequest(httpRequest);\n } else {\n return this._nextPolicy.sendRequest(httpRequest);\n }\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static 
parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error: any) {\n return undefined;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js new file mode 100644 index 0000000..2a1b6b3 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { SpanKind, SpanStatusCode, createSpanFunction, getTraceParentHeader, isSpanContextValid, } from "@azure/core-tracing"; +import { logger } from "../log"; +const createSpan = createSpanFunction({ + packagePrefix: "", + namespace: "", +}); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export function tracingPolicy(tracingOptions = {}) { + return { + create(nextPolicy, options) { + return new TracingPolicy(nextPolicy, options, tracingOptions); + }, + }; +} +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export class TracingPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, tracingOptions) { + super(nextPolicy, options); + this.userAgent = tracingOptions.userAgent; + } + async sendRequest(request) { + if (!request.tracingContext) { + return this._nextPolicy.sendRequest(request); + } + const span = this.tryCreateSpan(request); + if (!span) { + return this._nextPolicy.sendRequest(request); + } + try { + const response = await this._nextPolicy.sendRequest(request); + this.tryProcessResponse(span, response); + return response; + } + catch (err) { + this.tryProcessError(span, err); + throw err; + } + } + tryCreateSpan(request) { + var _a; + try { + // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. + // We can pass this as a separate parameter once we upgrade to the latest core-tracing. + const { span } = createSpan(`HTTP ${request.method}`, { + tracingOptions: { + spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: SpanKind.CLIENT }), + tracingContext: request.tracingContext, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? 
void 0 : _a.getValue(Symbol.for("az.namespace")); + if (typeof namespaceFromContext === "string") { + span.setAttribute("az.namespace", namespaceFromContext); + } + span.setAttributes({ + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }); + if (this.userAgent) { + span.setAttribute("http.user_agent", this.userAgent); + } + // set headers + const spanContext = span.spanContext(); + const traceParentHeader = getTraceParentHeader(spanContext); + if (traceParentHeader && isSpanContextValid(spanContext)) { + request.headers.set("traceparent", traceParentHeader); + const traceState = spanContext.traceState && spanContext.traceState.serialize(); + // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent + if (traceState) { + request.headers.set("tracestate", traceState); + } + } + return span; + } + catch (error) { + logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); + return undefined; + } + } + tryProcessError(span, err) { + try { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: err.message, + }); + if (err.statusCode) { + span.setAttribute("http.status_code", err.statusCode); + } + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } + tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + code: SpanStatusCode.OK, + }); + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } +} +//# sourceMappingURL=tracingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map new file mode 100644 index 0000000..16c3c85 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracingPolicy.js","sourceRoot":"","sources":["../../../src/policies/tracingPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAEL,QAAQ,EACR,cAAc,EACd,kBAAkB,EAClB,oBAAoB,EACpB,kBAAkB,GACnB,MAAM,qBAAqB,CAAC;AAG7B,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC,MAAM,UAAU,GAAG,kBAAkB,CAAC;IACpC,aAAa,EAAE,EAAE;IACjB,SAAS,EAAE,EAAE;CACd,CAAC,CAAC;AAYH;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,iBAAuC,EAAE;IACrE,OAAO;QACL,MAAM,CAAC,UAAyB,EAAE,OAA6B;YAC7D,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QAChE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,iBAAiB;IAGlD,YACE,UAAyB,EACzB,OAA6B,EAC7B,cAAoC;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,SAAS,GAAG,cAAc,CAAC,SAAS,CAAC;IAC5C,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE;YAC3B,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;QAEzC,IAAI,CAAC,IAAI,EAAE;YACT,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;YAC7D,IAAI,CAAC,kBAAkB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;YACxC,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;YAChC,MAAM,GAAG,CAAC;SACX;IACH,CAAC;IAED,aAAa,CAAC,OAAwB;;QACpC,IAAI;YACF,sHAAsH;YACtH,uFAAuF;YACvF,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC,QAAQ,OAAO,CAAC,MAAM,EAAE,EAAE;gBACpD,cAAc,EAAE;oBACd,WAAW,kCACL,OAAe,CAAC,WAAW,KAC/B,IAAI,EAAE,QAAQ,CAAC,MAAM,GACtB;oBACD,cAAc,EAAE,OAAO,CAAC,cAAc;iBACvC;aACF,CAAC,CAAC;YAEH,wDAAwD;YACxD,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACvB,IAAI,CAAC,GAAG,EAAE,CAAC;gBACX,OAAO,SAAS,CAAC;aAClB;YAED,MAAM,oBAAoB,GAAG,MAAA,OAAO,CAAC,cAAc,0CAAE,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC;YAE1F,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;gBAC5C,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,oBAAoB,CAAC,CAAC;aACzD;YAED,IAAI,CAAC,aAAa,CAAC;gBACjB,aAAa,EAAE,OAAO,CAAC,MAAM;gBAC7B,UAAU,EAAE,OAAO,CAAC,GAAG;gBACvB,SAAS,EAAE,OAAO,CAAC,SAAS;aAC7B,CAAC,CAAC;YAEH,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;aACtD;YAED,cAAc;YACd,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;YACvC,MAAM,iBAAiB,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;YAC5D,IAAI,iBAAiB,IAAI,kBAAkB,CAAC,WAAW,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,iBAAiB,CAAC,CAAC;gBACtD,MAAM,UAAU,GAAG,WAAW,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC;gBAChF,0FAA0F;gBAC1F,IAAI,UAAU,EAAE;oBACd,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC/C;aACF;YACD,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;YACrF,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;IAEO,eAAe,CAAC,IAAU,EAAE,GAAQ;QAC1C,IAAI;YACF,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,GAAG,CAAC,OAAO;aACrB,CAAC,CAAC;YAEH,IAAI,GAAG,CAAC,UAAU,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;aACvD;YACD,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;SACtF;IACH,CAAC;IAEO,kBAAkB,CAAC,IAAU,EAAE,QAA+B;QACpE,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;YACvD,MAAM,gBAAgB,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;YACjE,IAAI,gBAAgB,EAAE;gBACpB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACzD;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,EAAE;aACxB,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;SACtF;IACH,CAAC;CACF","sourcesC
ontent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n Span,\n SpanKind,\n SpanStatusCode,\n createSpanFunction,\n getTraceParentHeader,\n isSpanContextValid,\n} from \"@azure/core-tracing\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger } from \"../log\";\n\nconst createSpan = createSpanFunction({\n packagePrefix: \"\",\n namespace: \"\",\n});\n\n/**\n * Options to customize the tracing policy.\n */\nexport interface TracingPolicyOptions {\n /**\n * User agent used to better identify the outgoing requests traced by the tracing policy.\n */\n userAgent?: string;\n}\n\n/**\n * Creates a policy that wraps outgoing requests with a tracing span.\n * @param tracingOptions - Tracing options.\n * @returns An instance of the {@link TracingPolicy} class.\n */\nexport function tracingPolicy(tracingOptions: TracingPolicyOptions = {}): RequestPolicyFactory {\n return {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n return new TracingPolicy(nextPolicy, options, tracingOptions);\n },\n };\n}\n\n/**\n * A policy that wraps outgoing requests with a tracing span.\n */\nexport class TracingPolicy extends BaseRequestPolicy {\n private userAgent?: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n tracingOptions: TracingPolicyOptions\n ) {\n super(nextPolicy, options);\n this.userAgent = tracingOptions.userAgent;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n if (!request.tracingContext) {\n return this._nextPolicy.sendRequest(request);\n }\n\n const span = this.tryCreateSpan(request);\n\n if (!span) {\n return this._nextPolicy.sendRequest(request);\n }\n\n try {\n const response = await this._nextPolicy.sendRequest(request);\n this.tryProcessResponse(span, response);\n return response;\n } catch (err: any) {\n this.tryProcessError(span, err);\n throw err;\n }\n }\n\n tryCreateSpan(request: WebResourceLike): Span | undefined {\n try {\n // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier.\n // We can pass this as a separate parameter once we upgrade to the latest core-tracing.\n const { span } = createSpan(`HTTP ${request.method}`, {\n tracingOptions: {\n spanOptions: {\n ...(request as any).spanOptions,\n kind: SpanKind.CLIENT,\n },\n tracingContext: request.tracingContext,\n },\n });\n\n // If the span is not recording, don't do any more work.\n if (!span.isRecording()) {\n span.end();\n return undefined;\n }\n\n const namespaceFromContext = request.tracingContext?.getValue(Symbol.for(\"az.namespace\"));\n\n if (typeof namespaceFromContext === \"string\") {\n span.setAttribute(\"az.namespace\", namespaceFromContext);\n }\n\n span.setAttributes({\n \"http.method\": request.method,\n \"http.url\": request.url,\n requestId: request.requestId,\n });\n\n if (this.userAgent) {\n span.setAttribute(\"http.user_agent\", this.userAgent);\n }\n\n // set headers\n const spanContext = span.spanContext();\n const traceParentHeader = getTraceParentHeader(spanContext);\n if (traceParentHeader && isSpanContextValid(spanContext)) {\n request.headers.set(\"traceparent\", traceParentHeader);\n const traceState = spanContext.traceState && spanContext.traceState.serialize();\n // if tracestate is set, traceparent MUST be 
set, so only set tracestate after traceparent\n if (traceState) {\n request.headers.set(\"tracestate\", traceState);\n }\n }\n return span;\n } catch (error: any) {\n logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`);\n return undefined;\n }\n }\n\n private tryProcessError(span: Span, err: any): void {\n try {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: err.message,\n });\n\n if (err.statusCode) {\n span.setAttribute(\"http.status_code\", err.statusCode);\n }\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n\n private tryProcessResponse(span: Span, response: HttpOperationResponse): void {\n try {\n span.setAttribute(\"http.status_code\", response.status);\n const serviceRequestId = response.headers.get(\"x-ms-request-id\");\n if (serviceRequestId) {\n span.setAttribute(\"serviceRequestId\", serviceRequestId);\n }\n span.setStatus({\n code: SpanStatusCode.OK,\n });\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js new file mode 100644 index 0000000..527856c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { Constants } from "../util/constants"; +import { HttpHeaders } from "../httpHeaders"; +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +export const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. 
+ */ +export class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map new file mode 100644 index 0000000..cbc2aa1 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.js","sourceRoot":"","sources":["../../../src/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAoB7C,SAAS,cAAc;IACrB,MAAM,aAAa,GAAG;QACpB,GAAG,EAAE,WAAW;QAChB,KAAK,EAAE,SAAS,CAAC,eAAe;KACjC,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAY,GAAG,GAAG,EAClB,cAAc,GAAG,GAAG;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,OAAO,GAAG,IAAI,CAAC,GAAG,GAAG,KAAK,EAAE,CAAC;IAC/B,CAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,MAAM,CAAC,MAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE;;;GAGG;AACH,MAAM,UAAU,wBAAwB;IACtC,MAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,MAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,MAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,aAA6B;IAC3D,MAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,KAAK,IAAI;QAC7E,CAAC,CAAC,sBAAsB,EAAE;QAC1B,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC;IACxB,MAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,KAAK,SAAS,IAAI,aAAa,CAAC,KAAK,KAAK,IAAI;QACjF,CAAC,CAAC,wBAAwB,EAAE;QAC5B,CAAC,CAAC,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IACpD,YACW,WAA0B,EAC1B,QAA8B,EAC7B,SAAiB,EACjB,WAAmB;QAE7B,KAAK,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;QALpB,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAAsB;QAC7B,cAAS,GAAT,SAAS,CAAQ;QACjB,gBAAW,GAAX,WAAW,CAAQ;IAG/B,CAAC;IAED,WAAW,CAAC,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IAED;;OAEG;IACH,kBAAkB,CAAC,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n 
RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Telemetry information. Key/value pairs to include inside the User-Agent string.\n */\nexport type TelemetryInfo = { key?: string; value?: string };\n\n/**\n * Options for adding user agent details to outgoing requests.\n */\nexport interface UserAgentOptions {\n /**\n * String prefix to add to the user agent for outgoing requests.\n * Defaults to an empty string.\n */\n userAgentPrefix?: string;\n}\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"core-http\",\n value: Constants.coreHttpVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\n/**\n * The default approach to generate user agents.\n * Uses static information from this package, plus system information available from the runtime.\n */\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\n/**\n * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n * @param userAgentData - Telemetry information.\n * @returns A new {@link UserAgentPolicy}.\n */\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key === undefined || userAgentData.key === null\n ? getDefaultUserAgentKey()\n : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value === undefined || userAgentData.value === null\n ? 
getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\n/**\n * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n */\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptions,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n /**\n * Adds the user agent header to the outgoing request.\n */\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js new file mode 100644 index 0000000..4746997 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as tunnel from "tunnel"; +import { URLBuilder } from "./url"; +export function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +export function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +export function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} +//# sourceMappingURL=proxyAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map new file 
mode 100644 index 0000000..741f8ef --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.js","sourceRoot":"","sources":["../../src/proxyAgent.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAGjC,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AAGnC,MAAM,UAAU,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY,CAAC;IACtE,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;KAClE;IACD,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,IAAI,CAAC,EAAE;QACpC,MAAM,IAAI,KAAK,CAAC,6EAA6E,CAAC,CAAC;KAChG;IACD,MAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,IAAI;YACV,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,GAAG,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC;KAC9D;IAED,MAAM,cAAc,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;IAC9C,MAAM,YAAY,GAAG,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;IAEpD,MAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC1D,OAAO,SAAS,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;AAC7C,CAAC;AAED,MAAM,UAAU,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAA2C;IAE3C,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAO,MAAM,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAO,MAAM,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC;AAED,SAAS,WAAW,CAAC,IAAY;IAC/B,sFAAsF;IACtF,oFAAoF;IACpF,OAAO,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,KAAK,CAAC;AACpC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const host = URLBuilder.parse(proxySettings.host).getHost() as string;\n if (!host) {\n throw new Error(\"Expecting a non-empty host in proxy settings.\");\n }\n if (!isValidPort(proxySettings.port)) {\n throw new Error(\"Expecting a valid port number in the range of [0, 65535] in proxy settings.\");\n }\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: host,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const isRequestHttps = isUrlHttps(requestUrl);\n const isProxyHttps = isUrlHttps(proxySettings.host);\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\nexport function 
isUrlHttps(url: string): boolean {\n const urlScheme = URLBuilder.parse(url).getScheme() || \"\";\n return urlScheme.toLowerCase() === \"https\";\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: tunnel.HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n\nfunction isValidPort(port: number): boolean {\n // any port in 0-65535 range is valid (RFC 793) even though almost all implementations\n // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports\n return 0 <= port && port <= 65535;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js new file mode 100644 index 0000000..ff08c3d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export var QueryCollectionFormat; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. + */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. 
`?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(QueryCollectionFormat || (QueryCollectionFormat = {})); +//# sourceMappingURL=queryCollectionFormat.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map new file mode 100644 index 0000000..2a917a8 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.js","sourceRoot":"","sources":["../../src/queryCollectionFormat.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAN,IAAY,qBAqBX;AArBD,WAAY,qBAAqB;IAC/B;;OAEG;IACH,kCAAS,CAAA;IACT;;OAEG;IACH,kCAAS,CAAA;IACT;;OAEG;IACH,mCAAU,CAAA;IACV;;OAEG;IACH,oCAAW,CAAA;IACX;;OAEG;IACH,wCAAe,CAAA;AACjB,CAAC,EArBW,qBAAqB,KAArB,qBAAqB,QAqBhC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n /**\n * CSV: Each pair of segments joined by a single comma.\n */\n Csv = \",\",\n /**\n * SSV: Each pair of segments joined by a single space character.\n */\n Ssv = \" \",\n /**\n * TSV: Each pair of segments joined by a single tab character.\n */\n Tsv = \"\\t\",\n /**\n * Pipes: Each pair of segments joined by a single pipe character.\n */\n Pipes = \"|\",\n /**\n * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2`\n */\n Multi = \"Multi\",\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/restError.js b/node_modules/@azure/core-http/dist-esm/src/restError.js new file mode 100644 index 0000000..88d4a78 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/restError.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Sanitizer } from "./util/sanitizer"; +import { custom } from "./util/inspect"; +const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ +export class RestError extends Error { + constructor(message, code, statusCode, request, response) { + super(message); + this.name = "RestError"; + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. 
+ */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/restError.js.map b/node_modules/@azure/core-http/dist-esm/src/restError.js.map new file mode 100644 index 0000000..8270ecf --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/restError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.js","sourceRoot":"","sources":["../../src/restError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAE7C,OAAO,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAExC,MAAM,cAAc,GAAG,IAAI,SAAS,EAAE,CAAC;AAEvC;;GAEG;AACH,MAAM,OAAO,SAAU,SAAQ,KAAK;IA8BlC,YACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC;QAEhC,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QAEzB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;IACnD,CAAC;IAED;;OAEG;IACH,CAAC,MAAM,CAAC;QACN,OAAO,cAAc,IAAI,CAAC,OAAO,OAAO,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC;IAC1E,CAAC;;AAnDD;;GAEG;AACa,4BAAkB,GAAW,oBAAoB,CAAC;AAClE;;GAEG;AACa,qBAAW,GAAW,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { Sanitizer } from \"./util/sanitizer\";\nimport { WebResourceLike } from \"./webResource\";\nimport { custom } from \"./util/inspect\";\n\nconst errorSanitizer = new Sanitizer();\n\n/**\n * An error resulting from an HTTP request to a service endpoint.\n */\nexport class RestError extends Error {\n /**\n * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.)\n */\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n /**\n * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete.\n */\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n /**\n * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT).\n */\n code?: string;\n /**\n * The HTTP status code of the response, if one was returned.\n */\n statusCode?: number;\n /**\n * Outgoing request.\n */\n request?: WebResourceLike;\n /**\n * Incoming response.\n */\n response?: HttpOperationResponse;\n /**\n * Any additional details. 
In the case of deserialization errors, can be the processed response.\n */\n details?: unknown;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ) {\n super(message);\n this.name = \"RestError\";\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n\n /**\n * Logging method for util.inspect in Node\n */\n [custom](): string {\n return `RestError: ${this.message} \\n ${errorSanitizer.sanitize(this)}`;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serializer.js b/node_modules/@azure/core-http/dist-esm/src/serializer.js new file mode 100644 index 0000000..434c50a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serializer.js @@ -0,0 +1,918 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* eslint-disable eqeqeq */ +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; +import { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export class Serializer { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + * @deprecated Removing the constraints validation on client side. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value != undefined) { + const valueAsNumber = value; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + const valueAsArray = value; + if (MaxItems != undefined && valueAsArray.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && valueAsArray.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && valueAsArray.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && valueAsArray.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper, object, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object == undefined) { + payload = object; + } + else { + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper, responseBody, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xmlCharKey; + const castResponseBody = responseBody; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (castResponseBody[XML_ATTRKEY] != undefined && + castResponseBody[xmlCharKey] != undefined) { + responseBody = castResponseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = base64.encodeByteArray(value); + } + return returnValue; +} +function serializeBase64UrlType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = bufferToBase64Url(value) || ""; + } + return returnValue; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!utils.isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by `className`. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + * @param objectName - Name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && + (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? 
`xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName != undefined) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a, _b; + const xmlCharKey = (_a = options.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== undefined) { + instance[key] = responseBody[xmlCharKey]; + } + else if (typeof responseBody === "string") { + // The special case where xml parser parses "content" into JSON of + // `{ name: "content"}` instead of `{ name: { "_": "content" }}` + instance[key] = responseBody; + } + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... + + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). 
However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, 
polymorphicPropertyName) { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export function serializeObject(toSerialize) { + const castToSerialize = toSerialize; + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = base64.encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + const array = []; + for (let i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + const dictionary = {}; + for (const property in toSerialize) { + dictionary[property] = serializeObject(castToSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + const result = {}; + for (const key of o) { + result[key] = key; + } + return result; +} +/** + * String enum containing the string types of property mappers. 
+ */ +// eslint-disable-next-line @typescript-eslint/no-redeclare +export const MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serializer.js.map b/node_modules/@azure/core-http/dist-esm/src/serializer.js.map new file mode 100644 index 0000000..f5bf9fa --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serializer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../../src/serializer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,2BAA2B;AAE3B,OAAO,KAAK,MAAM,MAAM,eAAe,CAAC;AACxC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,0BAA0B,CAAC;AAEvF,qLAAqL;AAErL;;;;GAIG;AACH,MAAM,OAAO,UAAU;IACrB;IACE;;OAEG;IACa,eAAuC,EAAE;IACzD;;OAEG;IACa,KAAe;QAJf,iBAAY,GAAZ,YAAY,CAA6B;QAIzC,UAAK,GAAL,KAAK,CAAU;IAC9B,CAAC;IAEJ;;;;;;OAMG;IACH,mBAAmB,CAAC,MAAc,EAAE,KAAc,EAAE,UAAkB;QACpE,MAAM,cAAc,GAAG,CACrB,cAAuC,EACvC,eAAoB,EACb,EAAE;YACT,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,iBAAiB,KAAK,oCAAoC,cAAc,MAAM,eAAe,GAAG,CAC/G,CAAC;QACJ,CAAC,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YAC5C,MAAM,aAAa,GAAG,KAAe,CAAC;YACtC,MAAM,EACJ,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,SAAS,EACT,UAAU,EACV,OAAO,EACP,WAAW,GACZ,GAAG,MAAM,CAAC,WAAW,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;gBACtE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;gBACtE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;gBACrE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;gBACrE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,MAAM,YAAY,GAAG,KAAc,CAAC;YACpC,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;gBAC3D,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;gBAC7D,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;gBAC3D,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;gBAC7D,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,aAAa,GAAG,UAAU,KAAK,CAAC,EAAE;gBAC/D,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,MAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,YAAY,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,CAAS,EAAE,EAAc,EAAE,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EACnF;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;IACH,CAAC;IAED;;;;;;;;OAQG;IACH,SAAS,CACP,MAAc,EACd,MAAe,EACf,UAAmB,EACnB,UAA6B,EAAE;;QAE/B,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;YAChC,WAAW,EAAE,MAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;YACzC,UAAU,EAAE,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YAC5C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,MAAM,G
AAG,MAAM,CAAC,YAAY,CAAC;SAC9B;QAED,mDAAmD;QACnD,sDAAsD;QACtD,mDAAmD;QACnD,wBAAwB;QACxB,iCAAiC;QACjC,0CAA0C;QAC1C,0CAA0C;QAC1C,qCAAqC;QACrC,0CAA0C;QAE1C,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,CAAC;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,uBAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,+BAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,kBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;YACL,IAAI,UAAU,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,IAAI,EAAE;gBACvC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,+CAA+C,CAAC,KAAK,IAAI,EAAE;gBACrF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBAC/C,MAAM,UAAU,GAAe,MAAoB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,sDAAsD,CAAC,KAAK,IAAI,EACjF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;aACpE;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;aACpE;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,qBAAqB,CAC7B,IAAI,EACJ,MAAwB,EACxB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAA0B,EAC1B,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAC9B,IAAI,EACJ,MAAyB,EACzB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;SACF;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,MAAc,EACd,YAAqB,EACrB,UAAkB,EAClB,UAA6B,EAAE;;QAE/B,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;YAChC,WAAW,EAAE,MAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;YACzC,UAAU,EAAE,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACzE,sEAAsE;gBACtE,qDAAqD;gBACrD,qEAAqE;gBACrE,YAAY,GAAG,EAAE,CAAC;aACnB;YACD,+FAA+F;YAC/F,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,wBAAwB,CAChC,IAAI,EACJ,MAAyB,EACzB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;SACH;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;gBAC7C,MAAM,gBAAgB,GAAG,YAAuC,CAAC;gBACjE;;;;mBAIG;gBACH,IACE,gBAAgB,CAAC,WAAW,CAAC,IAAI,SAAS;oBAC1C,gBAAgB,CAAC,UAAU,CAAC,IAAI,SAAS,EACzC;oBACA,YAAY,GAAG,gBAAgB,CAAC,UAAU,CAAC,CAAC;iBAC7C;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;gBAC1C,OAAO,GAAG,UAAU,CAAC,YAAsB,CAAC,CAAC;gBAC7C,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAClD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,kDAAkD,CAAC,KAAK,IAAI,EAAE;gBACxF
,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,oCAAoC,CAAC,KAAK,IAAI,EAAE;gBAC1E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAsB,CAAC,CAAC;aAC5C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,cAAc,CAAC,YAAsB,CAAC,CAAC;aAClD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,YAAsB,CAAC,CAAC;aACvD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,oBAAoB,CAAC,YAAsB,CAAC,CAAC;aACxD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAAwB,EACxB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;CACF;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,CAAC,CAAC,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;IACD,wBAAwB;IACxB,MAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;IAC3C,uBAAuB;IACvB,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;IACD,uBAAuB;IACvB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;IAChD,wBAAwB;IACxB,OAAO,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAK,MAAM,IAAI,IAAI,QAAQ,EAAE;YAC3B,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YACxC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,eAAe,KAAK,0BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YAC/C,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,gBAAgB,KAAK,2BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;YAC7C,IAAI,CAAC,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,KAAK,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,gBAAgB,KAAK,4CAA4C,CAC/E,CAAC;aACH;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KA
AK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,eAAe,KAAK,2BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YAC/C,MAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,CAAC,CAAC,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,CAAC,CAAC,CAAC,OAAO,IAAI,KAAK,UAAU,IAAI,OAAO,IAAI,KAAK,QAAQ,CAAC,IAAI,KAAK,YAAY,IAAI,CAAC,EACpF;gBACA,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,uGAAuG,CACrH,CAAC;aACH;SACF;KACF;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,qDAAqD,UAAU,mBAAmB,CACnF,CAAC;KACH;IACD,MAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE;QAC5C,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;IACxB,CAAC,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACb,GAAG,KAAK,6BAA6B,UAAU,2BAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,GAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,8BAA8B,CAAC,CAAC;SAC9D;QACD,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;KAC7C;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,8BAA8B,CAAC,CAAC;SAC9D;QACD,WAAW,GAAG,iBAAiB,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;KAC9C;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;YACtC,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,4DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAAY,IAAI;oBACnB,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;oBACtC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,4DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,oBAAoB,CAAC,KAAK,IAAI,EAAE;YACxD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,6DAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,qEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,sDAAsD,KAAK,IAAI,CAC7E,CAAC;aACH;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,E
ACtB,MAAsB,EACtB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC;IAEpC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,yBAAyB,CAAC,CAAC;KACzD;IACD,MAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,wDAAwD;YACtD,0CAA0C,UAAU,GAAG,CAC1D,CAAC;KACH;IACD,MAAM,SAAS,GAAG,EAAE,CAAC;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;QAE1F,IAAI,KAAK,IAAI,WAAW,CAAC,YAAY,EAAE;YACrC,MAAM,QAAQ,GAAG,WAAW,CAAC,kBAAkB;gBAC7C,CAAC,CAAC,SAAS,WAAW,CAAC,kBAAkB,EAAE;gBAC3C,CAAC,CAAC,OAAO,CAAC;YACZ,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;gBACzC,SAAS,CAAC,CAAC,CAAC,qBAAQ,eAAe,CAAE,CAAC;gBACtC,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,YAAY,EAAE,CAAC;aACtE;iBAAM;gBACL,SAAS,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;gBAClB,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;gBACnD,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,YAAY,EAAE,CAAC;aACtE;SACF;aAAM;YACL,SAAS,CAAC,CAAC,CAAC,GAAG,eAAe,CAAC;SAChC;KACF;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC;IAEpC,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,0BAA0B,CAAC,CAAC;KAC1D;IACD,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IACpC,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,2DAA2D;YACzD,0CAA0C,UAAU,GAAG,CAC1D,CAAC;KACH;IACD,MAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QACrC,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;QAC1F,gFAAgF;QAChF,cAAc,CAAC,GAAG,CAAC,GAAG,iBAAiB,CAAC,SAAS,EAAE,eAAe,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;KACrF;IAED,kDAAkD;IAClD,IAAI,KAAK,IAAI,MAAM,CAAC,YAAY,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;QAE5F,MAAM,MAAM,GAAG,cAAc,CAAC;QAC9B,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,MAAM,CAAC,YAAY,EAAE,CAAC;QAC1D,OAAO,MAAM,CAAC;KACf;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;GAKG;AACH,SAAS,2BAA2B,CAClC,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,MAAM,oBAAoB,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IAE9D,IAAI,CAAC,oBAAoB,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE;QAClD,MAAM,WAAW,GAAG,uBAAuB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC5E,OAAO,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,IAAI,CAAC,oBAAoB,CAAC;KAC/C;IAED,OAAO,oBAAoB,CAAC;AAC9B,CAAC;AAED;;;;;GAKG;AACH,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;IACxC,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACb,yBAAyB,UAAU,oCAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,IAAI,CACN,CAAC;KACH;IAED,OAAO,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;AAC5C,CAAC;AAED;;;;GAIG;AACH,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,WAAW,GAAG,uBAAuB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC5E,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,mDAAmD,MAAM,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,CAAC;SAC/F;QACD,UAAU,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,IAAI,CAAC,eAAe,CAAC;QAC/C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,qDAAqD;gBACnD,WAAW,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,cACpC,MAAM,CAAC,IAAI,CAAC,SACd,iBAAiB,UAAU,IAAI,CAClC,CAAC;SACH;KACF;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC;IAEpC,IAAI,sCAAsC,CAAC,UAAU,EAAE,
MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;KACzE;IAED,IAAI,MAAM,IAAI,SAAS,EAAE;QACvB,MAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,MAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;YACzC,MAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;aACV;YAED,IAAI,QAA4B,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;iBACnC;qBAAM;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;iBACpE;aACF;iBAAM;gBACL,MAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;gBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;gBAEvB,KAAK,MAAM,QAAQ,IAAI,KAAK,EAAE;oBAC5B,MAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IACE,WAAW,IAAI,SAAS;wBACxB,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,CAAC,EACvE;wBACA,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;qBAC7B;oBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;iBACvC;aACF;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAI,KAAK,IAAI,MAAM,CAAC,YAAY,EAAE;oBAChC,MAAM,QAAQ,GAAG,MAAM,CAAC,kBAAkB;wBACxC,CAAC,CAAC,SAAS,MAAM,CAAC,kBAAkB,EAAE;wBACtC,CAAC,CAAC,OAAO,CAAC;oBACZ,YAAY,CAAC,WAAW,CAAC,mCACpB,YAAY,CAAC,WAAW,CAAC,KAC5B,CAAC,QAAQ,CAAC,EAAE,MAAM,CAAC,YAAY,GAChC,CAAC;iBACH;gBACD,MAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;oBAClC,CAAC,CAAC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;oBAClD,CAAC,CAAC,UAAU,CAAC;gBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,MAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;gBAC5F,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;oBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;iBACrC;gBAED,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,EAClB,OAAO,CACR,CAAC;gBAEF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;oBAC1D,MAAM,KAAK,GAAG,iBAAiB,CAAC,cAAc,EAAE,eAAe,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;oBACjF,IAAI,KAAK,IAAI,cAAc,CAAC,cAAc,EAAE;wBAC1C,uEAAuE;wBACvE,2DAA2D;wBAC3D,gCAAgC;wBAChC,YAAY,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC;wBAC5D,YAAY,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBACvD;yBAAM,IAAI,KAAK,IAAI,cAAc,CAAC,YAAY,EAAE;wBAC/C,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,cAAc,CAAC,cAAe,CAAC,EAAE,KAAK,EAAE,CAAC;qBACtE;yBAAM;wBACL,YAAY,CAAC,QAAQ,CAAC,GAAG,KAAK,CAAC;qBAChC;iBACF;aACF;SACF;QAED,MAAM,0BAA0B,GAAG,2BAA2B,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC/F,IAAI,0BAA0B,EAAE;YAC9B,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC1C,KAAK,MAAM,cAAc,IAAI,MAAM,EAAE;gBACnC,MAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,KAAK,cAAc,CAAC,CAAC;gBAC5E,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,EACzC,OAAO,CACR,CAAC;iBACH;aACF;SACF;QAED,OAAO,OAAO,CAAC;KAChB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,iBAAiB,CACxB,cAAsB,EACtB,eAAoB,EACpB,KAAc,EACd,OAAoC;IAEpC,IAAI,CAAC,KAAK,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;QAC1C,OAAO,eAAe,CAAC;KACxB;IAED,MAAM,QAAQ,GAAG,cAAc,CAAC,kBAAkB;QAChD,CAAC,CAAC,SAAS,cAAc,CAAC,kBAAkB,EAAE;QAC9C,CAAC,CAAC,OAAO,CAAC;IACZ,MAAM,YAAY,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,cAAc,CAAC,YAAY,EAAE,CAAC;IAEjE,IAAI,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACpD,IAAI,eAAe,CAAC,WAAW,CAAC,EAAE;YAChC,OAAO,eAAe,CAAC;SACxB;aAAM;YACL,MAAM,MAAM,qBAAa,eAAe,CAAE,CAAC;YAC3C,MAAM,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC;YACnC,OAAO,MAAM,CAAC;SACf;KACF;IACD,MAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;IAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC;IACnC,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS
,oBAAoB,CAAC,YAAoB,EAAE,OAAoC;IACtF,OAAO,CAAC,WAAW,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAClE,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB,EAClB,OAAoC;;IAEpC,MAAM,UAAU,GAAG,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW,CAAC;IACrD,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;KACnF;IAED,MAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,MAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;QACzC,MAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,MAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QACpC,MAAM,EAAE,cAAc,EAAE,OAAO,EAAE,cAAc,EAAE,GAAG,cAAc,CAAC;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;QACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;YACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;SACxD;QAED,MAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;QAC3F,IAAI,sBAAsB,EAAE;YAC1B,MAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAK,MAAM,SAAS,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;gBACjD,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;oBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,EAClB,OAAO,CACR,CAAC;iBACH;gBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;aACtC;YACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;SAC5B;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,WAAW,CAAC,EAAE;gBAC9D,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,WAAW,CAAC,CAAC,OAAQ,CAAC,EACnC,kBAAkB,EAClB,OAAO,CACR,CAAC;aACH;iBAAM,IAAI,cAAc,CAAC,WAAW,EAAE;gBACrC,IAAI,YAAY,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;oBAC1C,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC;iBAC1C;qBAAM,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;oBAC3C,+EAA+E;oBAC/E,kEAAkE;oBAClE,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC;iBAC9B;aACF;iBAAM;gBACL,MAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B;;;;;;;;;;;;;sBAaE;oBACF,MAAM,OAAO,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;oBACvC,MAAM,WAAW,GAAG,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAG,cAAe,CAAC,mCAAI,EAAE,CAAC;oBACrD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,WAAW,EACX,kBAAkB,EAClB,OAAO,CACR,CAAC;oBACF,oBAAoB,CAAC,IAAI,CAAC,OAAQ,CAAC,CAAC;iBACrC;qBAAM;oBACL,MAAM,QAAQ,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;oBAC7C,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,OAAO,CACR,CAAC;oBACF,oBAAoB,CAAC,IAAI,CAAC,YAAa,CAAC,CAAC;iBAC1C;aACF;SACF;aAAM;YACL,kFAAkF;YAClF,IAAI,gBAAgB,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;YACvB,sCAAsC;YACtC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;gBACxB,IAAI,CAAC,GAAG;oBAAE,MAAM;gBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;aACjB;YACD,gBAAgB,GAAG,GAAG,CAAC;YACvB,MAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;YACtE,sEAAsE;YACtE,yEAAyE;YACzE,kFAAkF;YAClF,kFAAkF;YAClF,gGAAgG;YAChG,8FAA8F;YAC9F,qFAAqF;YACrF,mFAAmF;YACnF,sFAAsF;YACtF,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;gBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;aAC1C;YAED,IAAI,eAAe,CAAC;YACpB,SAAS;YACT,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;gBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;gBACrC,MAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,OAAO,CACR,CAAC;gBACF,yFAAyF;gBACzF,6CAA6C;gBAC7C,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;oBAC7C,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE;wBAC3D,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,
CAAC;qBACtB;iBACF;gBACD,QAAQ,GAAG,aAAa,CAAC;aAC1B;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;gBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,OAAO,CACR,CAAC;gBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;aACjC;SACF;KACF;IAED,MAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IACpE,IAAI,0BAA0B,EAAE;QAC9B,MAAM,oBAAoB,GAAG,CAAC,gBAAwB,EAAW,EAAE;YACjE,KAAK,MAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,MAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;gBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;oBACjC,OAAO,KAAK,CAAC;iBACd;aACF;YACD,OAAO,IAAI,CAAC;QACd,CAAC,CAAC;QAEF,KAAK,MAAM,gBAAgB,IAAI,YAAY,EAAE;YAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,EAC3C,OAAO,CACR,CAAC;aACH;SACF;KACF;SAAM,IAAI,YAAY,EAAE;QACvB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;YAC3C,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;gBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;gBACnC,CAAC,oBAAoB,CAAC,GAAG,EAAE,OAAO,CAAC,EACnC;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;aACnC;SACF;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB,EAClB,OAAoC;IAEpC,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,2DAA2D;YACzD,0CAA0C,UAAU,EAAE,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,MAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;YAC3C,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;SAC7F;QACD,OAAO,cAAc,CAAC;KACvB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB,EAClB,OAAoC;IAEpC,MAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,wDAAwD;YACtD,0CAA0C,UAAU,EAAE,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;YAChC,+FAA+F;YAC/F,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,MAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACnC,OAAO,EACP,YAAY,CAAC,CAAC,CAAC,EACf,GAAG,UAAU,IAAI,CAAC,GAAG,EACrB,OAAO,CACR,CAAC;SACH;QACD,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;IAExD,MAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5F,IAAI,wBAAwB,EAAE;QAC5B,MAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;YAClC,MAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;gBACnC,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;gBACjE,MAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;oBAC7B,CAAC,CAAC,kBAAkB;oBACpB,CAAC,CAAC,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,MAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;gBACrF,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;iBAC5B;aACF;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;IAEvB,OAAO,CACL,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CACrE,CAAC;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;IAClF,OAAO,CACL,QAAQ;QACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,CAChE,CAAC;AACJ,CAAC;AA6TD;;GAEG;AACH,MAAM,UAAU,eAAe,CAAC,WAAoB;IAClD,MAAM,eAAe,GAAG,WAAsC,CAAC;IA
C/D,IAAI,WAAW,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;QACrC,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAClD,OAAO,WAAW,CAAC;KACpB;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;QACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;KAClC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,MAAM,KAAK,GAAG,EAAE,CAAC;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QACD,OAAO,KAAK,CAAC;KACd;SAAM,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,MAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,KAAK,MAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC;SACnE;QACD,OAAO,UAAU,CAAC;KACnB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,SAAS,OAAO,CAAmB,CAAW;IAC5C,MAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAK,MAAM,GAAG,IAAI,CAAC,EAAE;QACnB,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,2DAA2D;AAC3D,MAAM,CAAC,MAAM,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;CACX,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/* eslint-disable eqeqeq */\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\n\n// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications.\n\n/**\n * Used to map raw response objects to final shapes.\n * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON.\n * Also allows pulling values from headers, as well as inserting default values and constants.\n */\nexport class Serializer {\n constructor(\n /**\n * The provided model mapper.\n */\n public readonly modelMappers: { [key: string]: any } = {},\n /**\n * Whether the contents are XML or not.\n */\n public readonly isXML?: boolean\n ) {}\n\n /**\n * Validates constraints, if any. This function will throw if the provided value does not respect those constraints.\n * @param mapper - The definition of data models.\n * @param value - The value.\n * @param objectName - Name of the object. 
Used in the error messages.\n * @deprecated Removing the constraints validation on client side.\n */\n validateConstraints(mapper: Mapper, value: unknown, objectName: string): void {\n const failValidation = (\n constraintName: keyof MapperConstraints,\n constraintValue: any\n ): Error => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const valueAsNumber = value as number;\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n const valueAsArray = value as any[];\n if (MaxItems != undefined && valueAsArray.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && valueAsArray.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && valueAsArray.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && valueAsArray.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n valueAsArray.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param object - A valid Javascript object to be serialized.\n * @param objectName - Name of the serialized object.\n * @param options - additional options to deserialization.\n * @returns A valid serialized Javascript object.\n */\n serialize(\n mapper: Mapper,\n object: unknown,\n objectName?: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? 
XML_CHARKEY,\n };\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/i) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n if (mapperType.match(/^any$/i) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/i) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = serializeByteArrayType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = serializeBase64UrlType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = serializeSequenceType(\n this,\n mapper as SequenceMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = serializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Composite$/i) !== null) {\n payload = serializeCompositeType(\n this,\n mapper as CompositeMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param responseBody - A valid Javascript entity to be deserialized.\n * @param objectName - Name of the deserialized object.\n * @param options - Controls behavior of XML parser and builder.\n * @returns A valid deserialized Javascript object.\n */\n deserialize(\n mapper: Mapper,\n responseBody: unknown,\n objectName: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/i) !== null) {\n payload = deserializeCompositeType(\n this,\n mapper as CompositeMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else {\n if (this.isXML) {\n const xmlCharKey = updatedOptions.xmlCharKey;\n const castResponseBody = responseBody as Record;\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\" i.e., XML_ATTRKEY) and body (\"#\" i.e., XML_CHARKEY) properties,\n * then just reduce the responseBody value to the body (\"#\" i.e., XML_CHARKEY) property.\n */\n if (\n castResponseBody[XML_ATTRKEY] != undefined &&\n castResponseBody[xmlCharKey] != undefined\n ) {\n responseBody = castResponseBody[xmlCharKey];\n }\n }\n\n if (mapperType.match(/^Number$/i) !== null) {\n payload = parseFloat(responseBody as string);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/i) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) {\n payload = new Date(responseBody as string);\n } else if (mapperType.match(/^UnixTime$/i) !== null) {\n payload = unixTimeToDate(responseBody as number);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = base64.decodeString(responseBody as string);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = base64UrlToByteArray(responseBody as string);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = deserializeSequenceType(\n this,\n mapper as SequenceMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string): string {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n }\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // 
Base64Url to Base64.\n str = str.replace(/-/g, \"+\").replace(/_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/i) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/i) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/i) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/i) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/i) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !((typeof Blob === \"function\" || typeof Blob === \"object\") && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = base64.encodeByteArray(value);\n }\n return returnValue;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = bufferToBase64Url(value) || \"\";\n }\n return returnValue;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string): any {\n if (value != undefined) {\n if (typeName.match(/^Date$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/i) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. Instead was \"${value}\".`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any[] {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n const serializedValue = serializer.serialize(elementType, object[i], objectName, options);\n\n if (isXml && elementType.xmlNamespace) {\n const xmlnsKey = elementType.xmlNamespacePrefix\n ? 
`xmlns:${elementType.xmlNamespacePrefix}`\n : \"xmlns\";\n if (elementType.type.name === \"Composite\") {\n tempArray[i] = { ...serializedValue };\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n } else {\n tempArray[i] = {};\n tempArray[i][options.xmlCharKey] = serializedValue;\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n }\n } else {\n tempArray[i] = serializedValue;\n }\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): { [key: string]: any } {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n const serializedValue = serializer.serialize(valueType, object[key], objectName, options);\n // If the element needs an XML namespace we need to add it within the $ property\n tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options);\n }\n\n // Add the namespace to the root element if needed\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : \"xmlns\";\n\n const result = tempDictionary;\n result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace };\n return result;\n }\n\n return tempDictionary;\n}\n\n/**\n * Resolves the additionalProperties property from a referenced mapper.\n * @param serializer - The serializer containing the entire set of mappers.\n * @param mapper - The composite mapper to resolve.\n * @param objectName - Name of the object being serialized.\n */\nfunction resolveAdditionalProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): SequenceMapper | BaseMapper | CompositeMapper | DictionaryMapper | EnumMapper | undefined {\n const additionalProperties = mapper.type.additionalProperties;\n\n if (!additionalProperties && mapper.type.className) {\n const modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n return modelMapper?.type.additionalProperties;\n }\n\n return additionalProperties;\n}\n\n/**\n * Finds the mapper referenced by `className`.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveReferencedMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): CompositeMapper | undefined {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n return serializer.modelMappers[className];\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const 
modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${mapper.type.className}\".`);\n }\n modelProps = modelMapper?.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(modelMapper)}\" of type \"${\n mapper.type.className\n }\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (\n childObject == undefined &&\n (object[key] != undefined || propertyMapper.defaultValue !== undefined)\n ) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix\n ? `xmlns:${mapper.xmlNamespacePrefix}`\n : \"xmlns\";\n parentObject[XML_ATTRKEY] = {\n ...parentObject[XML_ATTRKEY],\n [xmlnsKey]: mapper.xmlNamespace,\n };\n }\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? 
objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName,\n options\n );\n\n if (serializedValue !== undefined && propName != undefined) {\n const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options);\n if (isXml && propertyMapper.xmlIsAttribute) {\n // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {};\n parentObject[XML_ATTRKEY][propName] = serializedValue;\n } else if (isXml && propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: value };\n } else {\n parentObject[propName] = value;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName);\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]',\n options\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction getXmlObjectValue(\n propertyMapper: Mapper,\n serializedValue: any,\n isXml: boolean,\n options: Required\n): any {\n if (!isXml || !propertyMapper.xmlNamespace) {\n return serializedValue;\n }\n\n const xmlnsKey = propertyMapper.xmlNamespacePrefix\n ? `xmlns:${propertyMapper.xmlNamespacePrefix}`\n : \"xmlns\";\n const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace };\n\n if ([\"Composite\"].includes(propertyMapper.type.name)) {\n if (serializedValue[XML_ATTRKEY]) {\n return serializedValue;\n } else {\n const result: any = { ...serializedValue };\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n }\n }\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n}\n\nfunction isSpecialXmlProperty(propertyName: string, options: Required): boolean {\n return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any {\n const xmlCharKey = options.xmlCharKey ?? 
XML_CHARKEY;\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName,\n options\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody[XML_ATTRKEY][xmlName!],\n propertyObjectName,\n options\n );\n } else if (propertyMapper.xmlIsMsText) {\n if (responseBody[xmlCharKey] !== undefined) {\n instance[key] = responseBody[xmlCharKey];\n } else if (typeof responseBody === \"string\") {\n // The special case where xml parser parses \"content\" into JSON of\n // `{ name: \"content\"}` instead of `{ name: { \"_\": \"content\" }}`\n instance[key] = responseBody;\n }\n } else {\n const propertyName = xmlElementName || xmlName || serializedName;\n if (propertyMapper.xmlIsWrapped) {\n /* a list of wrapped by \n For the xml example below\n \n ...\n ...\n \n the responseBody has\n {\n Cors: {\n CorsRule: [{...}, {...}]\n }\n }\n xmlName is \"Cors\" and xmlElementName is\"CorsRule\".\n */\n const wrapped = responseBody[xmlName!];\n const elementList = wrapped?.[xmlElementName!] ?? [];\n instance[key] = serializer.deserialize(\n propertyMapper,\n elementList,\n propertyObjectName,\n options\n );\n handledPropertyNames.push(xmlName!);\n } else {\n const property = responseBody[propertyName!];\n instance[key] = serializer.deserialize(\n propertyMapper,\n property,\n propertyObjectName,\n options\n );\n handledPropertyNames.push(propertyName!);\n }\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' 
in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [k, v] of Object.entries(instance)) {\n if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) {\n arrayInstance[k] = v;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string): boolean => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]',\n options\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key, options)\n ) {\n instance[key] = responseBody[key];\n }\n }\n }\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): { [key: string]: any } {\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any[] {\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n 
);\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(\n element,\n responseBody[i],\n `${objectName}[${i}]`,\n options\n );\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string): any {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\n/**\n * Description of various value constraints such as integer ranges and string regex.\n */\nexport interface MapperConstraints {\n /**\n * The value should be less than or equal to the `InclusiveMaximum` value.\n */\n InclusiveMaximum?: number;\n /**\n * The value should be less than the `ExclusiveMaximum` value.\n */\n ExclusiveMaximum?: number;\n /**\n * The value should be greater than or equal to the `InclusiveMinimum` value.\n */\n InclusiveMinimum?: number;\n /**\n * The value should be greater than the `InclusiveMinimum` value.\n */\n ExclusiveMinimum?: number;\n /**\n * The length should be smaller than the `MaxLength`.\n */\n MaxLength?: number;\n /**\n * The length should be bigger than the `MinLength`.\n */\n MinLength?: number;\n /**\n * The value must match the pattern.\n */\n Pattern?: RegExp;\n /**\n * The value must contain fewer items than the MaxItems value.\n */\n MaxItems?: number;\n /**\n * The value must contain more items than the `MinItems` value.\n */\n MinItems?: number;\n /**\n * The value must contain only unique items.\n */\n UniqueItems?: true;\n /**\n * The value should be exactly divisible by the `MultipleOf` value.\n */\n MultipleOf?: number;\n}\n\n/**\n * Type of the mapper. 
Includes known mappers.\n */\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\n/**\n * The type of a simple mapper.\n */\nexport interface SimpleMapperType {\n /**\n * Name of the type of the property.\n */\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\n/**\n * Helps build a mapper that describes how to map a set of properties of an object based on other mappers.\n *\n * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`.\n */\nexport interface CompositeMapperType {\n /**\n * Name of the composite mapper type.\n */\n name: \"Composite\";\n\n /**\n * Use `className` to reference another type definition.\n */\n className?: string;\n\n /**\n * Use `modelProperties` when the reference to the other type has been resolved.\n */\n modelProperties?: { [propertyName: string]: Mapper };\n\n /**\n * Used when a model has `additionalProperties: true`. Allows the generic processing of unnamed model properties on the response object.\n */\n additionalProperties?: Mapper;\n\n /**\n * The name of the top-most parent scheme, the one that has no parents.\n */\n uberParent?: string;\n\n /**\n * A polymorphic discriminator.\n */\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\n/**\n * Helps build a mapper that describes how to parse a sequence of mapped values.\n */\nexport interface SequenceMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Sequence\";\n /**\n * The mapper to use to map each one of the properties of the sequence.\n */\n element: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse a dictionary of mapped values.\n */\nexport interface DictionaryMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Dictionary\";\n /**\n * The mapper to use to map the value of each property in the dictionary.\n */\n value: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse an enum value.\n */\nexport interface EnumMapperType {\n /**\n * Name of the enum type mapper.\n */\n name: \"Enum\";\n /**\n * Values allowed by this mapper.\n */\n allowedValues: any[];\n}\n\n/**\n * The base definition of a mapper. 
Can be used for XML and plain JavaScript objects.\n */\nexport interface BaseMapper {\n /**\n * Name for the xml element\n */\n xmlName?: string;\n /**\n * Xml element namespace\n */\n xmlNamespace?: string;\n /**\n * Xml element namespace prefix\n */\n xmlNamespacePrefix?: string;\n /**\n * Determines if the current property should be serialized as an attribute of the parent xml element\n */\n xmlIsAttribute?: boolean;\n /**\n * Determines if the current property should be serialized as the inner content of the xml element\n */\n xmlIsMsText?: boolean;\n /**\n * Name for the xml elements when serializing an array\n */\n xmlElementName?: string;\n /**\n * Whether or not the current property should have a wrapping XML element\n */\n xmlIsWrapped?: boolean;\n /**\n * Whether or not the current property is readonly\n */\n readOnly?: boolean;\n /**\n * Whether or not the current property is a constant\n */\n isConstant?: boolean;\n /**\n * Whether or not the current property is required\n */\n required?: boolean;\n /**\n * Whether or not the current property allows mull as a value\n */\n nullable?: boolean;\n /**\n * The name to use when serializing\n */\n serializedName?: string;\n /**\n * Type of the mapper\n */\n type: MapperType;\n /**\n * Default value when one is not explicitly provided\n */\n defaultValue?: any;\n /**\n * Constraints to test the current value against\n */\n constraints?: MapperConstraints;\n}\n\n/**\n * Mappers are definitions of the data models used in the library.\n * These data models are part of the Operation or Client definitions in the responses or parameters.\n */\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\n/**\n * Used to disambiguate discriminated type unions.\n * For example, if response can have many shapes but also includes a 'kind' field (or similar),\n * that field can be used to determine how to deserialize the response to the correct type.\n */\nexport interface PolymorphicDiscriminator {\n /**\n * Name of the discriminant property in the original JSON payload, e.g. 
`@odata.kind`.\n */\n serializedName: string;\n /**\n * Name to use on the resulting object instead of the original property name.\n * Useful since the JSON property could be difficult to work with.\n * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`.\n */\n clientName: string;\n /**\n * It may contain any other property.\n */\n [key: string]: string;\n}\n\n/**\n * A mapper composed of other mappers.\n */\nexport interface CompositeMapper extends BaseMapper {\n /**\n * The type descriptor of the `CompositeMapper`.\n */\n type: CompositeMapperType;\n}\n\n/**\n * A mapper describing arrays.\n */\nexport interface SequenceMapper extends BaseMapper {\n /**\n * The type descriptor of the `SequenceMapper`.\n */\n type: SequenceMapperType;\n}\n\n/**\n * A mapper describing plain JavaScript objects used as key/value pairs.\n */\nexport interface DictionaryMapper extends BaseMapper {\n /**\n * The type descriptor of the `DictionaryMapper`.\n */\n type: DictionaryMapperType;\n /**\n * Optionally, a prefix to add to the header collection.\n */\n headerCollectionPrefix?: string;\n}\n\n/**\n * A mapper describing an enum value.\n */\nexport interface EnumMapper extends BaseMapper {\n /**\n * The type descriptor of the `EnumMapper`.\n */\n type: EnumMapperType;\n}\n\n/**\n * An interface representing an URL parameter value.\n */\nexport interface UrlParameterValue {\n /**\n * The URL value.\n */\n value: string;\n /**\n * Whether to keep or skip URL encoding.\n */\n skipUrlEncoding: boolean;\n}\n\n/**\n * Utility function that serializes an object that might contain binary information into a plain object, array or a string.\n */\nexport function serializeObject(toSerialize: unknown): any {\n const castToSerialize = toSerialize as Record;\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(castToSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\n/**\n * String enum containing the string types of property mappers.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serviceClient.js b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js new file mode 100644 index 0000000..13e5074 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js @@ -0,0 +1,619 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as utils from "./util/utils"; +import { MapperType } from "./serializer"; +import { DefaultDeserializationOptions, deserializationPolicy, } from "./policies/deserializationPolicy"; +import { DefaultKeepAliveOptions, keepAlivePolicy } from "./policies/keepAlivePolicy"; +import { DefaultRedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { DefaultRetryOptions, exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { logPolicy } from "./policies/logPolicy"; +import { getPathStringFromParameter, getPathStringFromParameterPath, } from "./operationParameter"; +import { getStreamResponseStatusCodes } from "./operationSpec"; +import { WebResource, isWebResourceLike, } from "./webResource"; +import { RequestPolicyOptions, } from "./policies/requestPolicy"; +import { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +import { isNode } from "@azure/core-util"; +import { isTokenCredential } from "@azure/core-auth"; +import { getDefaultUserAgentHeaderName, getDefaultUserAgentValue, userAgentPolicy, } from "./policies/userAgentPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { URLBuilder } from "./url"; +import { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +import { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { getCachedDefaultHttpClient } from "./httpClientCache"; +import { logger } from "./log"; +import { ndJsonPolicy } from "./policies/ndJsonPolicy"; +import { proxyPolicy } from "./policies/proxyPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { stringifyXML } from "./util/xml"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { tracingPolicy } from "./policies/tracingPolicy"; +/** + * ServiceClient sends service requests and receives responses. + */ +export class ServiceClient { + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(credentials, + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ + options) { + if (!options) { + options = {}; + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + let requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + logger.info("ServiceClient: using custom request policies"); + requestPolicyFactories = options.requestPolicyFactories; + } + else { + let authPolicyFactory = undefined; + if (isTokenCredential(credentials)) { + logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); + // Create a wrapped RequestPolicyFactory here so that we can provide the + // correct scope to the BearerTokenAuthenticationPolicy at the first time + // one is requested. 
This is needed because generated ServiceClient + // implementations do not set baseUri until after ServiceClient's constructor + // is finished, leaving baseUri empty at the time when it is needed to + // build the correct scope name. + const wrappedPolicyFactory = () => { + let bearerTokenPolicyFactory = undefined; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const serviceClient = this; + const serviceClientOptions = options; + return { + create(nextPolicy, createOptions) { + const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); + if (!credentialScopes) { + throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); + } + if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { + bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); + } + return bearerTokenPolicyFactory.create(nextPolicy, createOptions); + }, + }; + }; + authPolicyFactory = wrappedPolicyFactory(); + } + else if (credentials && typeof credentials.signRequest === "function") { + logger.info("ServiceClient: creating signing policy from provided credentials"); + authPolicyFactory = signingPolicy(credentials); + } + else if (credentials !== undefined && credentials !== null) { + throw new Error("The credentials argument must implement the TokenCredential interface"); + } + logger.info("ServiceClient: using default request policies"); + requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); + if (options.requestPolicyFactories) { + // options.requestPolicyFactories can also be a function that manipulates + // the default requestPolicyFactories array + const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + sendRequest(options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + let httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + let httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. 
+ */ + async sendOperationRequest(operationArguments, operationSpec, callback) { + var _a; + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const httpRequest = new WebResource(); + let result; + try { + const baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + const requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (const queryParameter of operationSpec.queryParameters) { + let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue !== undefined && queryParameterValue !== null) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null) { + if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + // The collection is empty, no need to try serializing the current queryParam + continue; + } + else { + for (const index in queryParameterValue) { + const item = queryParameterValue[index]; + queryParameterValue[index] = + item === undefined || item === null ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType && operationSpec.requestBody) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue !== undefined && headerValue !== null) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + if (options.spanOptions) { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
+ httpRequest.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + httpRequest.tracingContext = options.tracingContext; + } + if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { + httpRequest.shouldDeserialize = options.shouldDeserialize; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseStatusCodes === undefined) { + httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); + } + let rawResponse; + let sendRequestError; + try { + rawResponse = await this.sendRequest(httpRequest); + } + catch (error) { + sendRequestError = error; + } + if (sendRequestError) { + if (sendRequestError.response) { + sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || + operationSpec.responses["default"]); + } + result = Promise.reject(sendRequestError); + } + else { + result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); + } + } + catch (error) { + result = Promise.reject(error); + } + const cb = callback; + if (cb) { + result + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + return result; + } +} +export function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + var _a, _b, _c, _d, _e, _f; + const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; + const updatedOptions = { + rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", + includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, + }; + const xmlCharKey = serializerOptions.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(utils.prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperType.String && + (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + let result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(authPolicyFactory, options) { + const factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (authPolicyFactory) { + factories.push(authPolicyFactory); + } + const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + factories.push(redirectPolicy()); + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + if (isNode) { + factories.push(proxyPolicy(options.proxySettings)); + } + factories.push(logPolicy({ logger: logger.info })); + return factories; +} +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. 
+ * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +export function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { + const requestPolicyFactories = []; + if (pipelineOptions.sendStreamingJson) { + requestPolicyFactories.push(ndJsonPolicy()); + } + let userAgentValue = undefined; + if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { + const userAgentInfo = []; + userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); + // Add the default user agent value if it isn't already specified + // by the userAgentPrefix option. + const defaultUserAgentInfo = getDefaultUserAgentValue(); + if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { + userAgentInfo.push(defaultUserAgentInfo); + } + userAgentValue = userAgentInfo.join(" "); + } + const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); + const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); + const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); + if (isNode) { + requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); + } + const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); + const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); + requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); + if (redirectOptions.handleRedirects) { + requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); + } + if (authPolicyFactory) { + requestPolicyFactories.push(authPolicyFactory); + } + requestPolicyFactories.push(logPolicy(loggingOptions)); + if (isNode && pipelineOptions.decompressResponse === false) { + requestPolicyFactories.push(disableResponseDecompressionPolicy()); + } + return { + httpClient: pipelineOptions.httpClient, + requestPolicyFactories, + }; +} +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. 
+ */ +export function getPropertyParent(parent, propertyPath) { + if (parent && propertyPath) { + const propertyPathLength = propertyPath.length; + for (let i = 0; i < propertyPathLength - 1; ++i) { + const propertyName = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +export function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var _a; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); + if (propertyValue !== undefined && propertyValue !== null) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent !== undefined && parent !== null && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. 
+ */ +export function flattenResponse(_response, responseSpec) { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const addOperationResponse = (obj) => { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (typeName === "Sequence" || isPageableResponse) { + const arrayResponse = [...(_response.parsedBody || [])]; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); +} +function getCredentialScopes(options, baseUri) { + if (options === null || options === void 0 ? 
void 0 : options.credentialScopes) { + return options.credentialScopes; + } + if (baseUri) { + return `${baseUri}/.default`; + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map new file mode 100644 index 0000000..41f8144 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.js","sourceRoot":"","sources":["../../src/serviceClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAA6C,UAAU,EAAc,MAAM,cAAc,CAAC;AACjG,OAAO,EACL,6BAA6B,EAE7B,qBAAqB,GACtB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,uBAAuB,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AACtF,OAAO,EAAE,sBAAsB,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AACnF,OAAO,EAAE,mBAAmB,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAEhG,OAAO,EAAoB,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EAGL,0BAA0B,EAC1B,8BAA8B,GAC/B,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAiB,4BAA4B,EAAE,MAAM,iBAAiB,CAAC;AAC9E,OAAO,EAGL,WAAW,EAEX,iBAAiB,GAClB,MAAM,eAAe,CAAC;AACvB,OAAO,EAGL,oBAAoB,GACrB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,0BAA0B,CAAC;AAEvF,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAC1C,OAAO,EAAmB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACtE,OAAO,EACL,6BAA6B,EAC7B,wBAAwB,EACxB,eAAe,GAChB,MAAM,4BAA4B,CAAC;AAMpC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACnC,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAC/D,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AACvD,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAC1C,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAmGzD;;GAEG;AACH,MAAM,OAAO,aAAa;IAsBxB;;;;OAIG;IACH,YACE,WAAwD;IACxD,iEAAiE;IACjE,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,CAAC;QACtE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;YACjD,MAAM,CAAC,IAAI,CAAC,8CAA8C,CAAC,CAAC;YAC5D,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,IAAI,iBAAiB,GAAqC,SAAS,CAAC;YACpE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;gBAClC,MAAM,CAAC,IAAI,CACT,sFAAsF,CACvF,CAAC;gBACF,wEAAwE;gBACxE,yEAAyE;gBACzE,oEAAoE;gBACpE,6EAA6E;gBAC7E,sEAAsE;gBACtE,gCAAgC;gBAChC,MAAM,oBAAoB,GAA+B,GAAG,EAAE;oBAC5D,IAAI,wBAAwB,GAAqC,SAAS,CAAC;oBAC3E,4DAA4D;oBAC5D,MAAM,aAAa,GAAG,IAAI,CAAC;oBAC3B,MAAM,oBAAoB,GAAG,OAAO,CAAC;oBACrC,OAAO;wBACL,MAAM,CAAC,UAAyB,EAAE,aAAmC;4BACnE,MAAM,gBAAgB,GAAG,mBAAmB,CAC1C,oBAAoB,EACpB,aAAa,CAAC,OAAO,CACtB,CAAC;4BAEF,IAAI,CAAC,gBAAgB,EAAE;gCACrB,MAAM,IAAI,KAAK,CACb,mKAAmK,CACpK,CAAC;6BACH;4BAED,IAAI,wBAAwB,KAAK,SAAS,IAAI,wBAAwB,KAAK,IAAI,EAAE;gCAC/E,wBAAwB,GAAG,+BAA+B,CACxD,WAAW,EACX,gBAAgB,CACjB,CAAC;6BACH;4BAED,OAAO,wBAAwB,CAAC,MAAM,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;wBACpE,CAAC;qBACF,CAAC;gBACJ,CAAC,CAAC;gBAEF,iBAAiB,GAAG,oBAAoB,EAAE,CAAC;aAC5C;iBAAM,IAAI,WAAW,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;gBACvE,MAAM,CAAC,IAAI,CAAC,kEAAkE,CAAC,CAAC;gBAChF,iBAAiB,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC;aAChD;iBAAM,IAAI,
WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;gBAC5D,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;aAC1F;YAED,MAAM,CAAC,IAAI,CAAC,+CAA+C,CAAC,CAAC;YAC7D,sBAAsB,GAAG,mCAAmC,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;YACzF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,yEAAyE;gBACzE,2CAA2C;gBAC3C,MAAM,yBAAyB,GAC7B,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACzD,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;IACxD,CAAC;IAED;;OAEG;IACH,WAAW,CAAC,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,oBAAoB,CACxB,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,MAAM,iBAAiB,GAAG,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,CAAC;QACxE,MAAM,WAAW,GAAoB,IAAI,WAAW,EAAE,CAAC;QAEvD,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,MAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,MAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAAK,MAAM,YAAY,IAAI,aAAa,CAAC,aAAa,EAAE;oBACtD,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,EACxC,iBAAiB,CAClB,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,IAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,GAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAAK,MAAM,cAAc,IAAI,aAAa,CAAC,eAAe,EAAE;oBAC1D,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,KAAK,SAAS,IAAI,mBAAmB,KAAK,IAAI,EAAE;wBACrE,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,EAC1C,iBAAiB,CAClB,CAAC;wBACF,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI,EACxC;4BACA,IAAI,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,6EAA6E;oCAC7E,SAAS;iCACV;qCAAM;oCACL,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,MAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC;4CACxB,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;qCAC9D;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC
,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;oCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI;4BACxC,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,MAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,IAAI,aAAa,CAAC,WAAW,EAAE;gBAC5C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAAK,MAAM,eAAe,IAAI,aAAa,CAAC,gBAAgB,EAAE;oBAC5D,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;wBACrD,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,EAC3C,iBAAiB,CAClB,CAAC;wBACF,MAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;gCAC1C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,MAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,MAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,+HAA+H;oBAC9H,WAAmB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBACxD;gBAED,IAAI,OAAO,CAAC,cAAc,EAAE;oBAC1B,WAAW,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;iBACrD;gBAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,SAAS,IAAI,OAAO,CAAC,iBAAiB,KAAK,IAAI,EAAE;oBACjF,WAAW,CAAC,iBAAiB,GAAG,OAAO,CAAC,iBAAiB,CAAC;iBAC3D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,yBAAyB,KAAK,SAAS,EAAE;gBACvD,WAAW,CAAC,yBAAyB,GAAG,4BAA4B,CAAC,aAAa,CAAC,CAAC;aACrF;YAED,IAAI,WAAkC,CAAC;YACvC,IAAI,gBAAgB,CAAC;YACrB,IAAI;gBACF,WAAW,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;aACnD;YAAC,OAAO,KAAU,EAAE;gBACnB,gBAAgB,GAAG,KAAK,CAAC;aAC1B;YACD,IAAI,gBAAgB,EAAE;gBACpB,IAAI,gBAAgB,CAAC,QAAQ,EAAE;oBAC7B,gBAAgB,CAAC,OAAO,GAAG,eAAe,CACxC,gBAAgB,CAAC,QAAQ,EACzB,aAAa,CAAC,SAAS,CAAC,gBAAgB,CAAC,UAAU,CAAC;wBAClD,aAAa,CAAC,SAAS,CAAC,SAAS,CAAC,CACrC,CAAC;iBACH;gBACD,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC;aAC3C;iBAAM;gBACL,MAAM,GAAG,OAAO,CAAC,OAAO,CACtB,eAAe,CAAC,WAAY,EAAE,aAAa,CAAC,SAAS,CAAC,WAAY,CAAC,MAAM,CAAC,CAAC,CAC5E,CAAC;aACH;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,C
AAC,CAAC;SAChC;QAED,MAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;iBACH,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC;iBACvF,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAED,MAAM,UAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;;IAE5B,MAAM,iBAAiB,GAAG,MAAA,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,mCAAI,EAAE,CAAC;IAC9E,MAAM,cAAc,GAAgC;QAClD,QAAQ,EAAE,MAAA,iBAAiB,CAAC,QAAQ,mCAAI,EAAE;QAC1C,WAAW,EAAE,MAAA,iBAAiB,CAAC,WAAW,mCAAI,KAAK;QACnD,UAAU,EAAE,MAAA,iBAAiB,CAAC,UAAU,mCAAI,WAAW;KACxD,CAAC;IAEF,MAAM,UAAU,GAAG,iBAAiB,CAAC,UAAU,CAAC;IAChD,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,MAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QACpD,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,YAAY,EAAE,kBAAkB,EAAE,GAC3F,UAAU,CAAC;QACb,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QAEtC,IAAI;YACF,IAAI,CAAC,WAAW,CAAC,IAAI,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,QAAQ,EAAE;gBAC7E,MAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,EAC9B,cAAc,CACf,CAAC;gBAEF,MAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAEhD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,SAAS,kBAAkB,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;oBAC9E,MAAM,KAAK,GAAG,wBAAwB,CACpC,YAAY,EACZ,QAAQ,EACR,QAAQ,EACR,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;oBACF,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7B,KAAK,CAAC,kBAAkB,CACtB,KAAK,EACL,cAAc,IAAI,OAAO,IAAI,cAAe,EAC5C,QAAQ,EACR,YAAY,CACb,EACD;4BACE,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;yBACX,CACF,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,KAAK,EAAE;4BACrC,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;yBACX,CAAC,CAAC;qBACJ;iBACF;qBAAM,IACL,QAAQ,KAAK,UAAU,CAAC,MAAM;oBAC9B,CAAC,CAAA,MAAA,aAAa,CAAC,WAAW,0CAAE,KAAK,CAAC,YAAY,CAAC,KAAI,aAAa,CAAC,SAAS,KAAK,MAAM,CAAC,EACtF;oBACA,oEAAoE;oBACpE,2BAA2B;oBAC3B,OAAO;iBACR;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CACb,UAAU,KAAK,CAAC,OAAO,2CAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,GAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAK,MAAM,iBAAiB,IAAI,aAAa,CAAC,kBAAkB,EAAE;YAChE,MAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,KAAK,SAAS,IAAI,sBAAsB,KAAK,IAAI,EAAE;gBAC3E,MAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,EAC7C,cAAc,CACf,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED;;GAEG;AACH,SAAS,wBAAwB,CAC/B,YAAgC,EAChC,QAAgB,EAChB,QAAgB,EAChB,eAAoB,EACpB,OAAoC;IAEpC,2FAA2F;IAC3F,sDAAsD;IACtD,IAAI,YAAY,IAAI,CAAC,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;QAC/E,MAAM,MAAM,GAAQ,EAAE,CAAC;QACvB,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,YAAY,EAAE,CAAC;QACnD,OAAO,MAAM,CAAC;KACf;IAED,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,G
AAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,iBAAmD,EACnD,OAA6B;IAE7B,MAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,IAAI,iBAAiB,EAAE;QACrB,SAAS,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;KACnC;IAED,MAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,MAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IACD,SAAS,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC;IACjC,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAI,MAAM,EAAE;QACV,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;IAEnD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,yBAAyB,CACvC,eAAwC,EACxC,iBAAwC;IAExC,MAAM,sBAAsB,GAA2B,EAAE,CAAC;IAE1D,IAAI,eAAe,CAAC,iBAAiB,EAAE;QACrC,sBAAsB,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;KAC7C;IAED,IAAI,cAAc,GAAG,SAAS,CAAC;IAC/B,IAAI,eAAe,CAAC,gBAAgB,IAAI,eAAe,CAAC,gBAAgB,CAAC,eAAe,EAAE;QACxF,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAAC;QAErE,iEAAiE;QACjE,iCAAiC;QACjC,MAAM,oBAAoB,GAAG,wBAAwB,EAAE,CAAC;QACxD,IAAI,aAAa,CAAC,OAAO,CAAC,oBAAoB,CAAC,KAAK,CAAC,CAAC,EAAE;YACtD,aAAa,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SAC1C;QAED,cAAc,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC1C;IAED,MAAM,gBAAgB,mCACjB,uBAAuB,GACvB,eAAe,CAAC,gBAAgB,CACpC,CAAC;IAEF,MAAM,YAAY,mCACb,mBAAmB,GACnB,eAAe,CAAC,YAAY,CAChC,CAAC;IAEF,MAAM,eAAe,mCAChB,sBAAsB,GACtB,eAAe,CAAC,eAAe,CACnC,CAAC;IAEF,IAAI,MAAM,EAAE;QACV,sBAAsB,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;KACxE;IAED,MAAM,sBAAsB,mCACvB,6BAA6B,GAC7B,eAAe,CAAC,sBAAsB,CAC1C,CAAC;IAEF,MAAM,cAAc,qBACf,eAAe,CAAC,cAAc,CAClC,CAAC;IAEF,sBAAsB,CAAC,IAAI,CACzB,aAAa,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,EAC5C,eAAe,CAAC,gBAAgB,CAAC,EACjC,eAAe,CAAC,EAAE,KAAK,EAAE,cAAc,EAAE,CAAC,EAC1C,6BAA6B,EAAE,EAC/B,qBAAqB,CAAC,sBAAsB,CAAC,oBAAoB,CAAC,EAClE,qBAAqB,EAAE,EACvB,sBAAsB,EAAE,EACxB,sBAAsB,CACpB,YAAY,CAAC,UAAU,EACvB,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB,CAC/B,CACF,CAAC;IAEF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,sBAAsB,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KACzE;IAED,IAAI,iBAAiB,EAAE;QACrB,sBAAsB,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;KAChD;IAED,sBAAsB,CAAC,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,CAAC;IAEvD,IAAI,MAAM,IAAI,eAAe,CAAC,kBAAkB,KAAK,KAAK,EAAE;QAC1D,sBAAsB,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;KACnE;IAED,OAAO;QACL,UAAU,EAAE,eAAe,CAAC,UAAU;QACtC,sBAAsB;KACvB,CAAC;AACJ,CAAC;AAID;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAAC,MAAsB,EAAE,YAAsB;IAC9E,IAAI,MAAM,IAAI,YAAY,EAAE;QAC1B,MAAM,kBAAkB,GAAW,YAAY,CAAC,MAAM,CAAC;QACvD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE;YAC/C,MAAM,YAAY,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;YAC7C,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,EAAE;gBACzB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;aAC3B;YACD,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,CAAC;SAC/B;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;IAEtB,OAAO,0CAA
0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,MAAM,iBAAiB,GAAG,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,CAAC;IACxE,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;4BACxB,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC;aAC7F;YAED,0CAA0C;YAC1C,MAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;SACtF;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,MAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,MAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,MAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;YACF,0CAA0C;YAC1C,MAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,EAAE,iBAAiB,CAAC,CAAC;YAC3F,IAAI,aAAa,KAAK,SAAS,IAAI,aAAa,KAAK,IAAI,EAAE;gBACzD,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,MAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,MAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;QACnD,8EAA8E;QAC9E,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,IAAI,IAAI,iBAAiB,IAAI,MAAM,EAAE;YAC1E,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,MAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,MAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;IAE3D,MAAM,oBAAoB,GAAG,CAC3B,GAAM,EAGN,EAAE;QACF,OAAO,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YAC7C,KAAK,EAAE,SAAS;SACjB,CAEA,CAAC;IACJ,CAAC,CAAC;IAEF,IAAI,UAAU,EAAE;QACd,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,iCACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,MAAM,eAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC;QAC3F,MAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,IAAI,CAC1D,CAAC,CAAC,EAAE,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;YACjD,MAAM,aAAa,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,UAAU,IAAI,EAAE,CAAC,CAAyB,CAAC;YAEhF,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE;gBAC9C,IAAI,eAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE;oBAC5C,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;YACpC,OAAO,
aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,iCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnC,KAAK,CAAC,eAAe,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;QACA,yCAAyC;QACzC,OAAO,oBAAoB,iCACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,iCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,OAA8B,EAC9B,OAAgB;IAEhB,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;QAC7B,OAAO,OAAO,CAAC,gBAAgB,CAAC;KACjC;IAED,IAAI,OAAO,EAAE;QACX,OAAO,GAAG,OAAO,WAAW,CAAC;KAC9B;IACD,OAAO,SAAS,CAAC;AACnB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"./util/utils\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport {\n DefaultDeserializationOptions,\n DeserializationContentTypes,\n deserializationPolicy,\n} from \"./policies/deserializationPolicy\";\nimport { DefaultKeepAliveOptions, keepAlivePolicy } from \"./policies/keepAlivePolicy\";\nimport { DefaultRedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport { DefaultRetryOptions, exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nimport {\n OperationParameter,\n ParameterPath,\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n} from \"./operationParameter\";\nimport { OperationSpec, getStreamResponseStatusCodes } from \"./operationSpec\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResource,\n WebResourceLike,\n isWebResourceLike,\n} from \"./webResource\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./policies/requestPolicy\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { isNode } from \"@azure/core-util\";\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport {\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n userAgentPolicy,\n} from \"./policies/userAgentPolicy\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { InternalPipelineOptions } from \"./pipelineOptions\";\nimport { OperationArguments } from \"./operationArguments\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { URLBuilder } from \"./url\";\nimport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nimport { disableResponseDecompressionPolicy } from \"./policies/disableResponseDecompressionPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport { getCachedDefaultHttpClient } from \"./httpClientCache\";\nimport { logger } from \"./log\";\nimport { ndJsonPolicy } from \"./policies/ndJsonPolicy\";\nimport { proxyPolicy } from \"./policies/proxyPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { stringifyXML } from \"./util/xml\";\nimport { systemErrorRetryPolicy } from 
\"./policies/systemErrorRetryPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { tracingPolicy } from \"./policies/tracingPolicy\";\n\n/**\n * Options to configure a proxy for outgoing requests (Node.js only).\n */\nexport interface ProxySettings {\n /**\n * The proxy's host address.\n */\n host: string;\n\n /**\n * The proxy host's port.\n */\n port: number;\n\n /**\n * The user name to authenticate with the proxy, if required.\n */\n username?: string;\n\n /**\n * The password to authenticate with the proxy, if required.\n */\n password?: string;\n}\n\n/**\n * An alias of {@link ProxySettings} for future use.\n */\nexport type ProxyOptions = ProxySettings;\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. 
If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-useragent\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * If specified, will be used to build the BearerTokenAuthenticationPolicy.\n */\n credentialScopes?: string | string[];\n}\n\n/**\n * ServiceClient sends service requests and receives responses.\n */\nexport class ServiceClient {\n /**\n * If specified, this is the base URI that requests will be made against for this ServiceClient.\n * If it is not specified, then all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptions;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @param credentials - The credentials used for authentication with the service.\n * @param options - The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: TokenCredential | ServiceClientCredentials,\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || getCachedDefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n logger.info(\"ServiceClient: using custom request policies\");\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n let authPolicyFactory: RequestPolicyFactory | undefined = undefined;\n if (isTokenCredential(credentials)) {\n logger.info(\n \"ServiceClient: creating bearer token authentication policy from provided credentials\"\n );\n // Create a wrapped RequestPolicyFactory here so that we can provide the\n // correct scope to the BearerTokenAuthenticationPolicy at the first time\n // one is requested. 
This is needed because generated ServiceClient\n // implementations do not set baseUri until after ServiceClient's constructor\n // is finished, leaving baseUri empty at the time when it is needed to\n // build the correct scope name.\n const wrappedPolicyFactory: () => RequestPolicyFactory = () => {\n let bearerTokenPolicyFactory: RequestPolicyFactory | undefined = undefined;\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const serviceClient = this;\n const serviceClientOptions = options;\n return {\n create(nextPolicy: RequestPolicy, createOptions: RequestPolicyOptions): RequestPolicy {\n const credentialScopes = getCredentialScopes(\n serviceClientOptions,\n serviceClient.baseUri\n );\n\n if (!credentialScopes) {\n throw new Error(\n `When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`\n );\n }\n\n if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) {\n bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(\n credentials,\n credentialScopes\n );\n }\n\n return bearerTokenPolicyFactory.create(nextPolicy, createOptions);\n },\n };\n };\n\n authPolicyFactory = wrappedPolicyFactory();\n } else if (credentials && typeof credentials.signRequest === \"function\") {\n logger.info(\"ServiceClient: creating signing policy from provided credentials\");\n authPolicyFactory = signingPolicy(credentials);\n } else if (credentials !== undefined && credentials !== null) {\n throw new Error(\"The credentials argument must implement the TokenCredential interface\");\n }\n\n logger.info(\"ServiceClient: using default request policies\");\n requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options);\n if (options.requestPolicyFactories) {\n // options.requestPolicyFactories can also be a function that manipulates\n // the default requestPolicyFactories array\n const newRequestPolicyFactories: void | RequestPolicyFactory[] =\n options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error: any) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.\n * @param operationSpec - The OperationSpec to use to populate the httpRequest.\n * @param callback - The callback to call 
when the response is received.\n */\n async sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const serializerOptions = operationArguments.options?.serializerOptions;\n const httpRequest: WebResourceLike = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter),\n serializerOptions\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue !== undefined && queryParameterValue !== null) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter),\n serializerOptions\n );\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null\n ) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if (queryParameterValue.length === 0) {\n // The collection is empty, no need to try serializing the current queryParam\n continue;\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] =\n item === undefined || item === null ? 
\"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType && operationSpec.requestBody) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue !== undefined && headerValue !== null) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter),\n serializerOptions\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n\n if (options.spanOptions) {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n (httpRequest as any).spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n httpRequest.tracingContext = options.tracingContext;\n }\n\n if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) {\n httpRequest.shouldDeserialize = options.shouldDeserialize;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n 
serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseStatusCodes === undefined) {\n httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec);\n }\n\n let rawResponse: HttpOperationResponse;\n let sendRequestError;\n try {\n rawResponse = await this.sendRequest(httpRequest);\n } catch (error: any) {\n sendRequestError = error;\n }\n if (sendRequestError) {\n if (sendRequestError.response) {\n sendRequestError.details = flattenResponse(\n sendRequestError.response,\n operationSpec.responses[sendRequestError.statusCode] ||\n operationSpec.responses[\"default\"]\n );\n }\n result = Promise.reject(sendRequestError);\n } else {\n result = Promise.resolve(\n flattenResponse(rawResponse!, operationSpec.responses[rawResponse!.status])\n );\n }\n } catch (error: any) {\n result = Promise.reject(error);\n }\n\n const cb = callback;\n if (cb) {\n result\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n const serializerOptions = operationArguments.options?.serializerOptions ?? {};\n const updatedOptions: Required = {\n rootName: serializerOptions.rootName ?? \"\",\n includeRoot: serializerOptions.includeRoot ?? false,\n xmlCharKey: serializerOptions.xmlCharKey ?? XML_CHARKEY,\n };\n\n const xmlCharKey = serializerOptions.xmlCharKey;\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } =\n bodyMapper;\n const typeName = bodyMapper.type.name;\n\n try {\n if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString,\n updatedOptions\n );\n\n const isStream = typeName === MapperType.Stream;\n\n if (operationSpec.isXML) {\n const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : \"xmlns\";\n const value = getXmlValueWithNamespace(\n xmlNamespace,\n xmlnsKey,\n typeName,\n httpRequest.body,\n updatedOptions\n );\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n value,\n xmlElementName || xmlName || serializedName!,\n xmlnsKey,\n xmlNamespace\n ),\n {\n rootName: xmlName || serializedName,\n xmlCharKey,\n }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(value, {\n rootName: xmlName || serializedName,\n xmlCharKey,\n });\n }\n } else if (\n typeName === MapperType.String &&\n (operationSpec.contentType?.match(\"text/plain\") || operationSpec.mediaType === \"text\")\n ) {\n // the String serializer has validated that request body is a string\n // so just send the string.\n return;\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error: any) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue !== undefined && formDataParameterValue !== null) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter),\n updatedOptions\n );\n }\n }\n }\n}\n\n/**\n * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself\n */\nfunction getXmlValueWithNamespace(\n xmlNamespace: string | undefined,\n xmlnsKey: string,\n typeName: string,\n serializedValue: any,\n options: Required\n): any {\n // Composite and Sequence schemas already got their root namespace set during serialization\n // We just need to add xmlns to the other schema types\n if (xmlNamespace && ![\"Composite\", \"Sequence\", \"Dictionary\"].includes(typeName)) {\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace };\n return result;\n }\n\n return serializedValue;\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n authPolicyFactory: RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (authPolicyFactory) {\n factories.push(authPolicyFactory);\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const 
userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n factories.push(redirectPolicy());\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n if (isNode) {\n factories.push(proxyPolicy(options.proxySettings));\n }\n\n factories.push(logPolicy({ logger: logger.info }));\n\n return factories;\n}\n\n/**\n * Creates an HTTP pipeline based on the given options.\n * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client.\n * @param authPolicyFactory - An optional authentication policy factory to use for signing requests.\n * @returns A set of options that can be passed to create a new {@link ServiceClient}.\n */\nexport function createPipelineFromOptions(\n pipelineOptions: InternalPipelineOptions,\n authPolicyFactory?: RequestPolicyFactory\n): ServiceClientOptions {\n const requestPolicyFactories: RequestPolicyFactory[] = [];\n\n if (pipelineOptions.sendStreamingJson) {\n requestPolicyFactories.push(ndJsonPolicy());\n }\n\n let userAgentValue = undefined;\n if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) {\n const userAgentInfo: string[] = [];\n userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix);\n\n // Add the default user agent value if it isn't already specified\n // by the userAgentPrefix option.\n const defaultUserAgentInfo = getDefaultUserAgentValue();\n if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) {\n userAgentInfo.push(defaultUserAgentInfo);\n }\n\n userAgentValue = userAgentInfo.join(\" \");\n }\n\n const keepAliveOptions = {\n ...DefaultKeepAliveOptions,\n ...pipelineOptions.keepAliveOptions,\n };\n\n const retryOptions = {\n ...DefaultRetryOptions,\n ...pipelineOptions.retryOptions,\n };\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...pipelineOptions.redirectOptions,\n };\n\n if (isNode) {\n requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));\n }\n\n const deserializationOptions = {\n ...DefaultDeserializationOptions,\n ...pipelineOptions.deserializationOptions,\n };\n\n const loggingOptions: LogPolicyOptions = {\n ...pipelineOptions.loggingOptions,\n };\n\n requestPolicyFactories.push(\n tracingPolicy({ userAgent: userAgentValue }),\n keepAlivePolicy(keepAliveOptions),\n userAgentPolicy({ value: userAgentValue }),\n generateClientRequestIdPolicy(),\n deserializationPolicy(deserializationOptions.expectedContentTypes),\n throttlingRetryPolicy(),\n systemErrorRetryPolicy(),\n exponentialRetryPolicy(\n retryOptions.maxRetries,\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n )\n );\n\n if (redirectOptions.handleRedirects) {\n requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n if (authPolicyFactory) {\n requestPolicyFactories.push(authPolicyFactory);\n }\n\n requestPolicyFactories.push(logPolicy(loggingOptions));\n\n if (isNode && pipelineOptions.decompressResponse === false) {\n requestPolicyFactories.push(disableResponseDecompressionPolicy());\n }\n\n return {\n httpClient: 
pipelineOptions.httpClient,\n requestPolicyFactories,\n };\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n const serializerOptions = operationArguments.options?.serializerOptions;\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions);\n if (propertyValue !== undefined && propertyValue !== null) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent !== undefined && parent !== null && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\n/**\n * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}).\n * @param _response - Wrapper object for http response.\n * @param responseSpec - Mappers for how to parse the response properties.\n * @returns - A normalized response object.\n */\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = >(\n obj: T\n ): T & {\n _response: HttpOperationResponse;\n } => {\n return Object.defineProperty(obj, \"_response\", {\n value: _response,\n }) as T & {\n _response: HttpOperationResponse;\n };\n };\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n const arrayResponse = [...(_response.parsedBody || [])] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of 
Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n\nfunction getCredentialScopes(\n options?: ServiceClientOptions,\n baseUri?: string\n): string | string[] | undefined {\n if (options?.credentialScopes) {\n return options.credentialScopes;\n }\n\n if (baseUri) {\n return `${baseUri}/.default`;\n }\n return undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/url.js b/node_modules/@azure/core-http/dist-esm/src/url.js new file mode 100644 index 0000000..35503a5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/url.js @@ -0,0 +1,597 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { replaceAll } from "./util/utils"; +/** + * A class that handles the query portion of a URLBuilder. + */ +export class URLQuery { + constructor() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any() { + return Object.keys(this._rawQuery).length > 0; + } + /** + * Get the keys of the query string. + */ + keys() { + return Object.keys(this._rawQuery); + } + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName, parameterValue) { + const caseParameterValue = parameterValue; + if (parameterName) { + if (caseParameterValue !== undefined && caseParameterValue !== null) { + const newValue = Array.isArray(caseParameterValue) + ? caseParameterValue + : caseParameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + } + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString() { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } + else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + /** + * Parse a URLQuery from the provided text. 
+ */ + static parse(text) { + const result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + let currentState = "ParameterName"; + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + } +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export class URLBuilder { + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + } + /** + * Get the scheme that has been set in this URL. + */ + getScheme() { + return this._scheme; + } + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + } + /** + * Get the host that has been set in this URL. + */ + getHost() { + return this._host; + } + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port) { + if (port === undefined || port === null || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + } + /** + * Get the port that has been set in this URL. + */ + getPort() { + return this._port; + } + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path) { + if (!path) { + this._path = undefined; + } + else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + } + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path) { + if (path) { + let currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + /** + * Get the path that has been set in this URL. 
+ */ + getPath() { + return this._path; + } + /** + * Set the query in this URL. + */ + setQuery(query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + } + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + } + /** + * Get the query in this URL. + */ + getQuery() { + return this._query ? this._query.toString() : undefined; + } + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + set(text, startState) { + const tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + const token = tokenizer.current(); + let tokenPath; + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString() { + let result = ""; + if (this._scheme) { + result += `${this._scheme}://`; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += `:${this._port}`; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + return result; + } + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + /** + * Parses a given string URL into a new {@link URLBuilder}. 
+ */ + static parse(text) { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} +export class URLToken { + constructor(text, type) { + this.text = text; + this.type = type; + } + static scheme(text) { + return new URLToken(text, "SCHEME"); + } + static host(text) { + return new URLToken(text, "HOST"); + } + static port(text) { + return new URLToken(text, "PORT"); + } + static path(text) { + return new URLToken(text, "PATH"); + } + static query(text) { + return new URLToken(text, "QUERY"); + } +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character) { + const characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +export class URLTokenizer { + constructor(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current() { + return this._currentToken; + } + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next() { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. 
+ */ +function peekCharacters(tokenizer, charactersToPeek) { + let endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + let result = ""; + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer, ...terminatingCharacters) { + return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); +} +function nextScheme(tokenizer) { + const scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + const host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + const port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + const path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + 
tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + const query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} +//# sourceMappingURL=url.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/url.js.map b/node_modules/@azure/core-http/dist-esm/src/url.js.map new file mode 100644 index 0000000..2be3eb9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/url.js.map @@ -0,0 +1 @@ +{"version":3,"file":"url.js","sourceRoot":"","sources":["../../src/url.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C;;GAEG;AACH,MAAM,OAAO,QAAQ;IAArB;QACmB,cAAS,GAAwD,EAAE,CAAC;IAiIvF,CAAC;IA/HC;;OAEG;IACI,GAAG;QACR,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;IAChD,CAAC;IAED;;OAEG;IACI,IAAI;QACT,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACI,GAAG,CAAC,aAAqB,EAAE,cAAuB;QACvD,MAAM,kBAAkB,GAAG,cAE1B,CAAC;QACF,IAAI,aAAa,EAAE;YACjB,IAAI,kBAAkB,KAAK,SAAS,IAAI,kBAAkB,KAAK,IAAI,EAAE;gBACnE,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;oBAChD,CAAC,CAAC,kBAAkB;oBACpB,CAAC,CAAC,kBAAkB,CAAC,QAAQ,EAAE,CAAC;gBAClC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;IACH,CAAC;IAED;;;OAGG;IACI,GAAG,CAAC,aAAqB;QAC9B,OAAO,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACnE,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,MAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,MAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAK,MAAM,qBAAqB,IAAI,cAAc,EAAE;oBAClD,gBAAgB,CAAC,IAAI,CAAC,GAAG,aAAa,IAAI,qBAAqB,EAAE,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAI,GAAG,aAAa,IAAI,cAAc,EAAE,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,MAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,MAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY,EAAE;oBACpB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAED;;GAEG;AACH,MAAM,OAAO,UAAU;IAOrB;;;OAGG;IACI,SAAS,CAAC,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAEG;IACI
,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAiC;QAC9C,IAAI,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACtD,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;gBACvD,0FAA0F;gBAC1F,2FAA2F;gBAC3F,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;IACH,CAAC;IAED;;;OAGG;IACI,UAAU,CAAC,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GAAG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,QAAQ,CAAC,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;IACH,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,kBAA0B,EAAE,mBAA4B;QAC/E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;IACH,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAAC,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1D,CAAC;IAED;;OAEG;IACK,GAAG,CAAC,IAAY,EAAE,UAA6B;QACrD,MAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,MAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,SAA6B,CAAC;YAClC,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI,EAAE;oBAClB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,SAAS,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACpC,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,8BAA8B,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;iBAC/D;aACF;SACF;IACH,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,KAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG
,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,EAAE,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;OAGG;IACI,UAAU,CAAC,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;IACH,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAMD,MAAM,OAAO,QAAQ;IACnB,YAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;IAAG,CAAC;IAEhF,MAAM,CAAC,MAAM,CAAC,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IACtC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IACrC,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,UAAU,uBAAuB,CAAC,SAAiB;IACvD,MAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,OAAO,CACL,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,CAAC,SAAS,CAClE,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,YAAY;IAMvB,YAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,gBAAgB,CAAC;QACtF,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IACzB,CAAC;IAED;;;OAGG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;OAEG;IACI,IAAI;QACT,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa,EAAE;gBAC1B,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,mCAAmC,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;CACF;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;GAGG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;
KACjC;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,MAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC,SAAiB,EAAE,EAAE,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACzF,CAAC;AAED;;;GAGG;AACH,SAAS,kBAAkB,CAAC,SAAuB,EAAE,GAAG,qBAA+B;IACrF,OAAO,SAAS,CACd,SAAS,EACT,CAAC,SAAiB,EAAE,EAAE,CAAC,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,MAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,MAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,MAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAA
C,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Get the keys of the query string.\n */\n public keys(): string[] {\n return Object.keys(this._rawQuery);\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: unknown): void {\n const caseParameterValue = parameterValue as {\n toString: () => string;\n };\n if (parameterName) {\n if (caseParameterValue !== undefined && caseParameterValue !== null) {\n const newValue = Array.isArray(caseParameterValue)\n ? caseParameterValue\n : caseParameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. 
If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port === undefined || port === null || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n let tokenPath: string | undefined;\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n tokenPath = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n /**\n * Serializes the URL as a string.\n * @returns the URL as a string.\n */\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n /**\n * Parses a given string URL into a new {@link URLBuilder}.\n */\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode 
<= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state !== undefined && state !== null ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js new file mode 100644 index 0000000..1505a28 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export function encodeString(value) { + return btoa(value); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export function encodeByteArray(value) { + let str = ""; + for (let i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} +/** + * Decodes a base64 string into a byte array. 
+ * @param value - The base64 string to decode + */ +export function decodeString(value) { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} +//# sourceMappingURL=base64.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map new file mode 100644 index 0000000..87ae7d6 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.js","sourceRoot":"","sources":["../../../src/util/base64.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;AACrB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACtC;IACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AACnB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;IAC/B,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;KACnC;IACD,OAAO,GAAG,CAAC;AACb,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return btoa(value);\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n let str = \"\";\n for (let i = 0; i < value.length; i++) {\n str += String.fromCharCode(value[i]);\n }\n return btoa(str);\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.js b/node_modules/@azure/core-http/dist-esm/src/util/base64.js new file mode 100644 index 0000000..f5fff42 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. 
+ * @param value - The base64 string to decode + */ +export function decodeString(value) { + return Buffer.from(value, "base64"); +} +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map b/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map new file mode 100644 index 0000000..f7ecda3 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.js","sourceRoot":"","sources":["../../../src/util/base64.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,kGAAkG;IAClG,mGAAmG;IACnG,MAAM,WAAW,GAAG,KAAK,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/constants.js b/node_modules/@azure/core-http/dist-esm/src/util/constants.js new file mode 100644 index 0000000..2d3d47b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/constants.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ +export const Constants = { + /** + * The core-http version + */ + coreHttpVersion: "3.0.4", + /** + * Specifies HTTP. + */ + HTTP: "http:", + /** + * Specifies HTTPS. + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + ServiceUnavailable: 503, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. 
+ */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + */ + USER_AGENT: "User-Agent", + }, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map b/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map new file mode 100644 index 0000000..504935a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../src/util/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC;;GAEG;AACH,MAAM,CAAC,MAAM,SAAS,GAAG;IACvB;;OAEG;IACH,eAAe,EAAE,OAAO;IAExB;;OAEG;IACH,IAAI,EAAE,OAAO;IAEb;;OAEG;IACH,KAAK,EAAE,QAAQ;IAEf;;OAEG;IACH,UAAU,EAAE,YAAY;IAExB;;OAEG;IACH,WAAW,EAAE,aAAa;IAE1B;;OAEG;IACH,QAAQ,EAAE,UAAU;IAEpB;;OAEG;IACH,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;QACb;;WAEG;QACH,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;YACpB,kBAAkB,EAAE,GAAG;SACxB;KACF;IAED;;OAEG;IACH,eAAe,EAAE;QACf;;WAEG;QACH,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;QAE9B;;;;WAIG;QACH,WAAW,EAAE,aAAa;QAE1B;;WAEG;QACH,UAAU,EAAE,YAAY;KACzB;CACF,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A set of constants used internally when processing requests.\n */\nexport const Constants = {\n /**\n * The core-http version\n */\n coreHttpVersion: \"3.0.4\",\n\n /**\n * Specifies HTTP.\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n ServiceUnavailable: 503,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js new file mode 100644 index 0000000..e888845 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export const DEFAULT_CLIENT_RETRY_COUNT = 3;
+// intervals are in ms
+export const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;
+export const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;
+export const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;
+export function isNumber(n) {
+ return typeof n === "number";
+}
+/**
+ * @internal
+ * Determines if the operation should be retried.
+ *
+ * @param retryLimit - Specifies the max number of retries.
+ * @param predicate - Initial check on whether to retry based on given responses or errors
+ * @param retryData - The retry data.
+ * @returns True if the operation qualifies for a retry; false otherwise.
+ */
+export function shouldRetry(retryLimit, predicate, retryData, response, error) {
+ if (!predicate(response, error)) {
+ return false;
+ }
+ return retryData.retryCount < retryLimit;
+}
+/**
+ * @internal
+ * Updates the retry data for the next attempt.
+ *
+ * @param retryOptions - specifies retry interval, and its lower bound and upper bound.
+ * @param retryData - The retry data.
+ * @param err - The operation's error, if any.
+ */
+export function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) {
+ if (err) {
+ if (retryData.error) {
+ err.innerError = retryData.error;
+ }
+ retryData.error = err;
+ }
+ // Adjust retry count
+ retryData.retryCount++;
+ // Adjust retry interval
+ let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;
+ const boundedRandDelta = retryOptions.retryInterval * 0.8 +
+ Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));
+ incrementDelta *= boundedRandDelta;
+ retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval);
+ return retryData;
+}
+//# sourceMappingURL=exponentialBackoffStrategy.js.map
\ No newline at end of file
diff --git a/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map
new file mode 100644
index 0000000..f92923e
--- /dev/null
+++ b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map
@@ -0,0 +1 @@
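The retry interval math in updateRetryData above is easy to misread in its compiled form. The following is a minimal standalone TypeScript sketch (not part of the vendored package) that mirrors the same calculation using the default intervals declared in this file; the constant and function names are illustrative only.

// Minimal standalone sketch of the jittered exponential backoff computed by
// updateRetryData above; the names mirror the defaults shown in the diff but
// are illustrative, not exports of @azure/core-http.
const RETRY_INTERVAL = 1000 * 30;     // DEFAULT_CLIENT_RETRY_INTERVAL
const MIN_RETRY_INTERVAL = 1000 * 3;  // DEFAULT_CLIENT_MIN_RETRY_INTERVAL
const MAX_RETRY_INTERVAL = 1000 * 90; // DEFAULT_CLIENT_MAX_RETRY_INTERVAL

function nextRetryInterval(retryCount: number): number {
  // 2^(n-1) - 1 grows the delay exponentially with each attempt...
  const incrementDelta = Math.pow(2, retryCount - 1) - 1;
  // ...scaled by a jitter factor between 0.8x and 1.2x of the base interval...
  const jitter = RETRY_INTERVAL * 0.8 + Math.floor(Math.random() * (RETRY_INTERVAL * 0.4));
  // ...and clamped so the wait never exceeds the configured maximum.
  return Math.min(MIN_RETRY_INTERVAL + incrementDelta * jitter, MAX_RETRY_INTERVAL);
}

// With the defaults: attempt 1 waits ~3 s, attempt 2 waits ~27-39 s,
// attempt 3 lands between ~75 s and the 90 s cap.
for (let attempt = 1; attempt <= 3; attempt++) {
  console.log(`attempt ${attempt}: wait ${nextRetryInterval(attempt)} ms`);
}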
+{"version":3,"file":"exponentialBackoffStrategy.js","sourceRoot":"","sources":["../../../src/util/exponentialBackoffStrategy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,CAAC;AAC5C,sBAAsB;AACtB,MAAM,CAAC,MAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AACvD,MAAM,CAAC,MAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AAC3D,MAAM,CAAC,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAE1D,MAAM,UAAU,QAAQ,CAAC,CAAU;IACjC,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;AAC/B,CAAC;AAaD;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CACzB,UAAkB,EAClB,SAA4E,EAC5E,SAAoB,EACpB,QAAgC,EAChC,KAAkB;IAElB,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,KAAK,CAAC,EAAE;QAC/B,OAAO,KAAK,CAAC;KACd;IAED,OAAO,SAAS,CAAC,UAAU,GAAG,UAAU,CAAC;AAC3C,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,eAAe,CAC7B,YAA2F,EAC3F,YAAuB,EAAE,UAAU,EAAE,CAAC,EAAE,aAAa,EAAE,CAAC,EAAE,EAC1D,GAAgB;IAEhB,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,gBAAgB,GACpB,YAAY,CAAC,aAAa,GAAG,GAAG;QAChC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,YAAY,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACjE,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,YAAY,CAAC,gBAAgB,GAAG,cAAc,EAC9C,YAAY,CAAC,gBAAgB,CAC9B,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"..\";\n\nexport const DEFAULT_CLIENT_RETRY_COUNT = 3;\n// intervals are in ms\nexport const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nexport const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nexport const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\nexport function isNumber(n: unknown): n is number {\n return typeof n === \"number\";\n}\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\n/**\n * @internal\n * Determines if the operation should be retried.\n *\n * @param retryLimit - Specifies the max number of retries.\n * @param predicate - Initial chekck on whether to retry based on given responses or errors\n * @param retryData - The retry data.\n * @returns True if the operation qualifies for a retry; false otherwise.\n */\nexport function shouldRetry(\n retryLimit: number,\n predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean,\n retryData: RetryData,\n response?: HttpOperationResponse,\n error?: RetryError\n): boolean {\n if (!predicate(response, error)) {\n return false;\n }\n\n return retryData.retryCount < retryLimit;\n}\n\n/**\n * @internal\n * Updates the retry data for the next attempt.\n *\n * @param retryOptions - specifies retry interval, and its lower bound and upper bound.\n * @param retryData - The retry data.\n * @param err - The operation\"s error, if any.\n */\nexport function updateRetryData(\n retryOptions: { retryInterval: number; minRetryInterval: number; maxRetryInterval: number },\n retryData: RetryData = { retryCount: 0, retryInterval: 0 },\n err?: RetryError\n): RetryData {\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;\n const 
boundedRandDelta =\n retryOptions.retryInterval * 0.8 +\n Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n retryOptions.minRetryInterval + incrementDelta,\n retryOptions.maxRetryInterval\n );\n\n return retryData;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js new file mode 100644 index 0000000..65d8b5e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const custom = {}; +//# sourceMappingURL=inspect.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map new file mode 100644 index 0000000..edb1168 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.browser.js","sourceRoot":"","sources":["../../../src/util/inspect.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,MAAM,GAAG,EAAE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const custom = {};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.js b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js new file mode 100644 index 0000000..53fba75 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { inspect } from "util"; +export const custom = inspect.custom; +//# sourceMappingURL=inspect.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map new file mode 100644 index 0000000..5ba871c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.js","sourceRoot":"","sources":["../../../src/util/inspect.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAE/B,MAAM,CAAC,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { inspect } from \"util\";\n\nexport const custom = inspect.custom;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js new file mode 100644 index 0000000..7683347 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { URLBuilder, URLQuery } from "../url"; +import { isObject } from "./utils"; +const RedactedString = "REDACTED"; +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +export class Sanitizer { + constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { + allowedHeaderNames = Array.isArray(allowedHeaderNames) + ? defaultAllowedHeaderNames.concat(allowedHeaderNames) + : defaultAllowedHeaderNames; + allowedQueryParameters = Array.isArray(allowedQueryParameters) + ? defaultAllowedQueryParameters.concat(allowedQueryParameters) + : defaultAllowedQueryParameters; + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "_headersMap") { + return this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. 
+ return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(value) { + return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); + } + sanitizeQuery(value) { + return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); + } + sanitizeObject(value, allowedKeys, accessor) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (allowedKeys.has(k.toLowerCase())) { + sanitized[k] = accessor(value, k); + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const urlBuilder = URLBuilder.parse(value); + const queryString = urlBuilder.getQuery(); + if (!queryString) { + return value; + } + const query = URLQuery.parse(queryString); + for (const k of query.keys()) { + if (!this.allowedQueryParameters.has(k.toLowerCase())) { + query.set(k, RedactedString); + } + } + urlBuilder.setQuery(query.toString()); + return urlBuilder.toString(); + } +} +//# sourceMappingURL=sanitizer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map new file mode 100644 index 0000000..30d07b3 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"sanitizer.js","sourceRoot":"","sources":["../../../src/util/sanitizer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAC9C,OAAO,EAAiB,QAAQ,EAAE,MAAM,SAAS,CAAC;AAkBlD,MAAM,cAAc,GAAG,UAAU,CAAC;AAElC,MAAM,yBAAyB,GAAG;IAChC,wBAAwB;IACxB,+BAA+B;IAC/B,gBAAgB;IAChB,6BAA6B;IAC7B,iBAAiB;IACjB,mBAAmB;IACnB,OAAO;IACP,0BAA0B;IAC1B,aAAa;IAEb,kCAAkC;IAClC,8BAA8B;IAC9B,8BAA8B;IAC9B,6BAA6B;IAC7B,+BAA+B;IAC/B,wBAAwB;IACxB,gCAAgC;IAChC,+BAA+B;IAC/B,QAAQ;IAER,QAAQ;IACR,iBAAiB;IACjB,eAAe;IACf,YAAY;IACZ,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,MAAM;IACN,SAAS;IACT,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,eAAe;IACf,QAAQ;IACR,YAAY;IACZ,aAAa;IACb,QAAQ;IACR,mBAAmB;IACnB,YAAY;IACZ,kBAAkB;CACnB,CAAC;AAEF,MAAM,6BAA6B,GAAa,CAAC,aAAa,CAAC,CAAC;AAEhE,MAAM,OAAO,SAAS;IAIpB,YAAY,EAAE,kBAAkB,GAAG,EAAE,EAAE,sBAAsB,GAAG,EAAE,KAAuB,EAAE;QACzF,kBAAkB,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;YACpD,CAAC,CAAC,yBAAyB,CAAC,MAAM,CAAC,kBAAkB,CAAC;YACtD,CAAC,CAAC,yBAAyB,CAAC;QAE9B,sBAAsB,GAAG,KAAK,CAAC,OAAO,CAAC,sBAAsB,CAAC;YAC5D,CAAC,CAAC,6BAA6B,CAAC,MAAM,CAAC,sBAAsB,CAAC;YAC9D,CAAC,CAAC,6BAA6B,CAAC;QAElC,IAAI,CAAC,kBAAkB,GAAG,IAAI,GAAG,CAAC,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;QAClF,IAAI,CAAC,sBAAsB,GAAG,IAAI,GAAG,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;IAC5F,CAAC;IAEM,QAAQ,CAAC,GAAY;QAC1B,MAAM,IAAI,GAAG,IAAI,GAAG,EAAW,CAAC;QAChC,OAAO,IAAI,CAAC,SAAS,CACnB,GAAG,EACH,CAAC,GAAW,EAAE,KAAc,EAAE,EAAE;YAC9B,iEAAiE;YACjE,IAAI,KAAK,YAAY,KAAK,EAAE;gBAC1B,uCACK,KAAK,KACR,IAAI,EAAE,KAAK,CAAC,IAAI,EAChB,OAAO,EAAE,KAAK,CAAC,OAAO,IACtB;aACH;YAED,IAAI,GAAG,KAAK,aAAa,EAAE;gBACzB,OAAO,IAAI,CAAC,eAAe,CAAC,KAAsB,CAAC,CAAC;aACrD;iBAAM,IAAI,GAAG,KAAK,KAAK,EAAE;gBACxB,OAAO,IAAI,CAAC,WAAW,CAAC,KAAe,CAAC,CAAC;aAC1C;iBAAM,IAAI,GAAG,KAAK,OAAO,EAAE;gBAC1B,OAAO,IAAI,CAAC,aAAa,CAAC,KAAsB,CAAC,CAAC;aACnD;iBAAM,IAAI,GAAG,KAAK,MAAM,EAAE;gBACzB,6
BAA6B;gBAC7B,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,GAAG,KAAK,UAAU,EAAE;gBAC7B,2BAA2B;gBAC3B,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,GAAG,KAAK,eAAe,EAAE;gBAClC,iEAAiE;gBACjE,mDAAmD;gBACnD,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE;gBAClD,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;oBACnB,OAAO,YAAY,CAAC;iBACrB;gBACD,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;aACjB;YAED,OAAO,KAAK,CAAC;QACf,CAAC,EACD,CAAC,CACF,CAAC;IACJ,CAAC;IAEO,eAAe,CAAC,KAAoB;QAC1C,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IACnF,CAAC;IAEO,aAAa,CAAC,KAAoB;QACxC,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC;IAEO,cAAc,CACpB,KAAoB,EACpB,WAAwB,EACxB,QAA0C;QAE1C,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;YAC/C,OAAO,KAAK,CAAC;SACd;QAED,MAAM,SAAS,GAAkB,EAAE,CAAC;QAEpC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;YAClC,IAAI,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACpC,SAAS,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;aACnC;iBAAM;gBACL,SAAS,CAAC,CAAC,CAAC,GAAG,cAAc,CAAC;aAC/B;SACF;QAED,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,WAAW,CAAC,KAAa;QAC/B,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;YAC/C,OAAO,KAAK,CAAC;SACd;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAC3C,MAAM,WAAW,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;QAE1C,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,KAAK,CAAC;SACd;QAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QAC1C,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACrD,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;aAC9B;SACF;QAED,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC/B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { URLBuilder, URLQuery } from \"../url\";\nimport { UnknownObject, isObject } from \"./utils\";\n\nexport interface SanitizerOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. 
By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n}\n\nconst RedactedString = \"REDACTED\";\n\nconst defaultAllowedHeaderNames = [\n \"x-ms-client-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-useragent\",\n \"x-ms-correlation-request-id\",\n \"x-ms-request-id\",\n \"client-request-id\",\n \"ms-cv\",\n \"return-client-request-id\",\n \"traceparent\",\n\n \"Access-Control-Allow-Credentials\",\n \"Access-Control-Allow-Headers\",\n \"Access-Control-Allow-Methods\",\n \"Access-Control-Allow-Origin\",\n \"Access-Control-Expose-Headers\",\n \"Access-Control-Max-Age\",\n \"Access-Control-Request-Headers\",\n \"Access-Control-Request-Method\",\n \"Origin\",\n\n \"Accept\",\n \"Accept-Encoding\",\n \"Cache-Control\",\n \"Connection\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"ETag\",\n \"Expires\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"Last-Modified\",\n \"Pragma\",\n \"Request-Id\",\n \"Retry-After\",\n \"Server\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"WWW-Authenticate\",\n];\n\nconst defaultAllowedQueryParameters: string[] = [\"api-version\"];\n\nexport class Sanitizer {\n public allowedHeaderNames: Set;\n public allowedQueryParameters: Set;\n\n constructor({ allowedHeaderNames = [], allowedQueryParameters = [] }: SanitizerOptions = {}) {\n allowedHeaderNames = Array.isArray(allowedHeaderNames)\n ? defaultAllowedHeaderNames.concat(allowedHeaderNames)\n : defaultAllowedHeaderNames;\n\n allowedQueryParameters = Array.isArray(allowedQueryParameters)\n ? defaultAllowedQueryParameters.concat(allowedQueryParameters)\n : defaultAllowedQueryParameters;\n\n this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase()));\n this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase()));\n }\n\n public sanitize(obj: unknown): string {\n const seen = new Set();\n return JSON.stringify(\n obj,\n (key: string, value: unknown) => {\n // Ensure Errors include their interesting non-enumerable members\n if (value instanceof Error) {\n return {\n ...value,\n name: value.name,\n message: value.message,\n };\n }\n\n if (key === \"_headersMap\") {\n return this.sanitizeHeaders(value as UnknownObject);\n } else if (key === \"url\") {\n return this.sanitizeUrl(value as string);\n } else if (key === \"query\") {\n return this.sanitizeQuery(value as UnknownObject);\n } else if (key === \"body\") {\n // Don't log the request body\n return undefined;\n } else if (key === \"response\") {\n // Don't log response again\n return undefined;\n } else if (key === \"operationSpec\") {\n // When using sendOperationRequest, the request carries a massive\n // field with the autorest spec. 
No need to log it.\n return undefined;\n } else if (Array.isArray(value) || isObject(value)) {\n if (seen.has(value)) {\n return \"[Circular]\";\n }\n seen.add(value);\n }\n\n return value;\n },\n 2\n );\n }\n\n private sanitizeHeaders(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value);\n }\n\n private sanitizeQuery(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]);\n }\n\n private sanitizeObject(\n value: UnknownObject,\n allowedKeys: Set,\n accessor: (value: any, key: string) => any\n ): UnknownObject {\n if (typeof value !== \"object\" || value === null) {\n return value;\n }\n\n const sanitized: UnknownObject = {};\n\n for (const k of Object.keys(value)) {\n if (allowedKeys.has(k.toLowerCase())) {\n sanitized[k] = accessor(value, k);\n } else {\n sanitized[k] = RedactedString;\n }\n }\n\n return sanitized;\n }\n\n private sanitizeUrl(value: string): string {\n if (typeof value !== \"string\" || value === null) {\n return value;\n }\n\n const urlBuilder = URLBuilder.parse(value);\n const queryString = urlBuilder.getQuery();\n\n if (!queryString) {\n return value;\n }\n\n const query = URLQuery.parse(queryString);\n for (const k of query.keys()) {\n if (!this.allowedQueryParameters.has(k.toLowerCase())) {\n query.set(k, RedactedString);\n }\n }\n\n urlBuilder.setQuery(query.toString());\n return urlBuilder.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js new file mode 100644 index 0000000..cc3bb61 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. 
+ */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=serializer.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map new file mode 100644 index 0000000..c4c3d96 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.common.js","sourceRoot":"","sources":["../../../src/util/serializer.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,GAAG,CAAC;AAC/B;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,GAAG,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Default key used to access the XML attributes.\n */\nexport const XML_ATTRKEY = \"$\";\n/**\n * Default key used to access the XML value content.\n */\nexport const XML_CHARKEY = \"_\";\n\n/**\n * Options to govern behavior of xml parser and builder.\n */\nexport interface SerializerOptions {\n /**\n * indicates the name of the root element in the resulting XML when building XML.\n */\n rootName?: string;\n /**\n * indicates whether the root element is to be included or not in the output when parsing XML.\n */\n includeRoot?: boolean;\n /**\n * key used to access the XML value content when parsing XML.\n */\n xmlCharKey?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js new file mode 100644 index 0000000..b9e5a38 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Maximum number of retries for the throttling retry policy + */ +export const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map new file mode 100644 index 0000000..42c8a66 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryStrategy.js","sourceRoot":"","sources":["../../../src/util/throttlingRetryStrategy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,8BAA8B,GAAG,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Maximum number of retries for the throttling retry policy\n */\nexport const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/utils.js b/node_modules/@azure/core-http/dist-esm/src/util/utils.js new file mode 100644 index 0000000..cebdbb1 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/utils.js @@ -0,0 +1,206 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { Constants } from "./constants"; +import { XML_ATTRKEY } from "./serializer.common"; +import { v4 as uuidv4 } from "uuid"; +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck) { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export function stripResponse(response) { + const strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +export function stripRequest(request) { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +export function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export function generateUuid() { + return uuidv4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export function executePromisesSequentially(promiseFactories, kickstart) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Converts a Promise to a callback. 
+ * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +// eslint-disable-next-line @typescript-eslint/ban-types +export function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + // eslint-disable-next-line @typescript-eslint/ban-types + return (cb) => { + promise + .then((data) => { + // eslint-disable-next-line promise/no-callback-in-promise + return cb(undefined, data); + }) + .catch((err) => { + // eslint-disable-next-line promise/no-callback-in-promise + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +export function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return (cb) => { + promise + .then((data) => { + return process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }) + .catch((err) => { + process.nextTick(cb, err); + }); + }; +} +export function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +export function applyMixins(targetCtorParam, sourceCtors) { + const castTargetCtorParam = targetCtorParam; + sourceCtors.forEach((sourceCtor) => { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; + }); + }); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +export function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. 
+ */ +export function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +export function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +export function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map b/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map new file mode 100644 index 0000000..67a50b9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/util/utils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAIxC,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAAE,EAAE,IAAI,MAAM,EAAE,MAAM,MAAM,CAAC;AAEpC,MAAM,cAAc,GAClB,gFAAgF,CAAC;AAEnF;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,UAAgC;IACzD,OAAO,UAAU,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,SAAS,CAAC,KAAK,CAAC;AAC/D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,QAA+B;IAC3D,MAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,YAAY,CAAC,OAAwB;IACnD,MAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,OAAO,MAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,2BAA2B,CACzC,gBAA4B,EAC5B,SAAkB;IAElB,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE,EAAE;QAC1C,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAqBD;;;;;GAKG;AACH,wDAAwD;AACxD,MAAM,UAAU,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,wDAAwD;IACxD,OAAO,CAAC,EAAY,EAAQ,EAAE;QAC5B,OAAO;aACJ,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE;YAClB,0DAA0D;YAC1D,OAAO,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;QAC7B,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,GAAU,EAAE,EAAE;YACpB,0DAA0D;YAC1D,EAAE,CAAC,GAAG,CAAC,CAAC;QACV,CAAC,CAAC,CAAC;IACP,CAAC,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,wBAAwB,CACtC,OAAuC;IAEvC,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,CAAC,EAAsB,EAAQ,EAAE;QACtC,OAAO;aACJ,IAAI,CAAC,CAAC,IAA2B,EAAE,EAAE;YACpC,OAAO,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACnF,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,GAAU,EAAE,EAAE;YACpB,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;QAC5B,CAAC,CAAC,CAAC;IACP,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,kBAAkB,CAChC,GAAY
,EACZ,WAAmB,EACnB,eAAwB,EACxB,YAAqB;IAErB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IAED,IAAI,CAAC,eAAe,IAAI,CAAC,YAAY,EAAE;QACrC,OAAO,EAAE,CAAC,WAAW,CAAC,EAAE,GAAG,EAAE,CAAC;KAC/B;IAED,MAAM,MAAM,GAAG,EAAE,CAAC,WAAW,CAAC,EAAE,GAAG,EAAE,CAAC;IACtC,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,eAAe,CAAC,EAAE,YAAY,EAAE,CAAC;IAC1D,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAAC,eAAwB,EAAE,WAAkB;IACtE,MAAM,mBAAmB,GAAG,eAE3B,CAAC;IACF,WAAW,CAAC,OAAO,CAAC,CAAC,UAAU,EAAE,EAAE;QACjC,MAAM,CAAC,mBAAmB,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE;YAChE,mBAAmB,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QACnE,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,mBAAmB,GACvB,qKAAqK,CAAC;AAExK;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,KAAa;IACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,KAAc;IAC5C,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,CAAC,IAAI,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC;AAED,MAAM,UAAU,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAOD;;;GAGG;AACH,MAAM,UAAU,QAAQ,CAAC,KAAc;IACrC,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;QACzB,KAAK,KAAK,IAAI;QACd,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QACrB,CAAC,CAAC,KAAK,YAAY,MAAM,CAAC;QAC1B,CAAC,CAAC,KAAK,YAAY,IAAI,CAAC,CACzB,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Constants } from \"./constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { XML_ATTRKEY } from \"./serializer.common\";\nimport { v4 as uuidv4 } from \"uuid\";\n\nconst validUuidRegex =\n /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param urlToCheck - The url to check\n * @returns True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param uri - The URI to be encoded.\n * @returns The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param response - The Http Response\n * @returns The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param request - The Http Request object\n * @returns 
The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param uuid - The uuid as a string that needs to be validated\n * @returns True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Generated UUID\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises!\n *\n * @param promiseFactories - An array of promise factories(A function that return a promise)\n * @param kickstart - Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n * @returns A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(\n promiseFactories: Array,\n kickstart: unknown\n): Promise {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param err - The error occurred if any, while executing the request; otherwise null.\n * @param result - The deserialized response body if an error did not occur.\n * @param request - The raw/actual request sent to the server if an error did not occur.\n * @param response - The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param promise - The Promise to be converted to a callback\n * @returns A function that takes the callback `(cb: Function) => void`\n * @deprecated generated code should instead depend on responseToBody\n */\n// eslint-disable-next-line @typescript-eslint/ban-types\nexport function promiseToCallback(promise: Promise): (cb: Function) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n // eslint-disable-next-line @typescript-eslint/ban-types\n return (cb: Function): void => {\n promise\n .then((data: any) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n return cb(undefined, data);\n })\n .catch((err: Error) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n cb(err);\n });\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(\n promise: Promise\n): (cb: ServiceCallback) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise\n .then((data: HttpOperationResponse) => {\n return 
process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n })\n .catch((err: Error) => {\n process.nextTick(cb, err);\n });\n };\n}\n\nexport function prepareXMLRootList(\n obj: unknown,\n elementName: string,\n xmlNamespaceKey?: string,\n xmlNamespace?: string\n): { [s: string]: any } {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n\n if (!xmlNamespaceKey || !xmlNamespace) {\n return { [elementName]: obj };\n }\n\n const result = { [elementName]: obj };\n result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace };\n return result;\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param targetCtor - The target object on which the properties need to be applied.\n * @param sourceCtors - An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void {\n const castTargetCtorParam = targetCtorParam as {\n prototype: Record;\n };\n sourceCtors.forEach((sourceCtor) => {\n Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => {\n castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];\n });\n });\n}\n\nconst validateISODuration =\n /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param value - The value to be validated for ISO 8601 duration format.\n * @returns `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param value - The value to search and replace in.\n * @param searchValue - The value to search for in the value argument.\n * @param replaceValue - The value to replace searchValue with in the value argument.\n * @returns The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? 
value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given entity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value - Any entity\n * @returns true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: unknown): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\n/**\n * @internal\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * @internal\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js new file mode 100644 index 0000000..e76da18 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js @@ -0,0 +1,208 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_ATTRKEY, XML_CHARKEY } from "./serializer.common"; +if (!self.document || !self.DOMParser || !self.Node || !self.XMLSerializer) { + throw new Error(`This library depends on the following DOM objects: ["document", "DOMParser", "Node", "XMLSerializer"] to parse XML, but some of these are undefined. You may provide a polyfill to make these globally available in order to support your environment. For more information, please refer to https://aka.ms/azsdk/js/web-workers. `); +} +let cachedDoc; +function getDoc() { + if (!cachedDoc) { + cachedDoc = document.implementation.createDocument(null, null, null); + } + return cachedDoc; +} +let cachedParser; +function getParser() { + if (!cachedParser) { + cachedParser = new DOMParser(); + } + return cachedParser; +} +let cachedSerializer; +function getSerializer() { + if (!cachedSerializer) { + cachedSerializer = new XMLSerializer(); + } + return cachedSerializer; +} +// Policy to make our code Trusted Types compliant. +// https://github.com/w3c/webappsec-trusted-types +// We are calling DOMParser.parseFromString() to parse XML payload from Azure services. +// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing +// according to the spec. There are no HTML/XSS security concerns on the usage of +// parseFromString() here. +let ttPolicy; +if (typeof self.trustedTypes !== "undefined") { + ttPolicy = self.trustedTypes.createPolicy("@azure/core-http#xml.browser", { + createHTML: (s) => s, + }); +} +export function parseXML(str, opts = {}) { + var _a, _b, _c, _d; + try { + const updatedOptions = { + rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + const dom = getParser().parseFromString(((_d = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML(str)) !== null && _d !== void 0 ? 
_d : str), "application/xml"); + throwIfError(dom); + let obj; + if (updatedOptions.includeRoot) { + obj = domToObject(dom, updatedOptions); + } + else { + obj = domToObject(dom.childNodes[0], updatedOptions); + } + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } +} +let errorNS; +function getErrorNamespace() { + var _a, _b; + if (errorNS === undefined) { + try { + const invalidXML = ((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML("INVALID")) !== null && _a !== void 0 ? _a : "INVALID"); + errorNS = + (_b = getParser().parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI) !== null && _b !== void 0 ? _b : ""; + } + catch (ignored) { + // Most browsers will return a document containing , but IE will throw. + errorNS = ""; + } + } + return errorNS; +} +function throwIfError(dom) { + const parserErrors = dom.getElementsByTagName("parsererror"); + if (parserErrors.length > 0 && getErrorNamespace()) { + for (let i = 0; i < parserErrors.length; i++) { + if (parserErrors[i].namespaceURI === errorNS) { + throw new Error(parserErrors[i].innerHTML); + } + } + } +} +function isElement(node) { + return !!node.attributes; +} +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ? node : undefined; +} +function domToObject(node, options) { + let result = {}; + const childNodeCount = node.childNodes.length; + const firstChildNode = node.childNodes[0]; + const onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + const elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result[XML_ATTRKEY] = {}; + for (let i = 0; i < elementWithAttributes.attributes.length; i++) { + const attr = elementWithAttributes.attributes[i]; + result[XML_ATTRKEY][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result[options.xmlCharKey] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (let i = 0; i < childNodeCount; i++) { + const child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + const childObject = domToObject(child, options); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; +} +export function stringifyXML(content, opts = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "root", + includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY, + }; + const dom = buildNode(content, updatedOptions.rootName, updatedOptions)[0]; + return ('' + + getSerializer().serializeToString(dom)); +} +function buildAttributes(attrs) { + const result = []; + for (const key of Object.keys(attrs)) { + const attr = getDoc().createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} +function buildNode(obj, elementName, options) { + if (obj === undefined || + obj === null || + typeof obj === "string" || + typeof obj === "number" || + typeof obj === "boolean") { + const elem = getDoc().createElement(elementName); + elem.textContent = obj === undefined || obj === null ? "" : obj.toString(); + return [elem]; + } + else if (Array.isArray(obj)) { + const result = []; + for (const arrayElem of obj) { + for (const child of buildNode(arrayElem, elementName, options)) { + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + const elem = getDoc().createElement(elementName); + for (const key of Object.keys(obj)) { + if (key === XML_ATTRKEY) { + for (const attr of buildAttributes(obj[key])) { + elem.attributes.setNamedItem(attr); + } + } + else if (key === options.xmlCharKey) { + elem.textContent = obj[key].toString(); + } + else { + for (const child of buildNode(obj[key], key, options)) { + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error(`Illegal value passed to buildObject: ${obj}`); + } +} +//# sourceMappingURL=xml.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map new file mode 100644 index 0000000..7568194 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map @@ -0,0 +1 @@ 
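As a rough usage sketch (not part of this diff), the browser XML helpers above map element attributes onto the XML_ATTRKEY ("$") property and element text onto the XML_CHARKEY ("_") property defined in serializer.common.js. The deep import path below is an assumption for illustration only; parseXML and stringifyXML live in an internal module of @azure/core-http rather than its public surface.

// Hypothetical round trip through the internal XML helpers shown above;
// the import path is illustrative only.
import { parseXML, stringifyXML } from "@azure/core-http/dist-esm/src/util/xml.browser";

async function demo(): Promise<void> {
  // Attributes land under "$" (XML_ATTRKEY), element text under "_" (XML_CHARKEY).
  const parsed = await parseXML('<entry id="1">hello</entry>');
  console.log(parsed); // { $: { id: "1" }, _: "hello" }

  // stringifyXML rebuilds a DOM from the same object shape and serializes it back to a string.
  console.log(stringifyXML(parsed, { rootName: "entry" }));
}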
+{"version":3,"file":"xml.browser.js","sourceRoot":"","sources":["../../../src/util/xml.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElF,IAAI,CAAC,IAAI,CAAC,QAAQ,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;IAC1E,MAAM,IAAI,KAAK,CACb,oUAAoU,CACrU,CAAC;CACH;AAED,IAAI,SAA+B,CAAC;AACpC,SAAS,MAAM;IACb,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;KACtE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,IAAI,YAAmC,CAAC;AACxC,SAAS,SAAS;IAChB,IAAI,CAAC,YAAY,EAAE;QACjB,YAAY,GAAG,IAAI,SAAS,EAAE,CAAC;KAChC;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,IAAI,gBAA2C,CAAC;AAChD,SAAS,aAAa;IACpB,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,IAAI,aAAa,EAAE,CAAC;KACxC;IACD,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,mDAAmD;AACnD,mDAAmD;AACnD,uFAAuF;AACvF,qFAAqF;AACrF,kFAAkF;AAClF,0BAA0B;AAC1B,IAAI,QAA2D,CAAC;AAChE,IAAI,OAAO,IAAI,CAAC,YAAY,KAAK,WAAW,EAAE;IAC5C,QAAQ,GAAG,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,8BAA8B,EAAE;QACxE,UAAU,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;KACrB,CAAC,CAAC;CACJ;AAED,MAAM,UAAU,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE;;IAChE,IAAI;QACF,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,IAAI,CAAC,QAAQ,mCAAI,EAAE;YAC7B,WAAW,EAAE,MAAA,IAAI,CAAC,WAAW,mCAAI,KAAK;YACtC,UAAU,EAAE,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW;SAC3C,CAAC;QACF,MAAM,GAAG,GAAG,SAAS,EAAE,CAAC,eAAe,CACrC,CAAC,MAAA,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,GAAG,CAAC,mCAAI,GAAG,CAAW,EAC5C,iBAAiB,CAClB,CAAC;QACF,YAAY,CAAC,GAAG,CAAC,CAAC;QAElB,IAAI,GAAG,CAAC;QACR,IAAI,cAAc,CAAC,WAAW,EAAE;YAC9B,GAAG,GAAG,WAAW,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC;SACxC;aAAM;YACL,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;SACtD;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;KAC7B;IAAC,OAAO,GAAQ,EAAE;QACjB,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;AACH,CAAC;AAED,IAAI,OAA2B,CAAC;AAEhC,SAAS,iBAAiB;;IACxB,IAAI,OAAO,KAAK,SAAS,EAAE;QACzB,IAAI;YACF,MAAM,UAAU,GAAG,CAAC,MAAA,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,SAAS,CAAC,mCAAI,SAAS,CAAW,CAAC;YAC5E,OAAO;gBACL,MAAA,SAAS,EAAE,CAAC,eAAe,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;qBACvF,YAAa,mCAAI,EAAE,CAAC;SAC1B;QAAC,OAAO,OAAY,EAAE;YACrB,oFAAoF;YACpF,OAAO,GAAG,EAAE,CAAC;SACd;KACF;IACD,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,YAAY,CAAC,GAAa;IACjC,MAAM,YAAY,GAAG,GAAG,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAC7D,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,EAAE,EAAE;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,KAAK,OAAO,EAAE;gBAC5C,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;aAC5C;SACF;KACF;AACH,CAAC;AAED,SAAS,SAAS,CAAC,IAAU;IAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,SAAS,uBAAuB,CAAC,IAAU;IACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;AACpE,CAAC;AAED,SAAS,WAAW,CAAC,IAAU,EAAE,OAAoC;IACnE,IAAI,MAAM,GAAQ,EAAE,CAAC;IAErB,MAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAEtD,MAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAChD,MAAM,kBAAkB,GACtB,CAAC,cAAc;QACb,cAAc,KAAK,CAAC;QACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;QAC1C,cAAc,CAAC,SAAS,CAAC;QAC3B,SAAS,CAAC;IAEZ,MAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;IACjF,IAAI,qBAAqB,EAAE;QACzB,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC;QAEzB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjD,MAAM,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;SACrD;QAED,IAAI,kBAAkB,EAAE;YACtB,MAAM,CAAC,OAAO,C
AAC,UAAU,CAAC,GAAG,kBAAkB,CAAC;SACjD;KACF;SAAM,IAAI,cAAc,KAAK,CAAC,EAAE;QAC/B,MAAM,GAAG,EAAE,CAAC;KACb;SAAM,IAAI,kBAAkB,EAAE;QAC7B,MAAM,GAAG,kBAAkB,CAAC;KAC7B;IAED,IAAI,CAAC,kBAAkB,EAAE;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;YACvC,MAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjC,2CAA2C;YAC3C,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;gBACrC,MAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;gBACrD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;oBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,WAAW,CAAC;iBACtC;qBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;oBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;iBAC1C;qBAAM;oBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;iBAChE;aACF;SACF;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,OAAgB,EAAE,OAA0B,EAAE;;IACzE,MAAM,cAAc,GAAgC;QAClD,QAAQ,EAAE,MAAA,IAAI,CAAC,QAAQ,mCAAI,MAAM;QACjC,WAAW,EAAE,MAAA,IAAI,CAAC,WAAW,mCAAI,KAAK;QACtC,UAAU,EAAE,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW;KAC3C,CAAC;IACF,MAAM,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,cAAc,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3E,OAAO,CACL,yDAAyD;QACzD,aAAa,EAAE,CAAC,iBAAiB,CAAC,GAAG,CAAC,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CAAC,KAAgD;IACvE,MAAM,MAAM,GAAG,EAAE,CAAC;IAClB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;QACpC,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB,EAAE,OAAoC;IACpF,IACE,GAAG,KAAK,SAAS;QACjB,GAAG,KAAK,IAAI;QACZ,OAAO,GAAG,KAAK,QAAQ;QACvB,OAAO,GAAG,KAAK,QAAQ;QACvB,OAAO,GAAG,KAAK,SAAS,EACxB;QACA,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QACjD,IAAI,CAAC,WAAW,GAAG,GAAG,KAAK,SAAS,IAAI,GAAG,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC;QAC3E,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QAC7B,MAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAK,MAAM,SAAS,IAAI,GAAG,EAAE;YAC3B,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,SAAS,EAAE,WAAW,EAAE,OAAO,CAAC,EAAE;gBAC9D,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACpB;SACF;QACD,OAAO,MAAM,CAAC;KACf;SAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAClC,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QACjD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI,GAAG,KAAK,WAAW,EAAE;gBACvB,KAAK,MAAM,IAAI,IAAI,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE;oBAC5C,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;iBACpC;aACF;iBAAM,IAAI,GAAG,KAAK,OAAO,CAAC,UAAU,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;aACxC;iBAAM;gBACL,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE;oBACrD,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;iBACzB;aACF;SACF;QACD,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,wCAAwC,GAAG,EAAE,CAAC,CAAC;KAChE;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\nif (!self.document || !self.DOMParser || !self.Node || !self.XMLSerializer) {\n throw new Error(\n `This library depends on the following DOM objects: [\"document\", \"DOMParser\", \"Node\", \"XMLSerializer\"] to parse XML, but some of these are undefined. You may provide a polyfill to make these globally available in order to support your environment. For more information, please refer to https://aka.ms/azsdk/js/web-workers. 
`\n );\n}\n\nlet cachedDoc: Document | undefined;\nfunction getDoc(): Document {\n if (!cachedDoc) {\n cachedDoc = document.implementation.createDocument(null, null, null);\n }\n return cachedDoc;\n}\n\nlet cachedParser: DOMParser | undefined;\nfunction getParser(): DOMParser {\n if (!cachedParser) {\n cachedParser = new DOMParser();\n }\n return cachedParser;\n}\n\nlet cachedSerializer: XMLSerializer | undefined;\nfunction getSerializer(): XMLSerializer {\n if (!cachedSerializer) {\n cachedSerializer = new XMLSerializer();\n }\n return cachedSerializer;\n}\n\n// Policy to make our code Trusted Types compliant.\n// https://github.com/w3c/webappsec-trusted-types\n// We are calling DOMParser.parseFromString() to parse XML payload from Azure services.\n// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing\n// according to the spec. There are no HTML/XSS security concerns on the usage of\n// parseFromString() here.\nlet ttPolicy: Pick | undefined;\nif (typeof self.trustedTypes !== \"undefined\") {\n ttPolicy = self.trustedTypes.createPolicy(\"@azure/core-http#xml.browser\", {\n createHTML: (s) => s,\n });\n}\n\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n try {\n const updatedOptions: Required = {\n rootName: opts.rootName ?? \"\",\n includeRoot: opts.includeRoot ?? false,\n xmlCharKey: opts.xmlCharKey ?? XML_CHARKEY,\n };\n const dom = getParser().parseFromString(\n (ttPolicy?.createHTML(str) ?? str) as string,\n \"application/xml\"\n );\n throwIfError(dom);\n\n let obj;\n if (updatedOptions.includeRoot) {\n obj = domToObject(dom, updatedOptions);\n } else {\n obj = domToObject(dom.childNodes[0], updatedOptions);\n }\n\n return Promise.resolve(obj);\n } catch (err: any) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS: string | undefined;\n\nfunction getErrorNamespace(): string {\n if (errorNS === undefined) {\n try {\n const invalidXML = (ttPolicy?.createHTML(\"INVALID\") ?? \"INVALID\") as string;\n errorNS =\n getParser().parseFromString(invalidXML, \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI! ?? \"\";\n } catch (ignored: any) {\n // Most browsers will return a document containing , but IE will throw.\n errorNS = \"\";\n }\n }\n return errorNS;\n}\n\nfunction throwIfError(dom: Document): void {\n const parserErrors = dom.getElementsByTagName(\"parsererror\");\n if (parserErrors.length > 0 && getErrorNamespace()) {\n for (let i = 0; i < parserErrors.length; i++) {\n if (parserErrors[i].namespaceURI === errorNS) {\n throw new Error(parserErrors[i].innerHTML);\n }\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? 
node : undefined;\n}\n\nfunction domToObject(node: Node, options: Required): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[XML_ATTRKEY] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[XML_ATTRKEY][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[options.xmlCharKey] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child, options);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\nexport function stringifyXML(content: unknown, opts: SerializerOptions = {}): string {\n const updatedOptions: Required = {\n rootName: opts.rootName ?? \"root\",\n includeRoot: opts.includeRoot ?? false,\n xmlCharKey: opts.xmlCharKey ?? XML_CHARKEY,\n };\n const dom = buildNode(content, updatedOptions.rootName, updatedOptions)[0];\n return (\n '' +\n getSerializer().serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = getDoc().createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string, options: Required): Node[] {\n if (\n obj === undefined ||\n obj === null ||\n typeof obj === \"string\" ||\n typeof obj === \"number\" ||\n typeof obj === \"boolean\"\n ) {\n const elem = getDoc().createElement(elementName);\n elem.textContent = obj === undefined || obj === null ? 
\"\" : obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName, options)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = getDoc().createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === XML_ATTRKEY) {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else if (key === options.xmlCharKey) {\n elem.textContent = obj[key].toString();\n } else {\n for (const child of buildNode(obj[key], key, options)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.js b/node_modules/@azure/core-http/dist-esm/src/util/xml.js new file mode 100644 index 0000000..230f0c2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as xml2js from "xml2js"; +import { XML_ATTRKEY, XML_CHARKEY } from "./serializer.common"; +// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed +// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 +// By creating a new copy of the settings each time we instantiate the parser, +// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. +const xml2jsDefaultOptionsV2 = { + explicitCharkey: false, + trim: false, + normalize: false, + normalizeTags: false, + attrkey: XML_ATTRKEY, + explicitArray: true, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: true, + validator: undefined, + xmlns: false, + explicitChildren: false, + preserveChildrenOrder: false, + childkey: "$$", + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: undefined, + attrValueProcessors: undefined, + tagNameProcessors: undefined, + valueProcessors: undefined, + rootName: "root", + xmldec: { + version: "1.0", + encoding: "UTF-8", + standalone: true, + }, + doctype: undefined, + renderOpts: { + pretty: true, + indent: " ", + newline: "\n", + }, + headless: false, + chunkSize: 10000, + emptyTag: "", + cdata: false, +}; +// The xml2js settings for general XML parsing operations. +const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsParserSettings.explicitArray = false; +// The xml2js settings for general XML building operations. +const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsBuilderSettings.explicitArray = false; +xml2jsBuilderSettings.renderOpts = { + pretty: false, +}; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export function stringifyXML(obj, opts = {}) { + var _a; + xml2jsBuilderSettings.rootName = opts.rootName; + xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + const builder = new xml2js.Builder(xml2jsBuilderSettings); + return builder.buildObject(obj); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export function parseXML(str, opts = {}) { + var _a; + xml2jsParserSettings.explicitRoot = !!opts.includeRoot; + xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + const xmlParser = new xml2js.Parser(xml2jsParserSettings); + return new Promise((resolve, reject) => { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, (err, res) => { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map b/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map new file mode 100644 index 0000000..c9c211b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.js","sourceRoot":"","sources":["../../../src/util/xml.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElF,qIAAqI;AACrI,mGAAmG;AACnG,8EAA8E;AAC9E,+GAA+G;AAC/G,MAAM,sBAAsB,GAAqB;IAC/C,eAAe,EAAE,KAAK;IACtB,IAAI,EAAE,KAAK;IACX,SAAS,EAAE,KAAK;IAChB,aAAa,EAAE,KAAK;IACpB,OAAO,EAAE,WAAW;IACpB,aAAa,EAAE,IAAI;IACnB,WAAW,EAAE,KAAK;IAClB,UAAU,EAAE,KAAK;IACjB,YAAY,EAAE,IAAI;IAClB,SAAS,EAAE,SAAS;IACpB,KAAK,EAAE,KAAK;IACZ,gBAAgB,EAAE,KAAK;IACvB,qBAAqB,EAAE,KAAK;IAC5B,QAAQ,EAAE,IAAI;IACd,eAAe,EAAE,KAAK;IACtB,iBAAiB,EAAE,KAAK;IACxB,KAAK,EAAE,KAAK;IACZ,MAAM,EAAE,IAAI;IACZ,kBAAkB,EAAE,SAAS;IAC7B,mBAAmB,EAAE,SAAS;IAC9B,iBAAiB,EAAE,SAAS;IAC5B,eAAe,EAAE,SAAS;IAC1B,QAAQ,EAAE,MAAM;IAChB,MAAM,EAAE;QACN,OAAO,EAAE,KAAK;QACd,QAAQ,EAAE,OAAO;QACjB,UAAU,EAAE,IAAI;KACjB;IACD,OAAO,EAAE,SAAS;IAClB,UAAU,EAAE;QACV,MAAM,EAAE,IAAI;QACZ,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;KACd;IACD,QAAQ,EAAE,KAAK;IACf,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,EAAE;IACZ,KAAK,EAAE,KAAK;CACb,CAAC;AAEF,0DAA0D;AAC1D,MAAM,oBAAoB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC5E,oBAAoB,CAAC,aAAa,GAAG,KAAK,CAAC;AAE3C,2DAA2D;AAC3D,MAAM,qBAAqB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC7E,qBAAqB,CAAC,aAAa,GAAG,KAAK,CAAC;AAC5C,qBAAqB,CAAC,UAAU,GAAG;IACjC,MAAM,EAAE,KAAK;CACd,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,GAAY,EAAE,OAA0B,EAAE;;IACrE,qBAAqB,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IAC/C,qBAAqB,CAAC,OAAO,GAAG,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW,CAAC;IAC/D,MAAM,OAAO,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,qBAAqB,CAAC,CAAC;IAC1D,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE;;IAChE,oBAAoB,CAAC,YAAY,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;IACvD,oBAAoB,CAAC,OAAO,GAAG,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW,CAAC;IAC9D,MAAM,SAAS,GAAG,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;IAC1D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAQ,EAAE,GAAQ,EAAE,EAAE;gBAChD,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;YACH,CAAC,CAAC,CAAC;SACJ;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as xml2js from 
\"xml2js\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\n// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed\n// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536\n// By creating a new copy of the settings each time we instantiate the parser,\n// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.\nconst xml2jsDefaultOptionsV2: xml2js.OptionsV2 = {\n explicitCharkey: false,\n trim: false,\n normalize: false,\n normalizeTags: false,\n attrkey: XML_ATTRKEY,\n explicitArray: true,\n ignoreAttrs: false,\n mergeAttrs: false,\n explicitRoot: true,\n validator: undefined,\n xmlns: false,\n explicitChildren: false,\n preserveChildrenOrder: false,\n childkey: \"$$\",\n charsAsChildren: false,\n includeWhiteChars: false,\n async: false,\n strict: true,\n attrNameProcessors: undefined,\n attrValueProcessors: undefined,\n tagNameProcessors: undefined,\n valueProcessors: undefined,\n rootName: \"root\",\n xmldec: {\n version: \"1.0\",\n encoding: \"UTF-8\",\n standalone: true,\n },\n doctype: undefined,\n renderOpts: {\n pretty: true,\n indent: \" \",\n newline: \"\\n\",\n },\n headless: false,\n chunkSize: 10000,\n emptyTag: \"\",\n cdata: false,\n};\n\n// The xml2js settings for general XML parsing operations.\nconst xml2jsParserSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsParserSettings.explicitArray = false;\n\n// The xml2js settings for general XML building operations.\nconst xml2jsBuilderSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsBuilderSettings.explicitArray = false;\nxml2jsBuilderSettings.renderOpts = {\n pretty: false,\n};\n\n/**\n * Converts given JSON object to XML string\n * @param obj - JSON object to be converted into XML string\n * @param opts - Options that govern the parsing of given JSON object\n */\nexport function stringifyXML(obj: unknown, opts: SerializerOptions = {}): string {\n xml2jsBuilderSettings.rootName = opts.rootName;\n xml2jsBuilderSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const builder = new xml2js.Builder(xml2jsBuilderSettings);\n return builder.buildObject(obj);\n}\n\n/**\n * Converts given XML string into JSON\n * @param str - String containing the XML content to be parsed into JSON\n * @param opts - Options that govern the parsing of given xml string\n */\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n xml2jsParserSettings.explicitRoot = !!opts.includeRoot;\n xml2jsParserSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const xmlParser = new xml2js.Parser(xml2jsParserSettings);\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err: any, res: any) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/webResource.js b/node_modules/@azure/core-http/dist-esm/src/webResource.js new file mode 100644 index 0000000..3fc3d27 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/webResource.js @@ -0,0 +1,264 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { HttpHeaders, isHttpHeadersLike } from "./httpHeaders"; +import { Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +export function isWebResourceLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.url === "string" && + typeof castObject.method === "string" && + typeof castObject.headers === "object" && + isHttpHeadersLike(castObject.headers) && + typeof castObject.validateRequestProperties === "function" && + typeof castObject.prepare === "function" && + typeof castObject.clone === "function") { + return true; + } + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export class WebResource { + constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { + this.streamResponseBody = streamResponseBody; + this.streamResponseStatusCodes = streamResponseStatusCodes; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.decompressResponse = decompressResponse; + this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties() { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method === undefined || + options.method === null || + typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate === undefined || + options.pathTemplate === null || + typeof options.pathTemplate.valueOf() !== "string") && + (options.url === undefined || + options.url === null || + typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. 
+ if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = pathParameters[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); + throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in parameters: ${stringifiedPathParameters}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); + } + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } + else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", this.requestId); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly + this.body = options.body; + if (options.body !== undefined && options.body !== null) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + if (options.spanOptions) { + this.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + this.tracingContext = options.tracingContext; + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + return this; + } + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. 
+ */ + clone() { + const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + } +} +//# sourceMappingURL=webResource.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/webResource.js.map b/node_modules/@azure/core-http/dist-esm/src/webResource.js.map new file mode 100644 index 0000000..e5ed93c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/webResource.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.js","sourceRoot":"","sources":["../../src/webResource.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,WAAW,EAAmB,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAChF,OAAO,EAAU,UAAU,EAAE,MAAM,cAAc,CAAC;AAOlD,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AAuJ5C,MAAM,UAAU,iBAAiB,CAAC,MAAe;IAC/C,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAOlB,CAAC;QACF,IACE,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;YAClC,OAAO,UAAU,CAAC,MAAM,KAAK,QAAQ;YACrC,OAAO,UAAU,CAAC,OAAO,KAAK,QAAQ;YACtC,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC;YACrC,OAAO,UAAU,CAAC,yBAAyB,KAAK,UAAU;YAC1D,OAAO,UAAU,CAAC,OAAO,KAAK,UAAU;YACxC,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU,EACtC;YACA,OAAO,IAAI,CAAC;SACb;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;GAKG;AACH,MAAM,OAAO,WAAW;IAsGtB,YACE,GAAY,EACZ,MAAoB,EACpB,IAAc,EACd,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,kBAA4B,EAC5B,yBAAuC;QAEvC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,yBAAyB,GAAG,yBAAyB,CAAC;QAC3D,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,YAAY,EAAE,CAAC;IAChF,CAAC;IAED;;;;OAIG;IACH,yBAAyB;QACvB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;IACH,CAAC;IAED;;;;OAIG;IACH,OAAO,CAAC,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IACE,OAAO,CAAC,MAAM,KAAK,SAAS;YAC5B,OAAO,CAAC,MAAM,KAAK,IAAI;YACvB,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAC5C;YACA,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,KAAK,SAAS;YACjC,OAAO,CAAC,YAAY,KAAK,IAAI;YAC7B,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC;YACrD,CAAC,OAAO,CAAC,GAAG,KAAK,SAAS;g
BACxB,OAAO,CAAC,GAAG,KAAK,IAAI;gBACpB,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EAC5C;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,iCAAiC;QACjC,IAAI,OAAO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;QAED,iBAAiB;QACjB,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,MAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;QAE1D,iDAAiD;QACjD,IAAI,OAAO,CAAC,YAAY,EAAE;YACxB,MAAM,EAAE,YAAY,EAAE,cAAc,EAAE,GAAG,OAAO,CAAC;YACjD,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,GAAG,GACL,OAAO;gBACP,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;gBAClC,CAAC,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC;YACxE,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;YACpD,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,cAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,0EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,MAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,MAAM,SAAS,GAAI,cAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,CAAC,CAAC,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,yBAAyB,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC;wBAC/E,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,gCAAgC,aAAa,EAAE;4BAC1E,8CAA8C,yBAAyB,GAAG;4BAC1E,0EAA0E,aAAa,6BAA6B;4BACpH,wCAAwC,aAAa,6DAA6D,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,mEAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;SAChB;QAED,iHAAiH;QACjH,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,qFAAqF;oBACrF,2IAA2I,CAC9I,CAAC;aACH;YACD,uDAAuD;YACvD,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;YACD,wBAAwB;YACxB,MAAM,WAAW,GAAG,EAAE,CAAC;YACvB,4GAA4G;YAC5G,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,MAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,2BAA2B,cAAc,mEAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,
cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF,CAAC,aAAa;YACf,yBAAyB;YACzB,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;QAED,kDAAkD;QAClD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAK,MAAM,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;gBACrD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;QACD,0CAA0C;QAC1C,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;QACD,yCAAyC;QACzC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SAC5D;QAED,UAAU;QACV,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;QAED,0HAA0H;QAC1H,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,IAAI,EAAE;YACvD,6HAA6H;YAC7H,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;SACxC;QAED,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,IAAI,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;SAC9C;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QAEjD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACH,KAAK;QACH,MAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,yBAAyB,CAC/B,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Context, SpanOptions } from \"@azure/core-tracing\";\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { SerializerOptions } 
from \"./util/serializer.common\";\nimport { generateUuid } from \"./util/utils\";\n\n/**\n * List of supported HTTP methods.\n */\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\n\n/**\n * Possible HTTP request body types\n */\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * A description of a HTTP request to be made to a remote server.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * A unique identifier for the request. Used for logging and tracing.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. 
It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: unknown): object is WebResourceLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n url: unknown;\n method: unknown;\n headers: unknown;\n validateRequestProperties: unknown;\n prepare: unknown;\n clone: unknown;\n };\n if (\n typeof castObject.url === \"string\" &&\n typeof castObject.method === \"string\" &&\n typeof castObject.headers === \"object\" &&\n isHttpHeadersLike(castObject.headers) &&\n typeof castObject.validateRequestProperties === \"function\" &&\n typeof castObject.prepare === \"function\" &&\n typeof castObject.clone === \"function\"\n ) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n */\nexport class WebResource implements WebResourceLike {\n /**\n * URL of the outgoing request.\n */\n url: string;\n /**\n * HTTP method to use.\n */\n method: HttpMethods;\n /**\n * Request body.\n */\n body?: any;\n /**\n * HTTP headers.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * Query added to the URL.\n */\n query?: { [key: string]: any };\n /**\n * Specification of the HTTP request.\n */\n operationSpec?: OperationSpec;\n /**\n * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination.\n */\n withCredentials: boolean;\n /**\n * How long to wait in milliseconds before aborting the request.\n */\n timeout: number;\n /**\n * What proxy to use, if necessary.\n */\n proxySettings?: ProxySettings;\n /**\n * Whether to keep the HTTP connections alive throughout requests.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * Unique identifier of the outgoing request.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * Tracing: Context used when creating Spans.\n */\n tracingContext?: Context;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: unknown,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n decompressResponse?: boolean,\n streamResponseStatusCodes?: Set\n ) {\n this.streamResponseBody = streamResponseBody;\n this.streamResponseStatusCodes = streamResponseStatusCodes;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.decompressResponse = decompressResponse;\n this.requestId = this.headers.get(\"x-ms-client-request-id\") || generateUuid();\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param options - Options to provide for preparing the request.\n * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (\n options.method === undefined ||\n options.method === null ||\n typeof options.method.valueOf() !== \"string\"\n ) {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate === undefined ||\n options.pathTemplate === null ||\n typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url === undefined ||\n options.url === null ||\n typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({[\\w-]*\\s*[\\w-]*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2);\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in parameters: ${stringifiedPathParameters}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. 
It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. ` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", this.requestId);\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly\n this.body = options.body;\n if (options.body !== undefined && options.body !== null) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n if (options.spanOptions) {\n this.spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n this.tracingContext = options.tracingContext;\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.decompressResponse,\n this.streamResponseStatusCodes\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\n/**\n * Options to prepare an outgoing HTTP request.\n */\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: `{ \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }`\n * - query-parameter-value in \"string\" format: `{ \"query-parameter-name\": \"query-parameter-value\"}`.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}`\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: `{ \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }`\n * - path-parameter-value in \"string\" format: `{ \"path-parameter-name\": \"path-parameter-value\" }`.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n /**\n * Form data, used to build the request body.\n */\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: Record;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Allows keeping track of the progress of uploading the outgoing request.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Allows keeping track of the progress of downloading the incoming response.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n /**\n * Value of the parameter.\n */\n value: any;\n /**\n * Disables URL encoding if set to true.\n */\n skipUrlEncoding: boolean;\n /**\n * Parameter values may contain any other property.\n */\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * May contain other properties.\n */\n [key: string]: any;\n\n /**\n * Options to override XML parsing/building behavior.\n */\n serializerOptions?: SerializerOptions;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js new file mode 100644 index 0000000..2e3b259 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "./httpHeaders"; +import { AbortError } from "@azure/abort-controller"; +import { RestError } from "./restError"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ +export class XhrHttpClient { + sendRequest(request) { + var _a; + const xhr = new XMLHttpRequest(); + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + const abortSignal = request.abortSignal; + if (abortSignal) { + if (abortSignal.aborted) { + return Promise.reject(new AbortError("The operation was aborted.")); + } + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + const formData = request.formData; + const requestForm = new FormData(); + const appendFormValue = (key, value) => { + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm; + request.formData = undefined; + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const header of request.headers.headersArray()) { + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.size) || request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? null : request.body); + if (xhr.responseType === "blob") { + return new Promise((resolve, reject) => { + handleBlobResponse(xhr, request, resolve, reject); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + })); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} +function handleBlobResponse(xhr, request, res, rej) { + xhr.addEventListener("readystatechange", () => { + var _a; + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + if (request.streamResponseBody || ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(xhr.status))) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + else { + xhr.addEventListener("load", () => { + // xhr.response is of Blob type if the request is sent with xhr.responseType === "blob" + // but the status code is not one of the stream response status codes, + // so treat it as text and convert from Blob to text + if (xhr.response) { + // Blob.text() is not supported in IE so using FileReader instead + const reader = new FileReader(); + reader.onload = function (e) { + var _a; + const text = (_a = e.target) === null || _a === void 0 ? void 0 : _a.result; + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: text, + }); + }; + reader.onerror = function (_e) { + rej(reader.error); + }; + reader.readAsText(xhr.response, "UTF-8"); + } + else { + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + }); + } + }); + } + } + }); +} +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", (rawEvent) => listener({ + loadedBytes: rawEvent.loaded, + })); + } +} +// exported locally for testing +export function parseHeaders(xhr) { + const responseHeaders = new HttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", () => reject(new RestError(`Failed to send request to ${request.url}`, RestError.REQUEST_SEND_ERROR, undefined, request))); + const abortError = new AbortError("The operation was aborted."); + xhr.addEventListener("abort", () => reject(abortError)); + xhr.addEventListener("timeout", () => reject(abortError)); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map new file mode 100644 index 0000000..3d02017 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xhrHttpClient.js","sourceRoot":"","sources":["../../src/xhrHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAE7D,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAGrD,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH,MAAM,OAAO,aAAa;IACjB,WAAW,CAAC,OAAwB;;QACzC,MAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;QAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACxC,IAAI,WAAW,EAAE;YACf,IAAI,WAAW,CAAC,OAAO,EAAE;gBACvB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC,CAAC;aACrE;YAED,MAAM,QAAQ,GAAG,GAAS,EAAE;gBAC1B,GAAG,CAAC,KAAK,EAAE,CAAC;YACd,CAAC,CAAC;YACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;YAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE,GAAG,EAAE;gBAC5C,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;oBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;iBACpD;YACH,CAAC,CAAC,CAAC;SACJ;QAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;QAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;YACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YAClC,MAAM,WAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,EAAQ,EAAE;gBACxD,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;oBACA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBAC3C,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,OAAO,CAAC,IAAI,GAAG,WAAW,CAAC;YAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC7B,MAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,kEAAkE;gBAClE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;aACxC;SACF;QAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;QAC9C,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACjD;QAED,GAAG,CAAC,YAAY;YACd,CAAA,MAAA,OAAO,CAAC,yBAAyB,0CAAE,IAAI,KAAI,OAAO,CAAC,kBAAkB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;QAE1F,2CAA2C;QAC3C,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE3D,IAAI,GAAG,CAAC,YAAY,KAAK,MAAM,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACrC,kBAAkB,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;gBAClD,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;aAAM;YACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;gBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE,CAChC,OAAO,CAAC;oBACN,OAAO;oBACP,MAAM,EAAE,GAAG,CAAC,MAAM;oBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;oBAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;iBAC7B,CAAC,CACH,CAAC;gBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;CACF;AAED,SAAS,kBAAkB,CACzB,GAAmB,EACnB,OAAwB,EACxB,GAAgF,EAChF,GAA2B;IAE3B,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE,GAAG,EAAE;;QAC5C,wCAAwC;QACxC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;YACtD,IAAI,OAAO,CAAC,kBAAkB,KAAI,MAAA,OAAO,CAA
C,yBAAyB,0CAAE,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA,EAAE;gBACpF,MAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;oBACrD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE;wBAChC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;oBACxB,CAAC,CAAC,CAAC;oBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;gBAC9C,CAAC,CAAC,CAAC;gBACH,GAAG,CAAC;oBACF,OAAO;oBACP,MAAM,EAAE,GAAG,CAAC,MAAM;oBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;oBAC1B,QAAQ;iBACT,CAAC,CAAC;aACJ;iBAAM;gBACL,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE;oBAChC,uFAAuF;oBACvF,sEAAsE;oBACtE,oDAAoD;oBACpD,IAAI,GAAG,CAAC,QAAQ,EAAE;wBAChB,iEAAiE;wBACjE,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;wBAChC,MAAM,CAAC,MAAM,GAAG,UAAU,CAAC;;4BACzB,MAAM,IAAI,GAAG,MAAA,CAAC,CAAC,MAAM,0CAAE,MAAgB,CAAC;4BACxC,GAAG,CAAC;gCACF,OAAO;gCACP,MAAM,EAAE,GAAG,CAAC,MAAM;gCAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;gCAC1B,UAAU,EAAE,IAAI;6BACjB,CAAC,CAAC;wBACL,CAAC,CAAC;wBACF,MAAM,CAAC,OAAO,GAAG,UAAU,EAAE;4BAC3B,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;wBACpB,CAAC,CAAC;wBACF,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;qBAC1C;yBAAM;wBACL,GAAG,CAAC;4BACF,OAAO;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;yBAC3B,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;aACJ;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;IAEpD,IAAI,QAAQ,EAAE;QACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,CAAC,QAAQ,EAAE,EAAE,CAC5C,QAAQ,CAAC;YACP,WAAW,EAAE,QAAQ,CAAC,MAAM;SAC7B,CAAC,CACH,CAAC;KACH;AACH,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,YAAY,CAAC,GAAmB;IAC9C,MAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;IAC1C,MAAM,WAAW,GAAG,GAAG;SACpB,qBAAqB,EAAE;SACvB,IAAI,EAAE;SACN,KAAK,CAAC,SAAS,CAAC,CAAC;IACpB,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;QAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;QACxC,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;KAC9C;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;IAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CACjC,MAAM,CACJ,IAAI,SAAS,CACX,6BAA6B,OAAO,CAAC,GAAG,EAAE,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF,CACF,CAAC;IACF,MAAM,UAAU,GAAG,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;IAChE,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;IACxD,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AAC5D,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n if (abortSignal.aborted) {\n return Promise.reject(new AbortError(\"The operation was aborted.\"));\n }\n\n const listener = (): void => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n 
abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n\n xhr.responseType =\n request.streamResponseStatusCodes?.size || request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? null : request.body);\n\n if (xhr.responseType === \"blob\") {\n return new Promise((resolve, reject) => {\n handleBlobResponse(xhr, request, resolve, reject);\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction handleBlobResponse(\n xhr: XMLHttpRequest,\n request: WebResourceLike,\n res: (value: HttpOperationResponse | PromiseLike) => void,\n rej: (reason?: any) => void\n): void {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n if (request.streamResponseBody || request.streamResponseStatusCodes?.has(xhr.status)) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n res({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n } else {\n xhr.addEventListener(\"load\", () => {\n // xhr.response is of Blob type if the request is sent with xhr.responseType === \"blob\"\n // but the status code is not one of the stream response status codes,\n // so treat it as text and convert from Blob to text\n if (xhr.response) {\n // Blob.text() is not supported in IE so using FileReader instead\n const reader = new FileReader();\n reader.onload = function (e) {\n const text = e.target?.result as string;\n res({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: text,\n });\n };\n reader.onerror = function (_e) {\n rej(reader.error);\n };\n reader.readAsText(xhr.response, \"UTF-8\");\n } else {\n res({\n request,\n 
status: xhr.status,\n headers: parseHeaders(xhr),\n });\n }\n });\n }\n }\n });\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n): void {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n): void {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n const abortError = new AbortError(\"The operation was aborted.\");\n xhr.addEventListener(\"abort\", () => reject(abortError));\n xhr.addEventListener(\"timeout\", () => reject(abortError));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist/index.js b/node_modules/@azure/core-http/dist/index.js new file mode 100644 index 0000000..602ee12 --- /dev/null +++ b/node_modules/@azure/core-http/dist/index.js @@ -0,0 +1,5465 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var uuid = require('uuid'); +var util = require('util'); +var tslib = require('tslib'); +var xml2js = require('xml2js'); +var coreUtil = require('@azure/core-util'); +var logger$1 = require('@azure/logger'); +var coreAuth = require('@azure/core-auth'); +var os = require('os'); +var http = require('http'); +var https = require('https'); +var abortController = require('@azure/abort-controller'); +var tunnel = require('tunnel'); +var stream = require('stream'); +var FormData = require('form-data'); +var node_fetch = require('node-fetch'); +var coreTracing = require('@azure/core-tracing'); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var http__namespace = /*#__PURE__*/_interopNamespace(http); +var https__namespace = /*#__PURE__*/_interopNamespace(https); +var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); +var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); +var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +function isHttpHeadersLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.rawHeaders === "function" && + typeof castObject.clone === "function" && + typeof castObject.get === "function" && + typeof castObject.set === "function" && + typeof castObject.contains === "function" && + typeof castObject.remove === "function" && + typeof castObject.headersArray === "function" && + typeof castObject.headerValues === "function" && + typeof castObject.headerNames === "function" && + typeof castObject.toJson === "function") { + return true; + } + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString().trim(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + /** + * The core-http version + */ + coreHttpVersion: "3.0.4", + /** + * Specifies HTTP. + */ + HTTP: "http:", + /** + * Specifies HTTPS. + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + ServiceUnavailable: 503, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + */ + USER_AGENT: "User-Agent", + }, +}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +const XML_CHARKEY = "_"; + +// Copyright (c) Microsoft Corporation. +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Encodes an URI. 
+ * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +function stripResponse(response) { + const strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +function stripRequest(request) { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +function generateUuid() { + return uuid.v4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +function executePromisesSequentially(promiseFactories, kickstart) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +// eslint-disable-next-line @typescript-eslint/ban-types +function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + // eslint-disable-next-line @typescript-eslint/ban-types + return (cb) => { + promise + .then((data) => { + // eslint-disable-next-line promise/no-callback-in-promise + return cb(undefined, data); + }) + .catch((err) => { + // eslint-disable-next-line promise/no-callback-in-promise + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. 
+ * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return (cb) => { + promise + .then((data) => { + return process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }) + .catch((err) => { + process.nextTick(cb, err); + }); + }; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +function applyMixins(targetCtorParam, sourceCtors) { + const castTargetCtorParam = targetCtorParam; + sourceCtors.forEach((sourceCtor) => { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; + }); + }); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. + */ +function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} + +// Copyright (c) Microsoft Corporation. +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. 
+/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +class Serializer { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + * @deprecated Removing the constraints validation on client side. + */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value != undefined) { + const valueAsNumber = value; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + const valueAsArray = value; + if (MaxItems != undefined && valueAsArray.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && valueAsArray.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && valueAsArray.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && valueAsArray.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper, object, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? 
_b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object == undefined) { + payload = object; + } + else { + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper, responseBody, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xmlCharKey; + const castResponseBody = responseBody; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (castResponseBody[XML_ATTRKEY] != undefined && + castResponseBody[xmlCharKey] != undefined) { + responseBody = castResponseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = encodeByteArray(value); + } + return returnValue; +} +function serializeBase64UrlType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = bufferToBase64Url(value) || ""; + } + return returnValue; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by `className`. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + * @param objectName - Name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && + (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? 
`xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName != undefined) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a, _b; + const xmlCharKey = (_a = options.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== undefined) { + instance[key] = responseBody[xmlCharKey]; + } + else if (typeof responseBody === "string") { + // The special case where xml parser parses "content" into JSON of + // `{ name: "content"}` instead of `{ name: { "_": "content" }}` + instance[key] = responseBody; + } + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... + + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). 
However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, 
polymorphicPropertyName) { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +function serializeObject(toSerialize) { + const castToSerialize = toSerialize; + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + const array = []; + for (let i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + const dictionary = {}; + for (const property in toSerialize) { + dictionary[property] = serializeObject(castToSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + const result = {}; + for (const key of o) { + result[key] = key; + } + return result; +} +/** + * String enum containing the string types of property mappers. + */ +// eslint-disable-next-line @typescript-eslint/no-redeclare +const MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); + +// Copyright (c) Microsoft Corporation. +function isWebResourceLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.url === "string" && + typeof castObject.method === "string" && + typeof castObject.headers === "object" && + isHttpHeadersLike(castObject.headers) && + typeof castObject.validateRequestProperties === "function" && + typeof castObject.prepare === "function" && + typeof castObject.clone === "function") { + return true; + } + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. 
+ */ +class WebResource { + constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { + this.streamResponseBody = streamResponseBody; + this.streamResponseStatusCodes = streamResponseStatusCodes; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.decompressResponse = decompressResponse; + this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties() { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method === undefined || + options.method === null || + typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate === undefined || + options.pathTemplate === null || + typeof options.pathTemplate.valueOf() !== "string") && + (options.url === undefined || + options.url === null || + typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? 
pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = pathParameters[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); + throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in parameters: ${stringifiedPathParameters}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); + } + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } + else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", this.requestId); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly + this.body = options.body; + if (options.body !== undefined && options.body !== null) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + if (options.spanOptions) { + this.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + this.tracingContext = options.tracingContext; + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + return this; + } + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. 
+ */ + clone() { + const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A class that handles the query portion of a URLBuilder. + */ +class URLQuery { + constructor() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any() { + return Object.keys(this._rawQuery).length > 0; + } + /** + * Get the keys of the query string. + */ + keys() { + return Object.keys(this._rawQuery); + } + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName, parameterValue) { + const caseParameterValue = parameterValue; + if (parameterName) { + if (caseParameterValue !== undefined && caseParameterValue !== null) { + const newValue = Array.isArray(caseParameterValue) + ? caseParameterValue + : caseParameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + } + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString() { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } + else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + /** + * Parse a URLQuery from the provided text. 
+ */ + static parse(text) { + const result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + let currentState = "ParameterName"; + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + } +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +class URLBuilder { + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + } + /** + * Get the scheme that has been set in this URL. + */ + getScheme() { + return this._scheme; + } + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + } + /** + * Get the host that has been set in this URL. + */ + getHost() { + return this._host; + } + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port) { + if (port === undefined || port === null || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + } + /** + * Get the port that has been set in this URL. + */ + getPort() { + return this._port; + } + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path) { + if (!path) { + this._path = undefined; + } + else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + } + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path) { + if (path) { + let currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + /** + * Get the path that has been set in this URL. 
+ */ + getPath() { + return this._path; + } + /** + * Set the query in this URL. + */ + setQuery(query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + } + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + } + /** + * Get the query in this URL. + */ + getQuery() { + return this._query ? this._query.toString() : undefined; + } + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + set(text, startState) { + const tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + const token = tokenizer.current(); + let tokenPath; + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString() { + let result = ""; + if (this._scheme) { + result += `${this._scheme}://`; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += `:${this._port}`; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + return result; + } + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + /** + * Parses a given string URL into a new {@link URLBuilder}. 
+ */ + static parse(text) { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} +class URLToken { + constructor(text, type) { + this.text = text; + this.type = type; + } + static scheme(text) { + return new URLToken(text, "SCHEME"); + } + static host(text) { + return new URLToken(text, "HOST"); + } + static port(text) { + return new URLToken(text, "PORT"); + } + static path(text) { + return new URLToken(text, "PATH"); + } + static query(text) { + return new URLToken(text, "QUERY"); + } +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +function isAlphaNumericCharacter(character) { + const characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +class URLTokenizer { + constructor(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current() { + return this._currentToken; + } + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next() { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. 
+ */ +function peekCharacters(tokenizer, charactersToPeek) { + let endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + let result = ""; + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer, ...terminatingCharacters) { + return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); +} +function nextScheme(tokenizer) { + const scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + const host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + const port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + const path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + 
tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + const query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} + +// Copyright (c) Microsoft Corporation. +function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel__namespace.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpOverHttps(tunnelOptions); + } + else { + return tunnel__namespace.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} + +// Copyright (c) Microsoft Corporation. +const RedactedString = "REDACTED"; +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +class Sanitizer { + constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { + allowedHeaderNames = Array.isArray(allowedHeaderNames) + ? 
defaultAllowedHeaderNames.concat(allowedHeaderNames) + : defaultAllowedHeaderNames; + allowedQueryParameters = Array.isArray(allowedQueryParameters) + ? defaultAllowedQueryParameters.concat(allowedQueryParameters) + : defaultAllowedQueryParameters; + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "_headersMap") { + return this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(value) { + return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); + } + sanitizeQuery(value) { + return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); + } + sanitizeObject(value, allowedKeys, accessor) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (allowedKeys.has(k.toLowerCase())) { + sanitized[k] = accessor(value, k); + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const urlBuilder = URLBuilder.parse(value); + const queryString = urlBuilder.getQuery(); + if (!queryString) { + return value; + } + const query = URLQuery.parse(queryString); + for (const k of query.keys()) { + if (!this.allowedQueryParameters.has(k.toLowerCase())) { + query.set(k, RedactedString); + } + } + urlBuilder.setQuery(query.toString()); + return urlBuilder.toString(); + } +} + +// Copyright (c) Microsoft Corporation. +const custom = util.inspect.custom; + +// Copyright (c) Microsoft Corporation. +const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ +class RestError extends Error { + constructor(message, code, statusCode, request, response) { + super(message); + this.name = "RestError"; + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. 
Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; + +// Copyright (c) Microsoft Corporation. +const logger = logger$1.createClientLogger("core-http"); + +// Copyright (c) Microsoft Corporation. +function getCachedAgent(isHttps, agentCache) { + return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; +} +class ReportTransform extends stream.Transform { + constructor(progressCallback) { + super(); + this.progressCallback = progressCallback; + this.loadedBytes = 0; + } + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(undefined); + } +} +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. 
+ */ + async sendRequest(httpRequest) { + var _a; + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); + } + const abortController$1 = new abortController.AbortController(); + let abortListener; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new abortController.AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController$1.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(() => { + abortController$1.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + const formData = httpRequest.formData; + const requestForm = new FormData__default["default"](); + const appendFormValue = (key, value) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + const onUploadProgress = httpRequest.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const platformSpecificRequestInit = await this.prepareRequest(httpRequest); + const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, + // the types for RequestInit are from the browser, which expects AbortSignal to + // have `reason` and `throwIfAborted`, but these don't exist on our polyfill + // for Node. + signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); + let operationResponse; + try { + const response = await this.fetch(httpRequest.url, requestInit); + const headers = parseHeaders(response.headers); + const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || + httpRequest.streamResponseBody; + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: streaming + ? response.body + : undefined, + bodyAsText: !streaming ? 
await response.text() : undefined, + }; + const onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + const responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + const length = parseInt(headers.get("Content-Length")) || undefined; + if (length) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length }); + } + } + } + await this.processRequest(operationResponse); + return operationResponse; + } + catch (error) { + const fetchError = error; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new abortController.AbortError("The operation was aborted."); + } + throw fetchError; + } + finally { + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + getOrCreateAgent(httpRequest) { + var _a; + const isHttps = isUrlHttps(httpRequest.url); + // At the moment, proxy settings and keepAlive are mutually + // exclusive because the 'tunnel' library currently lacks the + // ability to create a proxy with keepAlive turned on. + if (httpRequest.proxySettings) { + const { host, port, username, password } = httpRequest.proxySettings; + const key = `${host}:${port}:${username}:${password}`; + const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; + let agent = getCachedAgent(isHttps, proxyAgents); + if (agent) { + return agent; + } + const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + agent = tunnel.agent; + if (tunnel.isHttps) { + proxyAgents.httpsAgent = tunnel.agent; + } + else { + proxyAgents.httpAgent = tunnel.agent; + } + this.proxyAgentMap.set(key, proxyAgents); + return agent; + } + else if (httpRequest.keepAlive) { + let agent = getCachedAgent(isHttps, this.keepAliveAgents); + if (agent) { + return agent; + } + const agentOptions = { + keepAlive: httpRequest.keepAlive, + }; + if (isHttps) { + agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); + } + else { + agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); + } + return agent; + } + else { + return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; + } + } + /** + * Uses `node-fetch` to perform the request. 
+ */ + // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs + async fetch(input, init) { + return node_fetch__default["default"](input, init); + } + /** + * Prepares a request based on the provided web resource. + */ + async prepareRequest(httpRequest) { + const requestInit = {}; + // Set the http(s) agent + requestInit.agent = this.getOrCreateAgent(httpRequest); + requestInit.compress = httpRequest.decompressResponse; + return requestInit; + } + /** + * Process an HTTP response. + */ + async processRequest(_operationResponse) { + /* no_op */ + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +exports.HttpPipelineLogLevel = void 0; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + +// Copyright (c) Microsoft Corporation. +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +function operationOptionsToRequestOptionsBase(opts) { + const { requestOptions, tracingOptions } = opts, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); + let result = additionalOptions; + if (requestOptions) { + result = Object.assign(Object.assign({}, result), requestOptions); + } + if (tracingOptions) { + result.tracingContext = tracingOptions.tracingContext; + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? void 0 : tracingOptions.spanOptions; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * The base class from which all request policies derive. + */ +class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } +} +/** + * Optional properties that can be used when creating a RequestPolicy. 
+ */ +class RequestPolicyOptions { + constructor(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return (!!this._logger && + logLevel !== exports.HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed +// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 +// By creating a new copy of the settings each time we instantiate the parser, +// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. +const xml2jsDefaultOptionsV2 = { + explicitCharkey: false, + trim: false, + normalize: false, + normalizeTags: false, + attrkey: XML_ATTRKEY, + explicitArray: true, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: true, + validator: undefined, + xmlns: false, + explicitChildren: false, + preserveChildrenOrder: false, + childkey: "$$", + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: undefined, + attrValueProcessors: undefined, + tagNameProcessors: undefined, + valueProcessors: undefined, + rootName: "root", + xmldec: { + version: "1.0", + encoding: "UTF-8", + standalone: true, + }, + doctype: undefined, + renderOpts: { + pretty: true, + indent: " ", + newline: "\n", + }, + headless: false, + chunkSize: 10000, + emptyTag: "", + cdata: false, +}; +// The xml2js settings for general XML parsing operations. +const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsParserSettings.explicitArray = false; +// The xml2js settings for general XML building operations. +const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsBuilderSettings.explicitArray = false; +xml2jsBuilderSettings.renderOpts = { + pretty: false, +}; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +function stringifyXML(obj, opts = {}) { + var _a; + xml2jsBuilderSettings.rootName = opts.rootName; + xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); + return builder.buildObject(obj); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +function parseXML(str, opts = {}) { + var _a; + xml2jsParserSettings.explicitRoot = !!opts.includeRoot; + xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); + return new Promise((resolve, reject) => { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, (err, res) => { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} + +// Copyright (c) Microsoft Corporation. +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +function deserializationPolicy(deserializationContentTypes, parsingOptions) { + return { + create: (nextPolicy, options) => { + return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); + }, + }; +} +const defaultJsonContentTypes = ["application/json", "text/json"]; +const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +const DefaultDeserializationOptions = { + expectedContentTypes: { + json: defaultJsonContentTypes, + xml: defaultXmlContentTypes, + }, +}; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +class DeserializationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { + var _a; + super(nextPolicy, requestPolicyOptions); + this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + } + async sendRequest(request) { + return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { + xmlCharKey: this.xmlCharKey, + })); + } +} +function getOperationResponse(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationSpec = request.operationSpec; + if (operationSpec) { + const operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const shouldDeserialize = parsedResponse.request.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. 
+ */ +function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationSpec = parsedResponse.request.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponse(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (innerError) { + const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders", options); + } + } + return parsedResponse; + }); +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; + const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || + parsedResponse.request.streamResponseBody; + const initialErrorMessage = streaming + ? 
`Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let parsedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedBody === "object" ? parsedBody[defaultBodyMapper.xmlElementName] : []; + } + parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); + } + const internalError = parsedBody.error || parsedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = parsedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { + var _a; + const errorHandler = (err) => { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); + return Promise.reject(e); + }; + const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || + operationResponse.request.streamResponseBody; + if (!streaming && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + return new Promise((resolve) => { + operationResponse.parsedBody = JSON.parse(text); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + return parseXML(text, opts) + .then((body) => { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} + +// Copyright (c) Microsoft Corporation. 
+/** + * By default, HTTP connections are maintained for future requests. + */ +const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const DEFAULT_CLIENT_RETRY_COUNT = 3; +// intervals are in ms +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +function isNumber(n) { + return typeof n === "number"; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial chekck on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(retryLimit, predicate, retryData, response, error) { + if (!predicate(response, error)) { + return false; + } + return retryData.retryCount < retryLimit; +} +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation"s error, if any. + */ +function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; + const boundedRandDelta = retryOptions.retryInterval * 0.8 + + Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); + return retryData; +} + +// Copyright (c) Microsoft Corporation. +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); + }, + }; +} +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +exports.RetryMode = void 0; +(function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; +})(exports.RetryMode || (exports.RetryMode = {})); +const DefaultRetryOptions = { + maxRetries: DEFAULT_CLIENT_RETRY_COUNT, + retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, +}; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. 
+ */ + constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry$1(this, request, response)) + .catch((error) => retry$1(this, request, error.response, undefined, error)); + } +} +async function retry$1(policy, request, response, retryData, requestError) { + function shouldPolicyRetry(responseParam) { + const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; + if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { + return false; + } + if (statusCode === undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + return true; + } + retryData = updateRetryData({ + retryInterval: policy.retryInterval, + minRetryInterval: 0, + maxRetryInterval: policy.maxRetryInterval, + }, retryData, requestError); + const isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { + logger.info(`Retrying request in ${retryData.retryInterval}`); + try { + await coreUtil.delay(retryData.retryInterval); + const res = await policy._nextPolicy.sendRequest(request.clone()); + return retry$1(policy, request, res, retryData); + } + catch (err) { + return retry$1(policy, request, response, retryData, err); + } + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + throw err; + } + else { + return response; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger$1; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. 
+ */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. 
+function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +exports.QueryCollectionFormat = void 0; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. + */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. 
+// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreUtil.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. 
+ */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. 
+ * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ +function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + create: (nextPolicy, options) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _requestIdHeaderName) { + super(nextPolicy, options); + this._requestIdHeaderName = _requestIdHeaderName; + } + sendRequest(request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, request.requestId); + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new NodeFetchHttpClient(); + } + return cachedHttpClient; +} + +// Copyright (c) Microsoft Corporation. +function ndJsonPolicy() { + return { + create: (nextPolicy, options) => { + return new NdJsonPolicy(nextPolicy, options); + }, + }; +} +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends a request. + */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. 
+ * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? 
auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; + } + sendRequest(request) { + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +function rpRegistrationPolicy(retryTimeout = 30) { + return { + create: (nextPolicy, options) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +class RPRegistrationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _retryTimeout = 30) { + super(nextPolicy, options); + this._retryTimeout = _retryTimeout; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param originalRequest - The original request + * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. + * @returns A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo = false) { + const reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param body - The response body received after making the original request. + * @returns The name of the RP if condition is satisfied else undefined. 
+ */ +function checkRPNotRegisteredError(body) { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param url - The original request url + * @returns The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} +/** + * Registers the given provider. + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param provider - The provider name to be registered. + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + */ +async function registerRP(policy, urlPrefix, provider, originalRequest) { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + const response = await policy._nextPolicy.sendRequest(reqOptions); + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param url - The request url for polling + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns True if RP Registration is successful. + */ +async function getRegistrationStatus(policy, url, originalRequest) { + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + const res = await policy._nextPolicy.sendRequest(reqOptions); + const obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + await coreUtil.delay(policy._retryTimeout * 1000); + return getRegistrationStatus(policy, url, originalRequest); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. 
+ */ +function signingPolicy(authenticationProvider) { + return { + create: (nextPolicy, options) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +class SystemErrorRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? 
maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} +async function retry(policy, request, operationResponse, err, retryData) { + retryData = updateRetryData(policy, retryData, err); + function shouldPolicyRetry(_response, error) { + if (error && + error.code && + (error.code === "ETIMEDOUT" || + error.code === "ESOCKETTIMEDOUT" || + error.code === "ECONNREFUSED" || + error.code === "ECONNRESET" || + error.code === "ENOENT")) { + return true; + } + return false; + } + if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await coreUtil.delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } + catch (nestedErr) { + return retry(policy, request, operationResponse, nestedErr, retryData); + } + } + else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Maximum number of retries for the throttling retry policy + */ +const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; + +// Copyright (c) Microsoft Corporation. +const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +function throttlingRetryPolicy() { + return { + create: (nextPolicy, options) => { + return new ThrottlingRetryPolicy(nextPolicy, options); + }, + }; +} +const StandardAbortMessage = "The operation was aborted."; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +class ThrottlingRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _handleResponse) { + super(nextPolicy, options); + this.numberOfRetries = 0; + this._handleResponse = _handleResponse || this._defaultResponseHandler; + } + async sendRequest(httpRequest) { + const response = await this._nextPolicy.sendRequest(httpRequest.clone()); + if (response.status !== StatusCodes.TooManyRequests && + response.status !== StatusCodes.ServiceUnavailable) { + return response; + } + else { + return this._handleResponse(httpRequest, response); + } + } + async _defaultResponseHandler(httpRequest, httpResponse) { + var _a; + const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (retryAfterHeader) { + const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (delayInMs) { + this.numberOfRetries += 1; + await coreUtil.delay(delayInMs, { + abortSignal: httpRequest.abortSignal, + abortErrorMsg: StandardAbortMessage, + }); + if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw new abortController.AbortError(StandardAbortMessage); + } + if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { + return this.sendRequest(httpRequest); + } + else { + return this._nextPolicy.sendRequest(httpRequest); + } + } + } + return httpResponse; + } + static parseRetryAfterHeader(headerValue) { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + } + static parseDateRetryAfterHeader(headerValue) { + try { + const now = Date.now(); + const date = Date.parse(headerValue); + const diff = date - now; + return Number.isNaN(diff) ? undefined : diff; + } + catch (error) { + return undefined; + } + } +} + +// Copyright (c) Microsoft Corporation. +const createSpan = coreTracing.createSpanFunction({ + packagePrefix: "", + namespace: "", +}); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +function tracingPolicy(tracingOptions = {}) { + return { + create(nextPolicy, options) { + return new TracingPolicy(nextPolicy, options, tracingOptions); + }, + }; +} +/** + * A policy that wraps outgoing requests with a tracing span. 
+ */ +class TracingPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, tracingOptions) { + super(nextPolicy, options); + this.userAgent = tracingOptions.userAgent; + } + async sendRequest(request) { + if (!request.tracingContext) { + return this._nextPolicy.sendRequest(request); + } + const span = this.tryCreateSpan(request); + if (!span) { + return this._nextPolicy.sendRequest(request); + } + try { + const response = await this._nextPolicy.sendRequest(request); + this.tryProcessResponse(span, response); + return response; + } + catch (err) { + this.tryProcessError(span, err); + throw err; + } + } + tryCreateSpan(request) { + var _a; + try { + // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. + // We can pass this as a separate parameter once we upgrade to the latest core-tracing. + const { span } = createSpan(`HTTP ${request.method}`, { + tracingOptions: { + spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), + tracingContext: request.tracingContext, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? void 0 : _a.getValue(Symbol.for("az.namespace")); + if (typeof namespaceFromContext === "string") { + span.setAttribute("az.namespace", namespaceFromContext); + } + span.setAttributes({ + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }); + if (this.userAgent) { + span.setAttribute("http.user_agent", this.userAgent); + } + // set headers + const spanContext = span.spanContext(); + const traceParentHeader = coreTracing.getTraceParentHeader(spanContext); + if (traceParentHeader && coreTracing.isSpanContextValid(spanContext)) { + request.headers.set("traceparent", traceParentHeader); + const traceState = spanContext.traceState && spanContext.traceState.serialize(); + // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent + if (traceState) { + request.headers.set("tracestate", traceState); + } + } + return span; + } + catch (error) { + logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); + return undefined; + } + } + tryProcessError(span, err) { + try { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: err.message, + }); + if (err.statusCode) { + span.setAttribute("http.status_code", err.statusCode); + } + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } + tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.OK, + }); + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ServiceClient sends service requests and receives responses. + */ +class ServiceClient { + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. 
+ */ + constructor(credentials, + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ + options) { + if (!options) { + options = {}; + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + let requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + logger.info("ServiceClient: using custom request policies"); + requestPolicyFactories = options.requestPolicyFactories; + } + else { + let authPolicyFactory = undefined; + if (coreAuth.isTokenCredential(credentials)) { + logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); + // Create a wrapped RequestPolicyFactory here so that we can provide the + // correct scope to the BearerTokenAuthenticationPolicy at the first time + // one is requested. This is needed because generated ServiceClient + // implementations do not set baseUri until after ServiceClient's constructor + // is finished, leaving baseUri empty at the time when it is needed to + // build the correct scope name. + const wrappedPolicyFactory = () => { + let bearerTokenPolicyFactory = undefined; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const serviceClient = this; + const serviceClientOptions = options; + return { + create(nextPolicy, createOptions) { + const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); + if (!credentialScopes) { + throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); + } + if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { + bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); + } + return bearerTokenPolicyFactory.create(nextPolicy, createOptions); + }, + }; + }; + authPolicyFactory = wrappedPolicyFactory(); + } + else if (credentials && typeof credentials.signRequest === "function") { + logger.info("ServiceClient: creating signing policy from provided credentials"); + authPolicyFactory = signingPolicy(credentials); + } + else if (credentials !== undefined && credentials !== null) { + throw new Error("The credentials argument must implement the TokenCredential interface"); + } + logger.info("ServiceClient: using default request policies"); + requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); + if (options.requestPolicyFactories) { + // options.requestPolicyFactories can also be a function that manipulates + // the default requestPolicyFactories array + const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. 
+ */ + sendRequest(options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + let httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + let httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. + */ + async sendOperationRequest(operationArguments, operationSpec, callback) { + var _a; + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const httpRequest = new WebResource(); + let result; + try { + const baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + const requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (const queryParameter of operationSpec.queryParameters) { + let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue !== undefined && queryParameterValue !== null) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + // The collection is empty, no need to try serializing the current 
queryParam + continue; + } + else { + for (const index in queryParameterValue) { + const item = queryParameterValue[index]; + queryParameterValue[index] = + item === undefined || item === null ? "" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType && operationSpec.requestBody) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue !== undefined && headerValue !== null) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + if (options.spanOptions) { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
+ httpRequest.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + httpRequest.tracingContext = options.tracingContext; + } + if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { + httpRequest.shouldDeserialize = options.shouldDeserialize; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseStatusCodes === undefined) { + httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); + } + let rawResponse; + let sendRequestError; + try { + rawResponse = await this.sendRequest(httpRequest); + } + catch (error) { + sendRequestError = error; + } + if (sendRequestError) { + if (sendRequestError.response) { + sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || + operationSpec.responses["default"]); + } + result = Promise.reject(sendRequestError); + } + else { + result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); + } + } + catch (error) { + result = Promise.reject(error); + } + const cb = callback; + if (cb) { + result + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + return result; + } +} +function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + var _a, _b, _c, _d, _e, _f; + const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; + const updatedOptions = { + rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", + includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, + }; + const xmlCharKey = serializerOptions.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperType.String && + (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + let result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(authPolicyFactory, options) { + const factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (authPolicyFactory) { + factories.push(authPolicyFactory); + } + const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + factories.push(redirectPolicy()); + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + if (coreUtil.isNode) { + factories.push(proxyPolicy(options.proxySettings)); + } + factories.push(logPolicy({ logger: logger.info })); + return factories; +} +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. 
+ * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { + const requestPolicyFactories = []; + if (pipelineOptions.sendStreamingJson) { + requestPolicyFactories.push(ndJsonPolicy()); + } + let userAgentValue = undefined; + if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { + const userAgentInfo = []; + userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); + // Add the default user agent value if it isn't already specified + // by the userAgentPrefix option. + const defaultUserAgentInfo = getDefaultUserAgentValue(); + if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { + userAgentInfo.push(defaultUserAgentInfo); + } + userAgentValue = userAgentInfo.join(" "); + } + const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); + const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); + const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); + if (coreUtil.isNode) { + requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); + } + const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); + const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); + requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); + if (redirectOptions.handleRedirects) { + requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); + } + if (authPolicyFactory) { + requestPolicyFactories.push(authPolicyFactory); + } + requestPolicyFactories.push(logPolicy(loggingOptions)); + if (coreUtil.isNode && pipelineOptions.decompressResponse === false) { + requestPolicyFactories.push(disableResponseDecompressionPolicy()); + } + return { + httpClient: pipelineOptions.httpClient, + requestPolicyFactories, + }; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var _a; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); + if (propertyValue !== undefined && propertyValue !== null) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent !== undefined && parent !== null && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. 
+ */ +function flattenResponse(_response, responseSpec) { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const addOperationResponse = (obj) => { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (typeName === "Sequence" || isPageableResponse) { + const arrayResponse = [...(_response.parsedBody || [])]; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); +} +function getCredentialScopes(options, baseUri) { + if (options === null || options === void 0 ? void 0 : options.credentialScopes) { + return options.credentialScopes; + } + if (baseUri) { + return `${baseUri}/.default`; + } + return undefined; +} + +// Copyright (c) Microsoft Corporation. +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +function createSpanFunction(args) { + return coreTracing.createSpanFunction(args); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +class ExpiringAccessTokenCache { + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { + this.cachedToken = undefined; + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. 
+ */ + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. + */ + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +class AccessTokenRefresher { + constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { + this.credential = credential; + this.scopes = scopes; + this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; + this.lastCalled = 0; + } + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady() { + // We're only ready for a new refresh if the required milliseconds have passed. + return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); + } + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + async getToken(options) { + this.lastCalled = Date.now(); + const token = await this.credential.getToken(this.scopes, options); + this.promise = undefined; + return token || undefined; + } + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options) { + if (!this.promise) { + this.promise = this.getToken(options); + } + return this.promise; + } +} + +// Copyright (c) Microsoft Corporation. +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +class BasicAuthenticationCredentials { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${encodeString(credentials)}`; + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Authenticates to a service using an API key. + */ +class ApiKeyCredentials { + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource) { + if (!webResource) { + return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + return Promise.resolve(webResource); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} + +Object.defineProperty(exports, 'delay', { + enumerable: true, + get: function () { return coreUtil.delay; } +}); +Object.defineProperty(exports, 'isNode', { + enumerable: true, + get: function () { return coreUtil.isNode; } +}); +Object.defineProperty(exports, 'isTokenCredential', { + enumerable: true, + get: function () { return coreAuth.isTokenCredential; } +}); +exports.AccessTokenRefresher = AccessTokenRefresher; +exports.ApiKeyCredentials = ApiKeyCredentials; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; +exports.Constants = Constants; +exports.DefaultHttpClient = NodeFetchHttpClient; +exports.ExpiringAccessTokenCache = ExpiringAccessTokenCache; +exports.HttpHeaders = HttpHeaders; +exports.MapperType = MapperType; +exports.RequestPolicyOptions = RequestPolicyOptions; +exports.RestError = RestError; +exports.Serializer = Serializer; +exports.ServiceClient = ServiceClient; +exports.TopicCredentials = TopicCredentials; +exports.URLBuilder = URLBuilder; +exports.URLQuery = URLQuery; +exports.WebResource = WebResource; +exports.XML_ATTRKEY = XML_ATTRKEY; +exports.XML_CHARKEY = XML_CHARKEY; +exports.applyMixins = applyMixins; +exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; +exports.createPipelineFromOptions = createPipelineFromOptions; +exports.createSpanFunction = createSpanFunction; +exports.deserializationPolicy = deserializationPolicy; +exports.deserializeResponseBody = deserializeResponseBody; +exports.disableResponseDecompressionPolicy = disableResponseDecompressionPolicy; +exports.encodeUri = encodeUri; +exports.executePromisesSequentially = executePromisesSequentially; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +exports.flattenResponse = flattenResponse; +exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; +exports.generateUuid = generateUuid; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.getDefaultUserAgentValue = getDefaultUserAgentValue; +exports.isDuration = isDuration; +exports.isValidUuid = isValidUuid; +exports.keepAlivePolicy = keepAlivePolicy; +exports.logPolicy = logPolicy; +exports.operationOptionsToRequestOptionsBase = operationOptionsToRequestOptionsBase; +exports.parseXML = parseXML; +exports.promiseToCallback = promiseToCallback; +exports.promiseToServiceCallback = promiseToServiceCallback; +exports.proxyPolicy = proxyPolicy; +exports.redirectPolicy = redirectPolicy; +exports.serializeObject = serializeObject; +exports.signingPolicy = signingPolicy; +exports.stringifyXML = stringifyXML; +exports.stripRequest = stripRequest; +exports.stripResponse = stripResponse; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +exports.tracingPolicy = tracingPolicy; +exports.userAgentPolicy = userAgentPolicy; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-http/dist/index.js.map b/node_modules/@azure/core-http/dist/index.js.map new file mode 100644 index 0000000..7d42cd4 --- /dev/null +++ b/node_modules/@azure/core-http/dist/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sources":["../src/httpHeaders.ts","../src/util/base64.ts","../src/util/constants.ts","../src/util/serializer.common.ts","../src/util/utils.ts","../src/serializer.ts","../src/webResource.ts","../src/url.ts","../src/proxyAgent.ts","../src/util/sanitizer.ts","../src/util/inspect.ts","../src/restError.ts","../src/log.ts","../src/nodeFetchHttpClient.ts","../src/httpPipelineLogLevel.ts","../src/operationOptions.ts","../src/policies/requestPolicy.ts","../src/util/xml.ts","../src/policies/deserializationPolicy.ts","../src/policies/keepAlivePolicy.ts","../src/policies/redirectPolicy.ts","../src/util/exponentialBackoffStrategy.ts","../src/policies/exponentialRetryPolicy.ts","../src/policies/logPolicy.ts","../src/operationParameter.ts","../src/operationSpec.ts","../src/policies/msRestUserAgentPolicy.ts","../src/policies/userAgentPolicy.ts","../src/queryCollectionFormat.ts","../src/policies/bearerTokenAuthenticationPolicy.ts","../src/policies/disableResponseDecompressionPolicy.ts","../src/policies/generateClientRequestIdPolicy.ts","../src/httpClientCache.ts","../src/policies/ndJsonPolicy.ts","../src/policies/proxyPolicy.ts","../src/policies/rpRegistrationPolicy.ts","../src/policies/signingPolicy.ts","../src/policies/systemErrorRetryPolicy.ts","../src/util/throttlingRetryStrategy.ts","../src/policies/throttlingRetryPolicy.ts","../src/policies/tracingPolicy.ts","../src/serviceClient.ts","../src/createSpanLegacy.ts","../src/credentials/accessTokenCache.ts","../src/credentials/accessTokenRefresher.ts","../src/credentials/basicAuthenticationCredentials.ts","../src/credentials/apiKeyCredentials.ts","../src/credentials/topicCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string): string {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(options?: { preserveCase?: boolean }): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n rawHeaders: unknown;\n clone: unknown;\n get: unknown;\n set: unknown;\n contains: unknown;\n remove: unknown;\n headersArray: unknown;\n headerValues: unknown;\n headerNames: unknown;\n toJson: unknown;\n };\n if (\n typeof castObject.rawHeaders === \"function\" &&\n typeof castObject.clone === \"function\" &&\n typeof castObject.get === \"function\" &&\n typeof castObject.set === \"function\" &&\n typeof castObject.contains === \"function\" &&\n typeof castObject.remove === \"function\" &&\n typeof castObject.headersArray === \"function\" &&\n typeof castObject.headerValues === \"function\" &&\n typeof castObject.headerNames === \"function\" &&\n typeof castObject.toJson === \"function\"\n ) {\n return true;\n }\n }\n\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders implements HttpHeadersLike {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString().trim(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n return this.toJson({ preserveCase: true });\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header values that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(options: { preserveCase?: boolean } = {}): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n if (options.preserveCase) {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name] = header.value;\n }\n } else {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[getHeaderKey(header.name)] = header.value;\n }\n }\n return result;\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson({ preserveCase: true }));\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n const resultPreservingCasing: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n resultPreservingCasing[header.name] = header.value;\n }\n return new HttpHeaders(resultPreservingCasing);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? 
value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A set of constants used internally when processing requests.\n */\nexport const Constants = {\n /**\n * The core-http version\n */\n coreHttpVersion: \"3.0.4\",\n\n /**\n * Specifies HTTP.\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n ServiceUnavailable: 503,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Default key used to access the XML attributes.\n */\nexport const XML_ATTRKEY = \"$\";\n/**\n * Default key used to access the XML value content.\n */\nexport const XML_CHARKEY = \"_\";\n\n/**\n * Options to govern behavior of xml parser and builder.\n */\nexport interface SerializerOptions {\n /**\n * indicates the name of the root element in the resulting XML when building XML.\n */\n rootName?: string;\n /**\n * indicates whether the root element is to be included or not in the output when parsing XML.\n */\n includeRoot?: boolean;\n /**\n * key used to access the XML value content when parsing XML.\n */\n xmlCharKey?: string;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Constants } from \"./constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { XML_ATTRKEY } from \"./serializer.common\";\nimport { v4 as uuidv4 } from \"uuid\";\n\nconst validUuidRegex =\n /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param urlToCheck - The url to check\n * @returns True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param uri - The URI to be encoded.\n * @returns The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n 
.replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param response - The Http Response\n * @returns The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param request - The Http Request object\n * @returns The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param uuid - The uuid as a string that needs to be validated\n * @returns True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Generated UUID\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises!\n *\n * @param promiseFactories - An array of promise factories(A function that return a promise)\n * @param kickstart - Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n * @returns A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(\n promiseFactories: Array,\n kickstart: unknown\n): Promise {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param err - The error occurred if any, while executing the request; otherwise null.\n * @param result - The deserialized response body if an error did not occur.\n * @param request - The raw/actual request sent to the server if an error did not occur.\n * @param response - The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param promise - The Promise to be converted to a callback\n * @returns A function that takes the callback `(cb: Function) => void`\n * @deprecated generated code should instead depend on responseToBody\n */\n// eslint-disable-next-line @typescript-eslint/ban-types\nexport function promiseToCallback(promise: Promise): (cb: Function) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n // eslint-disable-next-line @typescript-eslint/ban-types\n return (cb: Function): void => {\n promise\n .then((data: any) => {\n // eslint-disable-next-line 
promise/no-callback-in-promise\n return cb(undefined, data);\n })\n .catch((err: Error) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n cb(err);\n });\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(\n promise: Promise\n): (cb: ServiceCallback) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise\n .then((data: HttpOperationResponse) => {\n return process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n })\n .catch((err: Error) => {\n process.nextTick(cb, err);\n });\n };\n}\n\nexport function prepareXMLRootList(\n obj: unknown,\n elementName: string,\n xmlNamespaceKey?: string,\n xmlNamespace?: string\n): { [s: string]: any } {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n\n if (!xmlNamespaceKey || !xmlNamespace) {\n return { [elementName]: obj };\n }\n\n const result = { [elementName]: obj };\n result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace };\n return result;\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param targetCtor - The target object on which the properties need to be applied.\n * @param sourceCtors - An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void {\n const castTargetCtorParam = targetCtorParam as {\n prototype: Record;\n };\n sourceCtors.forEach((sourceCtor) => {\n Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => {\n castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];\n });\n });\n}\n\nconst validateISODuration =\n /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param value - The value to be validated for ISO 8601 duration format.\n * @returns `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param value - The value to search and replace in.\n * @param searchValue - The value to search for in the value argument.\n * @param replaceValue - The value to replace searchValue with in the value argument.\n * @returns The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? 
value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given entity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value - Any entity\n * @returns true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: unknown): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\n/**\n * @internal\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * @internal\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/* eslint-disable eqeqeq */\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\n\n// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications.\n\n/**\n * Used to map raw response objects to final shapes.\n * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON.\n * Also allows pulling values from headers, as well as inserting default values and constants.\n */\nexport class Serializer {\n constructor(\n /**\n * The provided model mapper.\n */\n public readonly modelMappers: { [key: string]: any } = {},\n /**\n * Whether the contents are XML or not.\n */\n public readonly isXML?: boolean\n ) {}\n\n /**\n * Validates constraints, if any. This function will throw if the provided value does not respect those constraints.\n * @param mapper - The definition of data models.\n * @param value - The value.\n * @param objectName - Name of the object. 
Used in the error messages.\n * @deprecated Removing the constraints validation on client side.\n */\n validateConstraints(mapper: Mapper, value: unknown, objectName: string): void {\n const failValidation = (\n constraintName: keyof MapperConstraints,\n constraintValue: any\n ): Error => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const valueAsNumber = value as number;\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n const valueAsArray = value as any[];\n if (MaxItems != undefined && valueAsArray.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && valueAsArray.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && valueAsArray.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && valueAsArray.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n valueAsArray.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param object - A valid Javascript object to be serialized.\n * @param objectName - Name of the serialized object.\n * @param options - additional options to deserialization.\n * @returns A valid serialized Javascript object.\n */\n serialize(\n mapper: Mapper,\n object: unknown,\n objectName?: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? 
XML_CHARKEY,\n };\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/i) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n if (mapperType.match(/^any$/i) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/i) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = serializeByteArrayType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = serializeBase64UrlType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = serializeSequenceType(\n this,\n mapper as SequenceMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = serializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Composite$/i) !== null) {\n payload = serializeCompositeType(\n this,\n mapper as CompositeMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param responseBody - A valid Javascript entity to be deserialized.\n * @param objectName - Name of the deserialized object.\n * @param options - Controls behavior of XML parser and builder.\n * @returns A valid deserialized Javascript object.\n */\n deserialize(\n mapper: Mapper,\n responseBody: unknown,\n objectName: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/i) !== null) {\n payload = deserializeCompositeType(\n this,\n mapper as CompositeMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else {\n if (this.isXML) {\n const xmlCharKey = updatedOptions.xmlCharKey;\n const castResponseBody = responseBody as Record;\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\" i.e., XML_ATTRKEY) and body (\"#\" i.e., XML_CHARKEY) properties,\n * then just reduce the responseBody value to the body (\"#\" i.e., XML_CHARKEY) property.\n */\n if (\n castResponseBody[XML_ATTRKEY] != undefined &&\n castResponseBody[xmlCharKey] != undefined\n ) {\n responseBody = castResponseBody[xmlCharKey];\n }\n }\n\n if (mapperType.match(/^Number$/i) !== null) {\n payload = parseFloat(responseBody as string);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/i) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) {\n payload = new Date(responseBody as string);\n } else if (mapperType.match(/^UnixTime$/i) !== null) {\n payload = unixTimeToDate(responseBody as number);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = base64.decodeString(responseBody as string);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = base64UrlToByteArray(responseBody as string);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = deserializeSequenceType(\n this,\n mapper as SequenceMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string): string {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n }\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // 
Base64Url to Base64.\n str = str.replace(/-/g, \"+\").replace(/_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/i) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/i) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/i) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/i) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/i) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !((typeof Blob === \"function\" || typeof Blob === \"object\") && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = base64.encodeByteArray(value);\n }\n return returnValue;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = bufferToBase64Url(value) || \"\";\n }\n return returnValue;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string): any {\n if (value != undefined) {\n if (typeName.match(/^Date$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/i) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. Instead was \"${value}\".`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any[] {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n const serializedValue = serializer.serialize(elementType, object[i], objectName, options);\n\n if (isXml && elementType.xmlNamespace) {\n const xmlnsKey = elementType.xmlNamespacePrefix\n ? 
`xmlns:${elementType.xmlNamespacePrefix}`\n : \"xmlns\";\n if (elementType.type.name === \"Composite\") {\n tempArray[i] = { ...serializedValue };\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n } else {\n tempArray[i] = {};\n tempArray[i][options.xmlCharKey] = serializedValue;\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n }\n } else {\n tempArray[i] = serializedValue;\n }\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): { [key: string]: any } {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n const serializedValue = serializer.serialize(valueType, object[key], objectName, options);\n // If the element needs an XML namespace we need to add it within the $ property\n tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options);\n }\n\n // Add the namespace to the root element if needed\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : \"xmlns\";\n\n const result = tempDictionary;\n result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace };\n return result;\n }\n\n return tempDictionary;\n}\n\n/**\n * Resolves the additionalProperties property from a referenced mapper.\n * @param serializer - The serializer containing the entire set of mappers.\n * @param mapper - The composite mapper to resolve.\n * @param objectName - Name of the object being serialized.\n */\nfunction resolveAdditionalProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): SequenceMapper | BaseMapper | CompositeMapper | DictionaryMapper | EnumMapper | undefined {\n const additionalProperties = mapper.type.additionalProperties;\n\n if (!additionalProperties && mapper.type.className) {\n const modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n return modelMapper?.type.additionalProperties;\n }\n\n return additionalProperties;\n}\n\n/**\n * Finds the mapper referenced by `className`.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveReferencedMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): CompositeMapper | undefined {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n return serializer.modelMappers[className];\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const 
modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${mapper.type.className}\".`);\n }\n modelProps = modelMapper?.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(modelMapper)}\" of type \"${\n mapper.type.className\n }\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (\n childObject == undefined &&\n (object[key] != undefined || propertyMapper.defaultValue !== undefined)\n ) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix\n ? `xmlns:${mapper.xmlNamespacePrefix}`\n : \"xmlns\";\n parentObject[XML_ATTRKEY] = {\n ...parentObject[XML_ATTRKEY],\n [xmlnsKey]: mapper.xmlNamespace,\n };\n }\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? 
objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName,\n options\n );\n\n if (serializedValue !== undefined && propName != undefined) {\n const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options);\n if (isXml && propertyMapper.xmlIsAttribute) {\n // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {};\n parentObject[XML_ATTRKEY][propName] = serializedValue;\n } else if (isXml && propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: value };\n } else {\n parentObject[propName] = value;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName);\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]',\n options\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction getXmlObjectValue(\n propertyMapper: Mapper,\n serializedValue: any,\n isXml: boolean,\n options: Required\n): any {\n if (!isXml || !propertyMapper.xmlNamespace) {\n return serializedValue;\n }\n\n const xmlnsKey = propertyMapper.xmlNamespacePrefix\n ? `xmlns:${propertyMapper.xmlNamespacePrefix}`\n : \"xmlns\";\n const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace };\n\n if ([\"Composite\"].includes(propertyMapper.type.name)) {\n if (serializedValue[XML_ATTRKEY]) {\n return serializedValue;\n } else {\n const result: any = { ...serializedValue };\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n }\n }\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n}\n\nfunction isSpecialXmlProperty(propertyName: string, options: Required): boolean {\n return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any {\n const xmlCharKey = options.xmlCharKey ?? 
XML_CHARKEY;\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName,\n options\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody[XML_ATTRKEY][xmlName!],\n propertyObjectName,\n options\n );\n } else if (propertyMapper.xmlIsMsText) {\n if (responseBody[xmlCharKey] !== undefined) {\n instance[key] = responseBody[xmlCharKey];\n } else if (typeof responseBody === \"string\") {\n // The special case where xml parser parses \"content\" into JSON of\n // `{ name: \"content\"}` instead of `{ name: { \"_\": \"content\" }}`\n instance[key] = responseBody;\n }\n } else {\n const propertyName = xmlElementName || xmlName || serializedName;\n if (propertyMapper.xmlIsWrapped) {\n /* a list of wrapped by \n For the xml example below\n \n ...\n ...\n \n the responseBody has\n {\n Cors: {\n CorsRule: [{...}, {...}]\n }\n }\n xmlName is \"Cors\" and xmlElementName is\"CorsRule\".\n */\n const wrapped = responseBody[xmlName!];\n const elementList = wrapped?.[xmlElementName!] ?? [];\n instance[key] = serializer.deserialize(\n propertyMapper,\n elementList,\n propertyObjectName,\n options\n );\n handledPropertyNames.push(xmlName!);\n } else {\n const property = responseBody[propertyName!];\n instance[key] = serializer.deserialize(\n propertyMapper,\n property,\n propertyObjectName,\n options\n );\n handledPropertyNames.push(propertyName!);\n }\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' 
in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [k, v] of Object.entries(instance)) {\n if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) {\n arrayInstance[k] = v;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string): boolean => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]',\n options\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key, options)\n ) {\n instance[key] = responseBody[key];\n }\n }\n }\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): { [key: string]: any } {\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any[] {\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n 
);\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(\n element,\n responseBody[i],\n `${objectName}[${i}]`,\n options\n );\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string): any {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\n/**\n * Description of various value constraints such as integer ranges and string regex.\n */\nexport interface MapperConstraints {\n /**\n * The value should be less than or equal to the `InclusiveMaximum` value.\n */\n InclusiveMaximum?: number;\n /**\n * The value should be less than the `ExclusiveMaximum` value.\n */\n ExclusiveMaximum?: number;\n /**\n * The value should be greater than or equal to the `InclusiveMinimum` value.\n */\n InclusiveMinimum?: number;\n /**\n * The value should be greater than the `InclusiveMinimum` value.\n */\n ExclusiveMinimum?: number;\n /**\n * The length should be smaller than the `MaxLength`.\n */\n MaxLength?: number;\n /**\n * The length should be bigger than the `MinLength`.\n */\n MinLength?: number;\n /**\n * The value must match the pattern.\n */\n Pattern?: RegExp;\n /**\n * The value must contain fewer items than the MaxItems value.\n */\n MaxItems?: number;\n /**\n * The value must contain more items than the `MinItems` value.\n */\n MinItems?: number;\n /**\n * The value must contain only unique items.\n */\n UniqueItems?: true;\n /**\n * The value should be exactly divisible by the `MultipleOf` value.\n */\n MultipleOf?: number;\n}\n\n/**\n * Type of the mapper. 
Includes known mappers.\n */\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\n/**\n * The type of a simple mapper.\n */\nexport interface SimpleMapperType {\n /**\n * Name of the type of the property.\n */\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\n/**\n * Helps build a mapper that describes how to map a set of properties of an object based on other mappers.\n *\n * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`.\n */\nexport interface CompositeMapperType {\n /**\n * Name of the composite mapper type.\n */\n name: \"Composite\";\n\n /**\n * Use `className` to reference another type definition.\n */\n className?: string;\n\n /**\n * Use `modelProperties` when the reference to the other type has been resolved.\n */\n modelProperties?: { [propertyName: string]: Mapper };\n\n /**\n * Used when a model has `additionalProperties: true`. Allows the generic processing of unnamed model properties on the response object.\n */\n additionalProperties?: Mapper;\n\n /**\n * The name of the top-most parent scheme, the one that has no parents.\n */\n uberParent?: string;\n\n /**\n * A polymorphic discriminator.\n */\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\n/**\n * Helps build a mapper that describes how to parse a sequence of mapped values.\n */\nexport interface SequenceMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Sequence\";\n /**\n * The mapper to use to map each one of the properties of the sequence.\n */\n element: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse a dictionary of mapped values.\n */\nexport interface DictionaryMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Dictionary\";\n /**\n * The mapper to use to map the value of each property in the dictionary.\n */\n value: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse an enum value.\n */\nexport interface EnumMapperType {\n /**\n * Name of the enum type mapper.\n */\n name: \"Enum\";\n /**\n * Values allowed by this mapper.\n */\n allowedValues: any[];\n}\n\n/**\n * The base definition of a mapper. 
Can be used for XML and plain JavaScript objects.\n */\nexport interface BaseMapper {\n /**\n * Name for the xml element\n */\n xmlName?: string;\n /**\n * Xml element namespace\n */\n xmlNamespace?: string;\n /**\n * Xml element namespace prefix\n */\n xmlNamespacePrefix?: string;\n /**\n * Determines if the current property should be serialized as an attribute of the parent xml element\n */\n xmlIsAttribute?: boolean;\n /**\n * Determines if the current property should be serialized as the inner content of the xml element\n */\n xmlIsMsText?: boolean;\n /**\n * Name for the xml elements when serializing an array\n */\n xmlElementName?: string;\n /**\n * Whether or not the current property should have a wrapping XML element\n */\n xmlIsWrapped?: boolean;\n /**\n * Whether or not the current property is readonly\n */\n readOnly?: boolean;\n /**\n * Whether or not the current property is a constant\n */\n isConstant?: boolean;\n /**\n * Whether or not the current property is required\n */\n required?: boolean;\n /**\n * Whether or not the current property allows mull as a value\n */\n nullable?: boolean;\n /**\n * The name to use when serializing\n */\n serializedName?: string;\n /**\n * Type of the mapper\n */\n type: MapperType;\n /**\n * Default value when one is not explicitly provided\n */\n defaultValue?: any;\n /**\n * Constraints to test the current value against\n */\n constraints?: MapperConstraints;\n}\n\n/**\n * Mappers are definitions of the data models used in the library.\n * These data models are part of the Operation or Client definitions in the responses or parameters.\n */\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\n/**\n * Used to disambiguate discriminated type unions.\n * For example, if response can have many shapes but also includes a 'kind' field (or similar),\n * that field can be used to determine how to deserialize the response to the correct type.\n */\nexport interface PolymorphicDiscriminator {\n /**\n * Name of the discriminant property in the original JSON payload, e.g. 
`@odata.kind`.\n */\n serializedName: string;\n /**\n * Name to use on the resulting object instead of the original property name.\n * Useful since the JSON property could be difficult to work with.\n * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`.\n */\n clientName: string;\n /**\n * It may contain any other property.\n */\n [key: string]: string;\n}\n\n/**\n * A mapper composed of other mappers.\n */\nexport interface CompositeMapper extends BaseMapper {\n /**\n * The type descriptor of the `CompositeMapper`.\n */\n type: CompositeMapperType;\n}\n\n/**\n * A mapper describing arrays.\n */\nexport interface SequenceMapper extends BaseMapper {\n /**\n * The type descriptor of the `SequenceMapper`.\n */\n type: SequenceMapperType;\n}\n\n/**\n * A mapper describing plain JavaScript objects used as key/value pairs.\n */\nexport interface DictionaryMapper extends BaseMapper {\n /**\n * The type descriptor of the `DictionaryMapper`.\n */\n type: DictionaryMapperType;\n /**\n * Optionally, a prefix to add to the header collection.\n */\n headerCollectionPrefix?: string;\n}\n\n/**\n * A mapper describing an enum value.\n */\nexport interface EnumMapper extends BaseMapper {\n /**\n * The type descriptor of the `EnumMapper`.\n */\n type: EnumMapperType;\n}\n\n/**\n * An interface representing an URL parameter value.\n */\nexport interface UrlParameterValue {\n /**\n * The URL value.\n */\n value: string;\n /**\n * Whether to keep or skip URL encoding.\n */\n skipUrlEncoding: boolean;\n}\n\n/**\n * Utility function that serializes an object that might contain binary information into a plain object, array or a string.\n */\nexport function serializeObject(toSerialize: unknown): any {\n const castToSerialize = toSerialize as Record;\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(castToSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\n/**\n * String enum containing the string types of property mappers.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Context, SpanOptions } from \"@azure/core-tracing\";\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from 
\"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { SerializerOptions } from \"./util/serializer.common\";\nimport { generateUuid } from \"./util/utils\";\n\n/**\n * List of supported HTTP methods.\n */\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\n\n/**\n * Possible HTTP request body types\n */\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * A description of a HTTP request to be made to a remote server.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * A unique identifier for the request. Used for logging and tracing.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. 
*/\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: unknown): object is WebResourceLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n url: unknown;\n method: unknown;\n headers: unknown;\n validateRequestProperties: unknown;\n prepare: unknown;\n clone: unknown;\n };\n if (\n typeof castObject.url === \"string\" &&\n typeof castObject.method === \"string\" &&\n typeof castObject.headers === \"object\" &&\n isHttpHeadersLike(castObject.headers) &&\n typeof castObject.validateRequestProperties === \"function\" &&\n typeof castObject.prepare === \"function\" &&\n typeof castObject.clone === \"function\"\n ) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n */\nexport class WebResource implements WebResourceLike {\n /**\n * URL of the outgoing request.\n */\n url: string;\n /**\n * HTTP method to use.\n */\n method: HttpMethods;\n /**\n * Request body.\n */\n body?: any;\n /**\n * HTTP headers.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. 
If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * Query added to the URL.\n */\n query?: { [key: string]: any };\n /**\n * Specification of the HTTP request.\n */\n operationSpec?: OperationSpec;\n /**\n * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination.\n */\n withCredentials: boolean;\n /**\n * How long to wait in milliseconds before aborting the request.\n */\n timeout: number;\n /**\n * What proxy to use, if necessary.\n */\n proxySettings?: ProxySettings;\n /**\n * Whether to keep the HTTP connections alive throughout requests.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * Unique identifier of the outgoing request.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * Tracing: Context used when creating Spans.\n */\n tracingContext?: Context;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: unknown,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n decompressResponse?: boolean,\n streamResponseStatusCodes?: Set\n ) {\n this.streamResponseBody = streamResponseBody;\n this.streamResponseStatusCodes = streamResponseStatusCodes;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.decompressResponse = decompressResponse;\n this.requestId = this.headers.get(\"x-ms-client-request-id\") || generateUuid();\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. 
It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param options - Options to provide for preparing the request.\n * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (\n options.method === undefined ||\n options.method === null ||\n typeof options.method.valueOf() !== \"string\"\n ) {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate === undefined ||\n options.pathTemplate === null ||\n typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url === undefined ||\n options.url === null ||\n typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({[\\w-]*\\s*[\\w-]*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. 
Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2);\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in parameters: ${stringifiedPathParameters}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", this.requestId);\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly\n this.body = options.body;\n if (options.body !== undefined && options.body !== null) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n if (options.spanOptions) {\n this.spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n this.tracingContext = options.tracingContext;\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.decompressResponse,\n this.streamResponseStatusCodes\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\n/**\n * Options to prepare an outgoing HTTP request.\n */\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: `{ \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }`\n * - query-parameter-value in \"string\" format: `{ \"query-parameter-name\": \"query-parameter-value\"}`.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}`\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: `{ \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }`\n * - path-parameter-value in \"string\" format: `{ \"path-parameter-name\": \"path-parameter-value\" }`.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n /**\n * Form data, used to build the request body.\n */\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: Record;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Allows keeping track of the progress of uploading the outgoing request.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Allows keeping track of the progress of downloading the incoming response.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n /**\n * Value of the parameter.\n */\n value: any;\n /**\n * Disables URL encoding if set to true.\n */\n skipUrlEncoding: boolean;\n /**\n * Parameter values may contain any other property.\n */\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * May contain other properties.\n */\n [key: string]: any;\n\n /**\n * Options to override XML parsing/building behavior.\n */\n serializerOptions?: SerializerOptions;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Get the keys of the query string.\n */\n public keys(): string[] {\n return Object.keys(this._rawQuery);\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: unknown): void {\n const caseParameterValue = parameterValue as {\n toString: () => string;\n };\n if (parameterName) {\n if (caseParameterValue !== undefined && caseParameterValue !== null) {\n const newValue = Array.isArray(caseParameterValue)\n ? 
caseParameterValue\n : caseParameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. 
If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port === undefined || port === null || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n let tokenPath: string | undefined;\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n tokenPath = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n /**\n * Serializes the URL as a string.\n * @returns the URL as a string.\n */\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n /**\n * Parses a given string URL into a new {@link URLBuilder}.\n */\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode 
<= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state !== undefined && state !== null ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const host = URLBuilder.parse(proxySettings.host).getHost() as string;\n if (!host) {\n throw new Error(\"Expecting a non-empty host in proxy settings.\");\n }\n if (!isValidPort(proxySettings.port)) {\n throw new Error(\"Expecting a valid port number in the range of [0, 65535] in proxy settings.\");\n }\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: host,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const isRequestHttps = isUrlHttps(requestUrl);\n const isProxyHttps = isUrlHttps(proxySettings.host);\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\nexport function isUrlHttps(url: string): boolean {\n const urlScheme = URLBuilder.parse(url).getScheme() || \"\";\n return urlScheme.toLowerCase() === \"https\";\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: tunnel.HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n\nfunction isValidPort(port: number): boolean {\n // any port in 0-65535 range is valid (RFC 793) even though almost all implementations\n // will reserve 0 for a specific purpose, 
and a range of numbers for ephemeral ports\n return 0 <= port && port <= 65535;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { URLBuilder, URLQuery } from \"../url\";\nimport { UnknownObject, isObject } from \"./utils\";\n\nexport interface SanitizerOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n}\n\nconst RedactedString = \"REDACTED\";\n\nconst defaultAllowedHeaderNames = [\n \"x-ms-client-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-useragent\",\n \"x-ms-correlation-request-id\",\n \"x-ms-request-id\",\n \"client-request-id\",\n \"ms-cv\",\n \"return-client-request-id\",\n \"traceparent\",\n\n \"Access-Control-Allow-Credentials\",\n \"Access-Control-Allow-Headers\",\n \"Access-Control-Allow-Methods\",\n \"Access-Control-Allow-Origin\",\n \"Access-Control-Expose-Headers\",\n \"Access-Control-Max-Age\",\n \"Access-Control-Request-Headers\",\n \"Access-Control-Request-Method\",\n \"Origin\",\n\n \"Accept\",\n \"Accept-Encoding\",\n \"Cache-Control\",\n \"Connection\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"ETag\",\n \"Expires\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"Last-Modified\",\n \"Pragma\",\n \"Request-Id\",\n \"Retry-After\",\n \"Server\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"WWW-Authenticate\",\n];\n\nconst defaultAllowedQueryParameters: string[] = [\"api-version\"];\n\nexport class Sanitizer {\n public allowedHeaderNames: Set;\n public allowedQueryParameters: Set;\n\n constructor({ allowedHeaderNames = [], allowedQueryParameters = [] }: SanitizerOptions = {}) {\n allowedHeaderNames = Array.isArray(allowedHeaderNames)\n ? defaultAllowedHeaderNames.concat(allowedHeaderNames)\n : defaultAllowedHeaderNames;\n\n allowedQueryParameters = Array.isArray(allowedQueryParameters)\n ? defaultAllowedQueryParameters.concat(allowedQueryParameters)\n : defaultAllowedQueryParameters;\n\n this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase()));\n this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase()));\n }\n\n public sanitize(obj: unknown): string {\n const seen = new Set();\n return JSON.stringify(\n obj,\n (key: string, value: unknown) => {\n // Ensure Errors include their interesting non-enumerable members\n if (value instanceof Error) {\n return {\n ...value,\n name: value.name,\n message: value.message,\n };\n }\n\n if (key === \"_headersMap\") {\n return this.sanitizeHeaders(value as UnknownObject);\n } else if (key === \"url\") {\n return this.sanitizeUrl(value as string);\n } else if (key === \"query\") {\n return this.sanitizeQuery(value as UnknownObject);\n } else if (key === \"body\") {\n // Don't log the request body\n return undefined;\n } else if (key === \"response\") {\n // Don't log response again\n return undefined;\n } else if (key === \"operationSpec\") {\n // When using sendOperationRequest, the request carries a massive\n // field with the autorest spec. 
No need to log it.\n return undefined;\n } else if (Array.isArray(value) || isObject(value)) {\n if (seen.has(value)) {\n return \"[Circular]\";\n }\n seen.add(value);\n }\n\n return value;\n },\n 2\n );\n }\n\n private sanitizeHeaders(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value);\n }\n\n private sanitizeQuery(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]);\n }\n\n private sanitizeObject(\n value: UnknownObject,\n allowedKeys: Set,\n accessor: (value: any, key: string) => any\n ): UnknownObject {\n if (typeof value !== \"object\" || value === null) {\n return value;\n }\n\n const sanitized: UnknownObject = {};\n\n for (const k of Object.keys(value)) {\n if (allowedKeys.has(k.toLowerCase())) {\n sanitized[k] = accessor(value, k);\n } else {\n sanitized[k] = RedactedString;\n }\n }\n\n return sanitized;\n }\n\n private sanitizeUrl(value: string): string {\n if (typeof value !== \"string\" || value === null) {\n return value;\n }\n\n const urlBuilder = URLBuilder.parse(value);\n const queryString = urlBuilder.getQuery();\n\n if (!queryString) {\n return value;\n }\n\n const query = URLQuery.parse(queryString);\n for (const k of query.keys()) {\n if (!this.allowedQueryParameters.has(k.toLowerCase())) {\n query.set(k, RedactedString);\n }\n }\n\n urlBuilder.setQuery(query.toString());\n return urlBuilder.toString();\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { inspect } from \"util\";\n\nexport const custom = inspect.custom;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { Sanitizer } from \"./util/sanitizer\";\nimport { WebResourceLike } from \"./webResource\";\nimport { custom } from \"./util/inspect\";\n\nconst errorSanitizer = new Sanitizer();\n\n/**\n * An error resulting from an HTTP request to a service endpoint.\n */\nexport class RestError extends Error {\n /**\n * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.)\n */\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n /**\n * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete.\n */\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n /**\n * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT).\n */\n code?: string;\n /**\n * The HTTP status code of the response, if one was returned.\n */\n statusCode?: number;\n /**\n * Outgoing request.\n */\n request?: WebResourceLike;\n /**\n * Incoming response.\n */\n response?: HttpOperationResponse;\n /**\n * Any additional details. 
In the case of deserialization errors, can be the processed response.\n */\n details?: unknown;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ) {\n super(message);\n this.name = \"RestError\";\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n\n /**\n * Logging method for util.inspect in Node\n */\n [custom](): string {\n return `RestError: ${this.message} \\n ${errorSanitizer.sanitize(this)}`;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { createClientLogger } from \"@azure/logger\";\nexport const logger = createClientLogger(\"core-http\");\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport { AbortController, AbortError } from \"@azure/abort-controller\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxyAgent, createProxyAgent, isUrlHttps } from \"./proxyAgent\";\nimport { Readable, Transform } from \"stream\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport FormData from \"form-data\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\nimport { logger } from \"./log\";\nimport node_fetch from \"node-fetch\";\n\ninterface AgentCache {\n httpAgent?: http.Agent;\n httpsAgent?: https.Agent;\n}\n\nfunction getCachedAgent(\n isHttps: boolean,\n agentCache: AgentCache\n): http.Agent | https.Agent | undefined {\n return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent;\n}\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\n/**\n * String URLs used when calling to `fetch()`.\n */\nexport type CommonRequestInfo = string;\n\n/**\n * An object containing information about the outgoing HTTP request.\n */\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\n/**\n * An object containing information about the incoming HTTP response.\n */\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport class ReportTransform extends Transform {\n private loadedBytes: number = 0;\n _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void {\n this.push(chunk);\n this.loadedBytes += chunk.length;\n this.progressCallback!({ loadedBytes: this.loadedBytes });\n callback(undefined);\n }\n\n constructor(private progressCallback: (progress: TransferProgressEvent) => void) {\n super();\n }\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable, aborter?: AbortController): Promise {\n return new Promise((resolve) => {\n stream.once(\"close\", () => {\n aborter?.abort();\n resolve();\n });\n stream.once(\"end\", resolve);\n stream.once(\"error\", resolve);\n });\n}\n\n/**\n * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike}\n */\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n\n/**\n * An HTTP client that uses `node-fetch`.\n */\nexport class NodeFetchHttpClient implements HttpClient {\n /**\n * Provides minimum viable error handling and the logic that executes the abstract methods.\n * @param httpRequest - Object representing the outgoing HTTP request.\n * @returns An object representing the incoming HTTP response.\n */\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if 
(Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n const onUploadProgress = httpRequest.onUploadProgress;\n const uploadReportStream = new ReportTransform(onUploadProgress);\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n // the types for RequestInit are from the browser, which expects AbortSignal to\n // have `reason` and `throwIfAborted`, but these don't exist on our polyfill\n // for Node.\n signal: abortController.signal as any,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n\n const streaming =\n httpRequest.streamResponseStatusCodes?.has(response.status) ||\n httpRequest.streamResponseBody;\n\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: streaming\n ? (response.body as unknown as NodeJS.ReadableStream)\n : undefined,\n bodyAsText: !streaming ? await response.text() : undefined,\n };\n\n const onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n const responseBody: ReadableStream | undefined = response.body || undefined;\n\n if (isReadableStream(responseBody)) {\n const downloadReportStream = new ReportTransform(onDownloadProgress);\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n } else {\n const length = parseInt(headers.get(\"Content-Length\")!) 
|| undefined;\n if (length) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length });\n }\n }\n }\n\n await this.processRequest(operationResponse);\n\n return operationResponse;\n } catch (error: any) {\n const fetchError: FetchError = error;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(\n fetchError.message,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n httpRequest\n );\n } else if (fetchError.type === \"aborted\") {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n throw fetchError;\n } finally {\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n let uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n let downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse?.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(\n operationResponse!.readableStreamBody,\n abortController\n );\n }\n\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(() => {\n httpRequest.abortSignal?.removeEventListener(\"abort\", abortListener!);\n return;\n })\n .catch((e) => {\n logger.warning(\"Error when cleaning up abortListener on httpRequest\", e);\n });\n }\n }\n }\n\n // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent\n private proxyAgentMap: Map = new Map();\n private keepAliveAgents: AgentCache = {};\n\n private getOrCreateAgent(httpRequest: WebResourceLike): http.Agent | https.Agent {\n const isHttps = isUrlHttps(httpRequest.url);\n\n // At the moment, proxy settings and keepAlive are mutually\n // exclusive because the 'tunnel' library currently lacks the\n // ability to create a proxy with keepAlive turned on.\n if (httpRequest.proxySettings) {\n const { host, port, username, password } = httpRequest.proxySettings;\n const key = `${host}:${port}:${username}:${password}`;\n const proxyAgents = this.proxyAgentMap.get(key) ?? {};\n\n let agent = getCachedAgent(isHttps, proxyAgents);\n if (agent) {\n return agent;\n }\n\n const tunnel: ProxyAgent = createProxyAgent(\n httpRequest.url,\n httpRequest.proxySettings,\n httpRequest.headers\n );\n\n agent = tunnel.agent;\n if (tunnel.isHttps) {\n proxyAgents.httpsAgent = tunnel.agent as https.Agent;\n } else {\n proxyAgents.httpAgent = tunnel.agent;\n }\n this.proxyAgentMap.set(key, proxyAgents);\n\n return agent;\n } else if (httpRequest.keepAlive) {\n let agent = getCachedAgent(isHttps, this.keepAliveAgents);\n if (agent) {\n return agent;\n }\n\n const agentOptions: http.AgentOptions | https.AgentOptions = {\n keepAlive: httpRequest.keepAlive,\n };\n\n if (isHttps) {\n agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions);\n } else {\n agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions);\n }\n\n return agent;\n } else {\n return isHttps ? 
https.globalAgent : http.globalAgent;\n }\n }\n\n /**\n * Uses `node-fetch` to perform the request.\n */\n // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs\n async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise {\n return node_fetch(input, init) as unknown as Promise;\n }\n\n /**\n * Prepares a request based on the provided web resource.\n */\n async prepareRequest(httpRequest: WebResourceLike): Promise> {\n const requestInit: Partial = {};\n\n // Set the http(s) agent\n requestInit.agent = this.getOrCreateAgent(httpRequest);\n\n requestInit.compress = httpRequest.decompressResponse;\n\n return requestInit;\n }\n\n /**\n * Process an HTTP response.\n */\n async processRequest(_operationResponse: HttpOperationResponse): Promise {\n /* no_op */\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestOptionsBase, TransferProgressEvent } from \"./webResource\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * The base options type for all operations.\n */\nexport interface OperationOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: OperationRequestOptions;\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * Options that allow configuring the handling of HTTP requests made by an SDK operation.\n */\nexport interface OperationRequestOptions {\n /**\n * User defined custom request headers that will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n}\n\n/**\n * Converts an OperationOptions to a RequestOptionsBase\n *\n * @param opts - OperationOptions object to convert to RequestOptionsBase\n */\nexport function operationOptionsToRequestOptionsBase(\n opts: T\n): RequestOptionsBase {\n const { requestOptions, tracingOptions, ...additionalOptions } = opts;\n\n let result: RequestOptionsBase = additionalOptions;\n\n if (requestOptions) {\n result = { ...result, ...requestOptions };\n }\n\n if (tracingOptions) {\n result.tracingContext = tracingOptions.tracingContext;\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n result.spanOptions = (tracingOptions as any)?.spanOptions;\n }\n\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\n/**\n * The underlying structure of a request policy.\n */\nexport interface RequestPolicy {\n /**\n * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made.\n * @param httpRequest - {@link WebResourceLike} describing the request to be made.\n */\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\n/**\n * The base class from which all request policies derive.\n */\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n /**\n * The main method to implement that manipulates a request/response.\n */\n protected constructor(\n /**\n * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline.\n */\n readonly _nextPolicy: RequestPolicy,\n /**\n * The options that can be passed to a given request policy.\n */\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n /**\n * Sends a network request based on the given web resource.\n * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made.\n */\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as xml2js from \"xml2js\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\n// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed\n// by the xm2js library is mutable. 
See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536\n// By creating a new copy of the settings each time we instantiate the parser,\n// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.\nconst xml2jsDefaultOptionsV2: xml2js.OptionsV2 = {\n explicitCharkey: false,\n trim: false,\n normalize: false,\n normalizeTags: false,\n attrkey: XML_ATTRKEY,\n explicitArray: true,\n ignoreAttrs: false,\n mergeAttrs: false,\n explicitRoot: true,\n validator: undefined,\n xmlns: false,\n explicitChildren: false,\n preserveChildrenOrder: false,\n childkey: \"$$\",\n charsAsChildren: false,\n includeWhiteChars: false,\n async: false,\n strict: true,\n attrNameProcessors: undefined,\n attrValueProcessors: undefined,\n tagNameProcessors: undefined,\n valueProcessors: undefined,\n rootName: \"root\",\n xmldec: {\n version: \"1.0\",\n encoding: \"UTF-8\",\n standalone: true,\n },\n doctype: undefined,\n renderOpts: {\n pretty: true,\n indent: \" \",\n newline: \"\\n\",\n },\n headless: false,\n chunkSize: 10000,\n emptyTag: \"\",\n cdata: false,\n};\n\n// The xml2js settings for general XML parsing operations.\nconst xml2jsParserSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsParserSettings.explicitArray = false;\n\n// The xml2js settings for general XML building operations.\nconst xml2jsBuilderSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsBuilderSettings.explicitArray = false;\nxml2jsBuilderSettings.renderOpts = {\n pretty: false,\n};\n\n/**\n * Converts given JSON object to XML string\n * @param obj - JSON object to be converted into XML string\n * @param opts - Options that govern the parsing of given JSON object\n */\nexport function stringifyXML(obj: unknown, opts: SerializerOptions = {}): string {\n xml2jsBuilderSettings.rootName = opts.rootName;\n xml2jsBuilderSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const builder = new xml2js.Builder(xml2jsBuilderSettings);\n return builder.buildObject(obj);\n}\n\n/**\n * Converts given XML string into JSON\n * @param str - String containing the XML content to be parsed into JSON\n * @param opts - Options that govern the parsing of given xml string\n */\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n xml2jsParserSettings.explicitRoot = !!opts.includeRoot;\n xml2jsParserSettings.charkey = opts.xmlCharKey ?? 
XML_CHARKEY;\n const xmlParser = new xml2js.Parser(xml2jsParserSettings);\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err: any, res: any) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { SerializerOptions, XML_CHARKEY } from \"../util/serializer.common\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { MapperType } from \"../serializer\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { parseXML } from \"../util/xml\";\n\n/**\n * Options to configure API response deserialization.\n */\nexport interface DeserializationOptions {\n /**\n * Configures the expected content types for the deserialization of\n * JSON and XML response bodies.\n */\n expectedContentTypes: DeserializationContentTypes;\n}\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions?: SerializerOptions\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DeserializationPolicy(\n nextPolicy,\n options,\n deserializationContentTypes,\n parsingOptions\n );\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\nexport const DefaultDeserializationOptions: DeserializationOptions = {\n expectedContentTypes: {\n json: defaultJsonContentTypes,\n xml: defaultXmlContentTypes,\n },\n};\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n public readonly xmlCharKey: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n requestPolicyOptions: RequestPolicyOptions,\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions: SerializerOptions = {}\n ) {\n super(nextPolicy, requestPolicyOptions);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n this.xmlCharKey = parsingOptions.xmlCharKey ?? 
XML_CHARKEY;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, {\n xmlCharKey: this.xmlCharKey,\n })\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\n/**\n * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}.\n * @param jsonContentTypes - Response content types to parse the body as JSON.\n * @param xmlContentTypes - Response content types to parse the body as XML.\n * @param response - HTTP Response from the pipeline.\n * @param options - Options to the serializer, mostly for configuring the XML parser if needed.\n * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}.\n */\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse,\n options: SerializerOptions = {}\n): Promise {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(\n (parsedResponse) => {\n if (!shouldDeserializeResponse(parsedResponse)) {\n return parsedResponse;\n }\n\n const operationSpec = parsedResponse.request.operationSpec;\n if (!operationSpec || !operationSpec.responses) {\n return parsedResponse;\n }\n\n const responseSpec = getOperationResponse(parsedResponse);\n\n const { error, shouldReturnResponse } = handleErrorResponse(\n parsedResponse,\n operationSpec,\n responseSpec\n );\n if (error) {\n throw error;\n } else if (shouldReturnResponse) {\n return parsedResponse;\n }\n\n // An operation response spec does exist for current status code, so\n // use it to deserialize the response.\n if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\",\n options\n );\n } catch (innerError: any) {\n const restError = new RestError(\n `Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n throw restError;\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.toJson(),\n \"operationRes.parsedHeaders\",\n options\n );\n }\n }\n\n return parsedResponse;\n }\n );\n}\n\nfunction isOperationSpecEmpty(operationSpec: OperationSpec): boolean {\n const expectedStatusCodes = Object.keys(operationSpec.responses);\n return (\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\")\n );\n}\n\nfunction handleErrorResponse(\n parsedResponse: HttpOperationResponse,\n operationSpec: OperationSpec,\n responseSpec: OperationResponse | undefined\n): { error: RestError | null; shouldReturnResponse: boolean } {\n const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;\n const isExpectedStatusCode: boolean = isOperationSpecEmpty(operationSpec)\n ? isSuccessByStatus\n : !!responseSpec;\n\n if (isExpectedStatusCode) {\n if (responseSpec) {\n if (!responseSpec.isError) {\n return { error: null, shouldReturnResponse: false };\n }\n } else {\n return { error: null, shouldReturnResponse: false };\n }\n }\n\n const errorResponseSpec = responseSpec ?? operationSpec.responses.default;\n const streaming =\n parsedResponse.request.streamResponseStatusCodes?.has(parsedResponse.status) ||\n parsedResponse.request.streamResponseBody;\n const initialErrorMessage = streaming\n ? `Unexpected status code: ${parsedResponse.status}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(\n initialErrorMessage,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n\n // If the item failed but there's no error spec or default spec to deserialize the error,\n // we should fail so we just throw the parsed response\n if (!errorResponseSpec) {\n throw error;\n }\n\n const defaultBodyMapper = errorResponseSpec.bodyMapper;\n const defaultHeadersMapper = errorResponseSpec.headersMapper;\n\n try {\n // If error response has a body, try to deserialize it using default body mapper.\n // Then try to extract error code & message from it\n if (parsedResponse.parsedBody) {\n const parsedBody = parsedResponse.parsedBody;\n let parsedError;\n if (defaultBodyMapper) {\n let valueToDeserialize: any = parsedBody;\n if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof parsedBody === \"object\" ? parsedBody[defaultBodyMapper.xmlElementName!] 
: [];\n }\n parsedError = operationSpec.serializer.deserialize(\n defaultBodyMapper,\n valueToDeserialize,\n \"error.response.parsedBody\"\n );\n }\n\n const internalError: any = parsedBody.error || parsedError || parsedBody;\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n\n if (defaultBodyMapper) {\n error.response!.parsedBody = parsedError;\n }\n }\n\n // If error response has headers, try to deserialize it using default header mapper\n if (parsedResponse.headers && defaultHeadersMapper) {\n error.response!.parsedHeaders = operationSpec.serializer.deserialize(\n defaultHeadersMapper,\n parsedResponse.headers.toJson(),\n \"operationRes.parsedHeaders\"\n );\n }\n } catch (defaultError: any) {\n error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`;\n }\n\n return { error, shouldReturnResponse: false };\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse,\n opts: Required\n): Promise {\n const errorHandler = (err: Error & { code: string }): Promise => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse\n );\n return Promise.reject(e);\n };\n\n const streaming =\n operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) ||\n operationResponse.request.streamResponseBody;\n if (!streaming && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text, opts)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Options for how HTTP connections should be maintained for future\n * requests.\n */\nexport interface KeepAliveOptions {\n /**\n * When true, connections will be kept alive for multiple requests.\n * Defaults to true.\n */\n enable: boolean;\n}\n\n/**\n * By default, HTTP connections are maintained for future requests.\n */\nexport const DefaultKeepAliveOptions: KeepAliveOptions = {\n enable: true,\n};\n\n/**\n * Creates a policy that controls whether HTTP connections are maintained on future requests.\n * @param keepAliveOptions - Keep alive options. 
By default, HTTP connections are maintained for future requests.\n * @returns An instance of the {@link KeepAlivePolicy}\n */\nexport function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);\n },\n };\n}\n\n/**\n * KeepAlivePolicy is a policy used to control keep alive settings for every request.\n */\nexport class KeepAlivePolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param keepAliveOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private readonly keepAliveOptions: KeepAliveOptions\n ) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResourceLike): Promise {\n request.keepAlive = this.keepAliveOptions.enable;\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Methods that are allowed to follow redirects 301 and 302\n */\nconst allowedRedirect = [\"GET\", \"HEAD\"];\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /**\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /**\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\n/**\n * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n * @param maximumRetries - Maximum number of redirects to follow.\n * @returns An instance of the {@link RedirectPolicy}\n */\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\n/**\n * Resends the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n */\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, readonly maxRetries = 20) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && allowedRedirect.includes(request.method)) ||\n (status === 302 && allowedRedirect.includes(request.method)) ||\n (status === 303 && request.method === \"POST\") ||\n status === 307) &&\n (!policy.maxRetries || currentRetries < policy.maxRetries)\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n if (status === 303) {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1));\n }\n\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"..\";\n\nexport const DEFAULT_CLIENT_RETRY_COUNT = 3;\n// intervals are in ms\nexport const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nexport const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nexport const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\nexport function isNumber(n: unknown): n is number {\n return typeof n === \"number\";\n}\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\n/**\n * @internal\n * Determines if the operation should be retried.\n *\n * @param retryLimit - Specifies the max number of retries.\n * @param predicate - Initial chekck on whether to retry based on given responses or errors\n * @param retryData - The retry data.\n * @returns True if the operation qualifies for a retry; false otherwise.\n */\nexport function shouldRetry(\n retryLimit: number,\n predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean,\n retryData: RetryData,\n response?: HttpOperationResponse,\n error?: RetryError\n): boolean {\n if 
(!predicate(response, error)) {\n return false;\n }\n\n return retryData.retryCount < retryLimit;\n}\n\n/**\n * @internal\n * Updates the retry data for the next attempt.\n *\n * @param retryOptions - specifies retry interval, and its lower bound and upper bound.\n * @param retryData - The retry data.\n * @param err - The operation\"s error, if any.\n */\nexport function updateRetryData(\n retryOptions: { retryInterval: number; minRetryInterval: number; maxRetryInterval: number },\n retryData: RetryData = { retryCount: 0, retryInterval: 0 },\n err?: RetryError\n): RetryData {\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;\n const boundedRandDelta =\n retryOptions.retryInterval * 0.8 +\n Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n retryOptions.minRetryInterval + incrementDelta,\n retryOptions.maxRetryInterval\n );\n\n return retryData;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\nimport { logger } from \"../log\";\n\n/**\n * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time.\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - Base time between retries.\n * @param maxRetryInterval - Maximum time to wait between retries.\n */\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * Describes the Retry Mode type. Currently supporting only Exponential.\n */\nexport enum RetryMode {\n /**\n * Currently supported retry mode.\n * Each time a retry happens, it will take exponentially more time than the last time.\n */\n Exponential,\n}\n\n/**\n * Options that control how to retry failed requests.\n */\nexport interface RetryOptions {\n /**\n * The maximum number of retry attempts. Defaults to 3.\n */\n maxRetries?: number;\n\n /**\n * The amount of delay in milliseconds between retry attempts. Defaults to 30000\n * (30 seconds). The delay increases exponentially with each retry up to a maximum\n * specified by maxRetryDelayInMs.\n */\n retryDelayInMs?: number;\n\n /**\n * The maximum delay in milliseconds allowed before retrying an operation. 
Defaults\n * to 90000 (90 seconds).\n */\n maxRetryDelayInMs?: number;\n\n /**\n * Currently supporting only Exponential mode.\n */\n mode?: RetryMode;\n}\n\nexport const DefaultRetryOptions: RetryOptions = {\n maxRetries: DEFAULT_CLIENT_RETRY_COUNT,\n retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL,\n maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n};\n\n/**\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @param nextPolicy - The next RequestPolicy in the pipeline chain.\n * @param options - The options for this RequestPolicy.\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\nasync function retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n function shouldPolicyRetry(responseParam?: HttpOperationResponse): boolean {\n const statusCode = responseParam?.status;\n if (statusCode === 503 && response?.headers.get(Constants.HeaderConstants.RETRY_AFTER)) {\n return false;\n }\n\n if (\n statusCode === undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n return true;\n }\n\n retryData = updateRetryData(\n {\n retryInterval: policy.retryInterval,\n minRetryInterval: 0,\n maxRetryInterval: policy.maxRetryInterval,\n },\n retryData,\n requestError\n );\n\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) {\n logger.info(`Retrying request in ${retryData.retryInterval}`);\n try {\n await delay(retryData.retryInterval);\n const res = await policy._nextPolicy.sendRequest(request.clone());\n return retry(policy, request, res, retryData);\n } catch (err: any) {\n return retry(policy, request, response, retryData, err);\n }\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n throw err;\n } else {\n return 
response;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Debugger } from \"@azure/logger\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Sanitizer } from \"../util/sanitizer\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger as coreLogger } from \"../log\";\n\n/**\n * Options to pass to the {@link logPolicy}.\n * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged.\n */\nexport interface LogPolicyOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to:\n * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id,\n * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials,\n * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers,\n * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding,\n * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match,\n * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent.\n *\n * Any headers specified in this field will be added to that list.\n * Any other values will be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n\n /**\n * The Debugger (logger) instance to use for writing pipeline logs.\n */\n logger?: Debugger;\n}\n\n/**\n * Creates a policy that logs information about the outgoing request and the incoming responses.\n * @param loggingOptions - Logging options.\n * @returns An instance of the {@link LogPolicy}\n */\nexport function logPolicy(loggingOptions: LogPolicyOptions = {}): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new LogPolicy(nextPolicy, options, loggingOptions);\n },\n };\n}\n\n/**\n * A policy that logs information about the outgoing request and the incoming responses.\n */\nexport class LogPolicy extends BaseRequestPolicy {\n logger: Debugger;\n sanitizer: Sanitizer;\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedHeaderNames(): Set {\n return this.sanitizer.allowedHeaderNames;\n }\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. 
Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedHeaderNames(allowedHeaderNames: Set) {\n this.sanitizer.allowedHeaderNames = allowedHeaderNames;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedQueryParameters(): Set {\n return this.sanitizer.allowedQueryParameters;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedQueryParameters(allowedQueryParameters: Set) {\n this.sanitizer.allowedQueryParameters = allowedQueryParameters;\n }\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n {\n logger = coreLogger.info,\n allowedHeaderNames = [],\n allowedQueryParameters = [],\n }: LogPolicyOptions = {}\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters });\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!this.logger.enabled) return this._nextPolicy.sendRequest(request);\n\n this.logRequest(request);\n return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response));\n }\n\n private logRequest(request: WebResourceLike): void {\n this.logger(`Request: ${this.sanitizer.sanitize(request)}`);\n }\n\n private logResponse(response: HttpOperationResponse): HttpOperationResponse {\n this.logger(`Response status code: ${response.status}`);\n this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`);\n return response;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Mapper } from \"./serializer\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\n\n/**\n * A path which describes how to access a particular property in a given object data source. 
May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values.\n */\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter - The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { MapperType, Serializer } from \"./serializer\";\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { HttpMethods } from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\n\n/**\n * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. 
This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The media type of the request body.\n * This value can be used to aide in serialization if it is provided.\n */\n readonly mediaType?:\n | \"json\"\n | \"xml\"\n | \"form\"\n | \"binary\"\n | \"multipart\"\n | \"text\"\n | \"unknown\"\n | string;\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\n/**\n * Gets the list of status codes for streaming responses.\n * @internal\n */\nexport function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set {\n const result = new Set();\n for (const statusCode in operationSpec.responses) {\n const operationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result.add(Number(statusCode));\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as os from \"os\";\nimport { Constants } from \"../util/constants\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Telemetry information. 
Key/value pairs to include inside the User-Agent string.\n */\nexport type TelemetryInfo = { key?: string; value?: string };\n\n/**\n * Options for adding user agent details to outgoing requests.\n */\nexport interface UserAgentOptions {\n /**\n * String prefix to add to the user agent for outgoing requests.\n * Defaults to an empty string.\n */\n userAgentPrefix?: string;\n}\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"core-http\",\n value: Constants.coreHttpVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\n/**\n * The default approach to generate user agents.\n * Uses static information from this package, plus system information available from the runtime.\n */\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\n/**\n * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n * @param userAgentData - Telemetry information.\n * @returns A new {@link UserAgentPolicy}.\n */\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key === undefined || userAgentData.key === null\n ? getDefaultUserAgentKey()\n : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value === undefined || userAgentData.value === null\n ? 
getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\n/**\n * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n */\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptions,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n /**\n * Adds the user agent header to the outgoing request.\n */\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n /**\n * CSV: Each pair of segments joined by a single comma.\n */\n Csv = \",\",\n /**\n * SSV: Each pair of segments joined by a single space character.\n */\n Ssv = \" \",\n /**\n * TSV: Each pair of segments joined by a single tab character.\n */\n Tsv = \"\\t\",\n /**\n * Pipes: Each pair of segments joined by a single pipe character.\n */\n Pipes = \"|\",\n /**\n * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2`\n */\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"../policies/requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nexport const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 
1000 * 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 
0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\nexport function bearerTokenAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n const getToken = createTokenCycler(credential, scopes /* , options */);\n\n class BearerTokenAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const { token } = await getToken({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n });\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new BearerTokenAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n 
RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\n/**\n * Returns a request policy factory that can be used to create an instance of\n * {@link DisableResponseDecompressionPolicy}.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DisableResponseDecompressionPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * A policy to disable response decompression according to Accept-Encoding header\n * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding\n */\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of DisableResponseDecompressionPolicy.\n *\n * @param nextPolicy -\n * @param options -\n */\n // The parent constructor is protected.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResource): Promise {\n request.decompressResponse = false;\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that assigns a unique request id to outgoing requests.\n * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request.\n */\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, request.requestId);\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\n\nlet cachedHttpClient: HttpClient | undefined;\n\nexport function getCachedDefaultHttpClient(): HttpClient {\n if (!cachedHttpClient) {\n cachedHttpClient = new DefaultHttpClient();\n }\n\n return cachedHttpClient;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// BaseRequestPolicy has a protected constructor.\n/* eslint-disable @typescript-eslint/no-useless-constructor */\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from 
\"../webResource\";\n\nexport function ndJsonPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new NdJsonPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * NdJsonPolicy that formats a JSON array as newline-delimited JSON\n */\nclass NdJsonPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends a request.\n */\n public async sendRequest(request: WebResourceLike): Promise {\n // There currently isn't a good way to bypass the serializer\n if (typeof request.body === \"string\" && request.body.startsWith(\"[\")) {\n const body = JSON.parse(request.body);\n if (Array.isArray(body)) {\n request.body = body.map((item) => JSON.stringify(item) + \"\\n\").join(\"\");\n }\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getEnvironmentValue } from \"../util/utils\";\n\n/**\n * Stores the patterns specified in NO_PROXY environment variable.\n * @internal\n */\nexport const globalNoProxyList: string[] = [];\nlet noProxyListLoaded: boolean = false;\n\n/** A cache of whether a host should bypass the proxy. */\nconst globalBypassedMap: Map = new Map();\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n/**\n * Check whether the host of a given `uri` matches any pattern in the no proxy list.\n * If there's a match, any request sent to the same host shouldn't have the proxy settings set.\n * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\n */\nfunction isBypassed(\n uri: string,\n noProxyList: string[],\n bypassedMap?: Map\n): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (bypassedMap?.has(host)) {\n return bypassedMap.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n bypassedMap?.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n noProxyListLoaded = true;\n if (noProxy) {\n return noProxy\n .split(\",\")\n 
.map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed.\n * @param proxyUrl - URL of the proxy\n * @returns The default proxy settings, or undefined.\n */\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\n/**\n * A policy that allows one to apply proxy settings to all requests.\n * If not passed static settings, they will be retrieved from the HTTPS_PROXY\n * or HTTP_PROXY environment variables.\n * @param proxySettings - ProxySettings to use on each request.\n * @param options - additional settings, for example, custom NO_PROXY patterns\n */\nexport function proxyPolicy(\n proxySettings?: ProxySettings,\n options?: {\n /** a list of patterns to override those loaded from NO_PROXY environment variable. */\n customNoProxyList?: string[];\n }\n): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n if (!noProxyListLoaded) {\n globalNoProxyList.push(...loadNoProxy());\n }\n return {\n create: (nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions) => {\n return new ProxyPolicy(\n nextPolicy,\n requestPolicyOptions,\n proxySettings!,\n options?.customNoProxyList\n );\n },\n };\n}\n\nfunction extractAuthFromUrl(url: string): {\n username?: string;\n password?: string;\n urlWithoutAuth: string;\n} {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public proxySettings: ProxySettings,\n private customNoProxyList?: string[]\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (\n !request.proxySettings &&\n !isBypassed(\n request.url,\n this.customNoProxyList ?? globalNoProxyList,\n this.customNoProxyList ? 
undefined : globalBypassedMap\n )\n ) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"../util/utils\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param originalRequest - The original request\n * @param reuseUrlToo - Should the url from the original request be reused as well. Default false.\n * @returns A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param body - The response body received after making the original request.\n * @returns The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err: any) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param url - The original request url\n * @returns The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param provider - The provider name to be registered.\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n */\nasync function registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n const response = await policy._nextPolicy.sendRequest(reqOptions);\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param url - The request url for polling\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns True if RP Registration is successful.\n */\nasync function getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n const res = await policy._nextPolicy.sendRequest(reqOptions);\n const obj = res.parsedBody;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n await delay(policy._retryTimeout * 1000);\n return getRegistrationStatus(policy, url, originalRequest);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n * @param authenticationProvider - The authentication provider.\n * @returns An instance of the {@link SigningPolicy}.\n */\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\n/**\n * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n */\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - Maximum number of retries.\n * @param 
retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n * @returns An instance of the {@link SystemErrorRetryPolicy}\n */\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n\n function shouldPolicyRetry(_response?: HttpOperationResponse, error?: RetryError): boolean {\n if (\n error &&\n error.code &&\n (error.code === \"ETIMEDOUT\" ||\n error.code === \"ESOCKETTIMEDOUT\" ||\n error.code === \"ECONNREFUSED\" ||\n error.code === \"ECONNRESET\" ||\n error.code === \"ENOENT\")\n ) {\n return true;\n }\n return false;\n }\n\n if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (nestedErr: any) {\n return retry(policy, request, operationResponse, nestedErr, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Maximum number of retries for the throttling retry policy\n */\nexport const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { Constants } from \"../util/constants\";\nimport { DEFAULT_CLIENT_MAX_RETRY_COUNT } from \"../util/throttlingRetryStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"@azure/core-util\";\n\ntype ResponseHandler = (\n httpRequest: WebResourceLike,\n response: HttpOperationResponse\n) => Promise;\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n * @returns\n */\nexport function throttlingRetryPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ThrottlingRetryPolicy(nextPolicy, options);\n },\n };\n}\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn 
more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private _handleResponse: ResponseHandler;\n private numberOfRetries = 0;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n _handleResponse?: ResponseHandler\n ) {\n super(nextPolicy, options);\n this._handleResponse = _handleResponse || this._defaultResponseHandler;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n const response = await this._nextPolicy.sendRequest(httpRequest.clone());\n if (\n response.status !== StatusCodes.TooManyRequests &&\n response.status !== StatusCodes.ServiceUnavailable\n ) {\n return response;\n } else {\n return this._handleResponse(httpRequest, response);\n }\n }\n\n private async _defaultResponseHandler(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse\n ): Promise {\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader) {\n const delayInMs: number | undefined =\n ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);\n if (delayInMs) {\n this.numberOfRetries += 1;\n\n await delay(delayInMs, {\n abortSignal: httpRequest.abortSignal,\n abortErrorMsg: StandardAbortMessage,\n });\n\n if (httpRequest.abortSignal?.aborted) {\n throw new AbortError(StandardAbortMessage);\n }\n\n if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) {\n return this.sendRequest(httpRequest);\n } else {\n return this._nextPolicy.sendRequest(httpRequest);\n }\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? 
undefined : diff;\n } catch (error: any) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n Span,\n SpanKind,\n SpanStatusCode,\n createSpanFunction,\n getTraceParentHeader,\n isSpanContextValid,\n} from \"@azure/core-tracing\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger } from \"../log\";\n\nconst createSpan = createSpanFunction({\n packagePrefix: \"\",\n namespace: \"\",\n});\n\n/**\n * Options to customize the tracing policy.\n */\nexport interface TracingPolicyOptions {\n /**\n * User agent used to better identify the outgoing requests traced by the tracing policy.\n */\n userAgent?: string;\n}\n\n/**\n * Creates a policy that wraps outgoing requests with a tracing span.\n * @param tracingOptions - Tracing options.\n * @returns An instance of the {@link TracingPolicy} class.\n */\nexport function tracingPolicy(tracingOptions: TracingPolicyOptions = {}): RequestPolicyFactory {\n return {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n return new TracingPolicy(nextPolicy, options, tracingOptions);\n },\n };\n}\n\n/**\n * A policy that wraps outgoing requests with a tracing span.\n */\nexport class TracingPolicy extends BaseRequestPolicy {\n private userAgent?: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n tracingOptions: TracingPolicyOptions\n ) {\n super(nextPolicy, options);\n this.userAgent = tracingOptions.userAgent;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n if (!request.tracingContext) {\n return this._nextPolicy.sendRequest(request);\n }\n\n const span = this.tryCreateSpan(request);\n\n if (!span) {\n return this._nextPolicy.sendRequest(request);\n }\n\n try {\n const response = await this._nextPolicy.sendRequest(request);\n this.tryProcessResponse(span, response);\n return response;\n } catch (err: any) {\n this.tryProcessError(span, err);\n throw err;\n }\n }\n\n tryCreateSpan(request: WebResourceLike): Span | undefined {\n try {\n // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier.\n // We can pass this as a separate parameter once we upgrade to the latest core-tracing.\n const { span } = createSpan(`HTTP ${request.method}`, {\n tracingOptions: {\n spanOptions: {\n ...(request as any).spanOptions,\n kind: SpanKind.CLIENT,\n },\n tracingContext: request.tracingContext,\n },\n });\n\n // If the span is not recording, don't do any more work.\n if (!span.isRecording()) {\n span.end();\n return undefined;\n }\n\n const namespaceFromContext = request.tracingContext?.getValue(Symbol.for(\"az.namespace\"));\n\n if (typeof namespaceFromContext === \"string\") {\n span.setAttribute(\"az.namespace\", namespaceFromContext);\n }\n\n span.setAttributes({\n \"http.method\": request.method,\n \"http.url\": request.url,\n requestId: request.requestId,\n });\n\n if (this.userAgent) {\n span.setAttribute(\"http.user_agent\", this.userAgent);\n }\n\n // set headers\n const spanContext = span.spanContext();\n const traceParentHeader = getTraceParentHeader(spanContext);\n if (traceParentHeader && isSpanContextValid(spanContext)) {\n request.headers.set(\"traceparent\", traceParentHeader);\n const traceState = spanContext.traceState && 
spanContext.traceState.serialize();\n // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent\n if (traceState) {\n request.headers.set(\"tracestate\", traceState);\n }\n }\n return span;\n } catch (error: any) {\n logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`);\n return undefined;\n }\n }\n\n private tryProcessError(span: Span, err: any): void {\n try {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: err.message,\n });\n\n if (err.statusCode) {\n span.setAttribute(\"http.status_code\", err.statusCode);\n }\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n\n private tryProcessResponse(span: Span, response: HttpOperationResponse): void {\n try {\n span.setAttribute(\"http.status_code\", response.status);\n const serviceRequestId = response.headers.get(\"x-ms-request-id\");\n if (serviceRequestId) {\n span.setAttribute(\"serviceRequestId\", serviceRequestId);\n }\n span.setStatus({\n code: SpanStatusCode.OK,\n });\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"./util/utils\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport {\n DefaultDeserializationOptions,\n DeserializationContentTypes,\n deserializationPolicy,\n} from \"./policies/deserializationPolicy\";\nimport { DefaultKeepAliveOptions, keepAlivePolicy } from \"./policies/keepAlivePolicy\";\nimport { DefaultRedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport { DefaultRetryOptions, exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nimport {\n OperationParameter,\n ParameterPath,\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n} from \"./operationParameter\";\nimport { OperationSpec, getStreamResponseStatusCodes } from \"./operationSpec\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResource,\n WebResourceLike,\n isWebResourceLike,\n} from \"./webResource\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./policies/requestPolicy\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { isNode } from \"@azure/core-util\";\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport {\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n userAgentPolicy,\n} from \"./policies/userAgentPolicy\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { InternalPipelineOptions } from \"./pipelineOptions\";\nimport { OperationArguments } from \"./operationArguments\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { URLBuilder } from \"./url\";\nimport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nimport { disableResponseDecompressionPolicy } from 
\"./policies/disableResponseDecompressionPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport { getCachedDefaultHttpClient } from \"./httpClientCache\";\nimport { logger } from \"./log\";\nimport { ndJsonPolicy } from \"./policies/ndJsonPolicy\";\nimport { proxyPolicy } from \"./policies/proxyPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { stringifyXML } from \"./util/xml\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { tracingPolicy } from \"./policies/tracingPolicy\";\n\n/**\n * Options to configure a proxy for outgoing requests (Node.js only).\n */\nexport interface ProxySettings {\n /**\n * The proxy's host address.\n */\n host: string;\n\n /**\n * The proxy host's port.\n */\n port: number;\n\n /**\n * The user name to authenticate with the proxy, if required.\n */\n username?: string;\n\n /**\n * The password to authenticate with the proxy, if required.\n */\n password?: string;\n}\n\n/**\n * An alias of {@link ProxySettings} for future use.\n */\nexport type ProxyOptions = ProxySettings;\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. 
If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-useragent\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * If specified, will be used to build the BearerTokenAuthenticationPolicy.\n */\n credentialScopes?: string | string[];\n}\n\n/**\n * ServiceClient sends service requests and receives responses.\n */\nexport class ServiceClient {\n /**\n * If specified, this is the base URI that requests will be made against for this ServiceClient.\n * If it is not specified, then all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptions;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @param credentials - The credentials used for authentication with the service.\n * @param options - The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: TokenCredential | ServiceClientCredentials,\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || getCachedDefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n logger.info(\"ServiceClient: using custom request policies\");\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n let authPolicyFactory: RequestPolicyFactory | undefined = undefined;\n if (isTokenCredential(credentials)) {\n logger.info(\n \"ServiceClient: creating bearer token authentication policy from provided credentials\"\n );\n // Create a wrapped RequestPolicyFactory here so that we can provide the\n // correct scope to the BearerTokenAuthenticationPolicy at the first time\n // one is requested. 
This is needed because generated ServiceClient\n // implementations do not set baseUri until after ServiceClient's constructor\n // is finished, leaving baseUri empty at the time when it is needed to\n // build the correct scope name.\n const wrappedPolicyFactory: () => RequestPolicyFactory = () => {\n let bearerTokenPolicyFactory: RequestPolicyFactory | undefined = undefined;\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const serviceClient = this;\n const serviceClientOptions = options;\n return {\n create(nextPolicy: RequestPolicy, createOptions: RequestPolicyOptions): RequestPolicy {\n const credentialScopes = getCredentialScopes(\n serviceClientOptions,\n serviceClient.baseUri\n );\n\n if (!credentialScopes) {\n throw new Error(\n `When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`\n );\n }\n\n if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) {\n bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(\n credentials,\n credentialScopes\n );\n }\n\n return bearerTokenPolicyFactory.create(nextPolicy, createOptions);\n },\n };\n };\n\n authPolicyFactory = wrappedPolicyFactory();\n } else if (credentials && typeof credentials.signRequest === \"function\") {\n logger.info(\"ServiceClient: creating signing policy from provided credentials\");\n authPolicyFactory = signingPolicy(credentials);\n } else if (credentials !== undefined && credentials !== null) {\n throw new Error(\"The credentials argument must implement the TokenCredential interface\");\n }\n\n logger.info(\"ServiceClient: using default request policies\");\n requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options);\n if (options.requestPolicyFactories) {\n // options.requestPolicyFactories can also be a function that manipulates\n // the default requestPolicyFactories array\n const newRequestPolicyFactories: void | RequestPolicyFactory[] =\n options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error: any) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.\n * @param operationSpec - The OperationSpec to use to populate the httpRequest.\n * @param callback - The callback to call 
when the response is received.\n */\n async sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const serializerOptions = operationArguments.options?.serializerOptions;\n const httpRequest: WebResourceLike = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter),\n serializerOptions\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue !== undefined && queryParameterValue !== null) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter),\n serializerOptions\n );\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null\n ) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if (queryParameterValue.length === 0) {\n // The collection is empty, no need to try serializing the current queryParam\n continue;\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] =\n item === undefined || item === null ? 
\"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType && operationSpec.requestBody) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue !== undefined && headerValue !== null) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter),\n serializerOptions\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n\n if (options.spanOptions) {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n (httpRequest as any).spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n httpRequest.tracingContext = options.tracingContext;\n }\n\n if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) {\n httpRequest.shouldDeserialize = options.shouldDeserialize;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n 
serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseStatusCodes === undefined) {\n httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec);\n }\n\n let rawResponse: HttpOperationResponse;\n let sendRequestError;\n try {\n rawResponse = await this.sendRequest(httpRequest);\n } catch (error: any) {\n sendRequestError = error;\n }\n if (sendRequestError) {\n if (sendRequestError.response) {\n sendRequestError.details = flattenResponse(\n sendRequestError.response,\n operationSpec.responses[sendRequestError.statusCode] ||\n operationSpec.responses[\"default\"]\n );\n }\n result = Promise.reject(sendRequestError);\n } else {\n result = Promise.resolve(\n flattenResponse(rawResponse!, operationSpec.responses[rawResponse!.status])\n );\n }\n } catch (error: any) {\n result = Promise.reject(error);\n }\n\n const cb = callback;\n if (cb) {\n result\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n const serializerOptions = operationArguments.options?.serializerOptions ?? {};\n const updatedOptions: Required = {\n rootName: serializerOptions.rootName ?? \"\",\n includeRoot: serializerOptions.includeRoot ?? false,\n xmlCharKey: serializerOptions.xmlCharKey ?? XML_CHARKEY,\n };\n\n const xmlCharKey = serializerOptions.xmlCharKey;\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } =\n bodyMapper;\n const typeName = bodyMapper.type.name;\n\n try {\n if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString,\n updatedOptions\n );\n\n const isStream = typeName === MapperType.Stream;\n\n if (operationSpec.isXML) {\n const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : \"xmlns\";\n const value = getXmlValueWithNamespace(\n xmlNamespace,\n xmlnsKey,\n typeName,\n httpRequest.body,\n updatedOptions\n );\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n value,\n xmlElementName || xmlName || serializedName!,\n xmlnsKey,\n xmlNamespace\n ),\n {\n rootName: xmlName || serializedName,\n xmlCharKey,\n }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(value, {\n rootName: xmlName || serializedName,\n xmlCharKey,\n });\n }\n } else if (\n typeName === MapperType.String &&\n (operationSpec.contentType?.match(\"text/plain\") || operationSpec.mediaType === \"text\")\n ) {\n // the String serializer has validated that request body is a string\n // so just send the string.\n return;\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error: any) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue !== undefined && formDataParameterValue !== null) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter),\n updatedOptions\n );\n }\n }\n }\n}\n\n/**\n * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself\n */\nfunction getXmlValueWithNamespace(\n xmlNamespace: string | undefined,\n xmlnsKey: string,\n typeName: string,\n serializedValue: any,\n options: Required\n): any {\n // Composite and Sequence schemas already got their root namespace set during serialization\n // We just need to add xmlns to the other schema types\n if (xmlNamespace && ![\"Composite\", \"Sequence\", \"Dictionary\"].includes(typeName)) {\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace };\n return result;\n }\n\n return serializedValue;\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n authPolicyFactory: RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (authPolicyFactory) {\n factories.push(authPolicyFactory);\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const 
userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n factories.push(redirectPolicy());\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n if (isNode) {\n factories.push(proxyPolicy(options.proxySettings));\n }\n\n factories.push(logPolicy({ logger: logger.info }));\n\n return factories;\n}\n\n/**\n * Creates an HTTP pipeline based on the given options.\n * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client.\n * @param authPolicyFactory - An optional authentication policy factory to use for signing requests.\n * @returns A set of options that can be passed to create a new {@link ServiceClient}.\n */\nexport function createPipelineFromOptions(\n pipelineOptions: InternalPipelineOptions,\n authPolicyFactory?: RequestPolicyFactory\n): ServiceClientOptions {\n const requestPolicyFactories: RequestPolicyFactory[] = [];\n\n if (pipelineOptions.sendStreamingJson) {\n requestPolicyFactories.push(ndJsonPolicy());\n }\n\n let userAgentValue = undefined;\n if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) {\n const userAgentInfo: string[] = [];\n userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix);\n\n // Add the default user agent value if it isn't already specified\n // by the userAgentPrefix option.\n const defaultUserAgentInfo = getDefaultUserAgentValue();\n if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) {\n userAgentInfo.push(defaultUserAgentInfo);\n }\n\n userAgentValue = userAgentInfo.join(\" \");\n }\n\n const keepAliveOptions = {\n ...DefaultKeepAliveOptions,\n ...pipelineOptions.keepAliveOptions,\n };\n\n const retryOptions = {\n ...DefaultRetryOptions,\n ...pipelineOptions.retryOptions,\n };\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...pipelineOptions.redirectOptions,\n };\n\n if (isNode) {\n requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));\n }\n\n const deserializationOptions = {\n ...DefaultDeserializationOptions,\n ...pipelineOptions.deserializationOptions,\n };\n\n const loggingOptions: LogPolicyOptions = {\n ...pipelineOptions.loggingOptions,\n };\n\n requestPolicyFactories.push(\n tracingPolicy({ userAgent: userAgentValue }),\n keepAlivePolicy(keepAliveOptions),\n userAgentPolicy({ value: userAgentValue }),\n generateClientRequestIdPolicy(),\n deserializationPolicy(deserializationOptions.expectedContentTypes),\n throttlingRetryPolicy(),\n systemErrorRetryPolicy(),\n exponentialRetryPolicy(\n retryOptions.maxRetries,\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n )\n );\n\n if (redirectOptions.handleRedirects) {\n requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n if (authPolicyFactory) {\n requestPolicyFactories.push(authPolicyFactory);\n }\n\n requestPolicyFactories.push(logPolicy(loggingOptions));\n\n if (isNode && pipelineOptions.decompressResponse === false) {\n requestPolicyFactories.push(disableResponseDecompressionPolicy());\n }\n\n return {\n httpClient: 
pipelineOptions.httpClient,\n requestPolicyFactories,\n };\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n const serializerOptions = operationArguments.options?.serializerOptions;\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions);\n if (propertyValue !== undefined && propertyValue !== null) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent !== undefined && parent !== null && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\n/**\n * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}).\n * @param _response - Wrapper object for http response.\n * @param responseSpec - Mappers for how to parse the response properties.\n * @returns - A normalized response object.\n */\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = >(\n obj: T\n ): T & {\n _response: HttpOperationResponse;\n } => {\n return Object.defineProperty(obj, \"_response\", {\n value: _response,\n }) as T & {\n _response: HttpOperationResponse;\n };\n };\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n const arrayResponse = [...(_response.parsedBody || [])] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of 
Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n\nfunction getCredentialScopes(\n options?: ServiceClientOptions,\n baseUri?: string\n): string | string[] | undefined {\n if (options?.credentialScopes) {\n return options.credentialScopes;\n }\n\n if (baseUri) {\n return `${baseUri}/.default`;\n }\n return undefined;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// NOTE: we've moved this code into core-tracing but these functions\n// were a part of the GA'd library and can't be removed until the next major\n// release. They currently get called always, even if tracing is not enabled.\n\nimport { Span, createSpanFunction as coreTracingCreateSpanFunction } from \"@azure/core-tracing\";\nimport { OperationOptions } from \"./operationOptions\";\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. Use core-tracing instead.\n * @hidden\n */\nexport interface SpanConfig {\n /**\n * Package name prefix\n */\n packagePrefix: string;\n /**\n * Service namespace\n */\n namespace: string;\n}\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. 
Use createSpanFunction in core-tracing.\n * @hidden\n\n * @param spanConfig - The name of the operation being performed.\n * @param tracingOptions - The options for the underlying http request.\n */\nexport function createSpanFunction(\n args: SpanConfig\n): (\n operationName: string,\n operationOptions: T\n) => { span: Span; updatedOptions: T } {\n return coreTracingCreateSpanFunction(args);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken } from \"@azure/core-auth\";\n\n/**\n * Defines the default token refresh buffer duration.\n */\nexport const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes\n\n/**\n * Provides a cache for an AccessToken that was that\n * was returned from a TokenCredential.\n */\nexport interface AccessTokenCache {\n /**\n * Sets the cached token.\n *\n * @param accessToken - The {@link AccessToken} to be cached or null to\n * clear the cached token.\n */\n setCachedToken(accessToken: AccessToken | undefined): void;\n\n /**\n * Returns the cached {@link AccessToken} or undefined if nothing is cached.\n */\n getCachedToken(): AccessToken | undefined;\n}\n\n/**\n * Provides an {@link AccessTokenCache} implementation which clears\n * the cached {@link AccessToken}'s after the expiresOnTimestamp has\n * passed.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class ExpiringAccessTokenCache implements AccessTokenCache {\n private tokenRefreshBufferMs: number;\n private cachedToken?: AccessToken = undefined;\n\n /**\n * Constructs an instance of {@link ExpiringAccessTokenCache} with\n * an optional expiration buffer time.\n */\n constructor(tokenRefreshBufferMs: number = TokenRefreshBufferMs) {\n this.tokenRefreshBufferMs = tokenRefreshBufferMs;\n }\n\n /**\n * Saves an access token into the internal in-memory cache.\n * @param accessToken - Access token or undefined to clear the cache.\n */\n setCachedToken(accessToken: AccessToken | undefined): void {\n this.cachedToken = accessToken;\n }\n\n /**\n * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon.\n */\n getCachedToken(): AccessToken | undefined {\n if (\n this.cachedToken &&\n Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp\n ) {\n this.cachedToken = undefined;\n }\n\n return this.cachedToken;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\n\n/**\n * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class AccessTokenRefresher {\n private promise: Promise | undefined;\n private lastCalled = 0;\n\n constructor(\n private credential: TokenCredential,\n private scopes: string | string[],\n private requiredMillisecondsBeforeNewRefresh: number = 30000\n ) {}\n\n /**\n * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying\n * that we are ready for a new refresh.\n */\n public isReady(): boolean {\n // We're only ready for a new refresh if the required milliseconds have passed.\n return (\n !this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh\n );\n }\n\n /**\n * Stores the time in which it is called,\n * then requests a new token,\n * then sets this.promise to undefined,\n * then returns 
the token.\n */\n private async getToken(options: GetTokenOptions): Promise {\n this.lastCalled = Date.now();\n const token = await this.credential.getToken(this.scopes, options);\n this.promise = undefined;\n return token || undefined;\n }\n\n /**\n * Requests a new token if we're not currently waiting for a new token.\n * Returns null if the required time between each call hasn't been reached.\n */\n public refresh(options: GetTokenOptions): Promise {\n if (!this.promise) {\n this.promise = this.getToken(options);\n }\n\n return this.promise;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\n/**\n * A simple {@link ServiceClientCredential} that authenticates with a username and a password.\n */\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n /**\n * Username\n */\n userName: string;\n\n /**\n * Password\n */\n password: string;\n\n /**\n * Authorization scheme. Defaults to \"Basic\".\n * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes\n */\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @param userName - User name.\n * @param password - Password.\n * @param authorizationScheme - The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be 
applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ApiKeyCredentialOptions, ApiKeyCredentials } from \"./apiKeyCredentials\";\n\n/**\n * A {@link TopicCredentials} object used for Azure Event Grid.\n */\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @param topicKey - The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n 
}\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","tunnel","inspect","createClientLogger","Transform","abortController","AbortController","AbortError","FormData","https","http","node_fetch","HttpPipelineLogLevel","__rest","xml2js","RetryMode","retry","delay","logger","coreLogger","os","QueryCollectionFormat","DefaultHttpClient","utils.generateUuid","createSpanFunction","SpanKind","getTraceParentHeader","isSpanContextValid","SpanStatusCode","isTokenCredential","utils.prepareXMLRootList","isNode","utils.isPrimitiveType","coreTracingCreateSpanFunction","base64.encodeString"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AAEA;;AAEG;AACH,SAAS,YAAY,CAAC,UAAkB,EAAA;AACtC,IAAA,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4EK,SAAU,iBAAiB,CAAC,MAAgB,EAAA;AAChD,IAAA,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAWlB,CAAC;AACF,QAAA,IACE,OAAO,UAAU,CAAC,UAAU,KAAK,UAAU;AAC3C,YAAA,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU;AACtC,YAAA,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;AACpC,YAAA,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;AACpC,YAAA,OAAO,UAAU,CAAC,QAAQ,KAAK,UAAU;AACzC,YAAA,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU;AACvC,YAAA,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;AAC7C,YAAA,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;AAC7C,YAAA,OAAO,UAAU,CAAC,WAAW,KAAK,UAAU;AAC5C,YAAA,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU,EACvC;AACA,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AACF,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;AAEG;MACU,WAAW,CAAA;AAGtB,IAAA,WAAA,CAAY,UAA2B,EAAA;AACrC,QAAA,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;AACtB,QAAA,IAAI,UAAU,EAAE;AACd,YAAA,KAAK,MAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;AAC9C,aAAA;AACF,SAAA;KACF;AAED;;;;;AAKG;IACI,GAAG,CAAC,UAAkB,EAAE,WAA4B,EAAA;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;AAC3C,YAAA,IAAI,EAAE,UAAU;AAChB,YAAA,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE;SACrC,CAAC;KACH;AAED;;;;AAIG;AACI,IAAA,GAAG,CAAC,UAAkB,EAAA;QAC3B,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;AACtE,QAAA,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;KAC3C;AAED;;AAEG;AACI,IAAA,QAAQ,CAAC,UAAkB,EAAA;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;KACrD;AAED;;;;AAIG;AACI,IAAA,MAAM,CAAC,UAAkB,EAAA;QAC9B,MAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;AAClD,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;AAEG;IACI,UAAU,GAAA;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;KAC5C;AAED;;AAEG;IACI,YAAY,GAAA;QACjB,MAAM,OAAO,GAAiB,EAAE,CAAC;AACjC,QAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;AAC3C,SAAA;AACD,QAAA,OAAO,OAAO,CAAC;KAChB;AAED;;AAEG;IACI,WAAW,GAAA;QAChB,MAAM,WAAW,GAAa,EAAE,CAAC;AACjC,QAAA,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;AAClD,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACnC,SAAA;AACD,QAAA,OAAO,WAAW,CAAC;KACpB;AAED;;AAEG;IACI,YAAY,GAAA;QACjB,MAAM,YAAY,GAAa,EAAE,CAAC;AAClC,QAAA,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;AAClD,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;AACrC,SAAA;AACD,QAAA,OAAO,YAAY,CAAC;KACrB;AAED;;AAEG;IACI,MAAM,CAAC,UAAsC,EAAE,EAAA;QACpD,MAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,IAAI,OAAO,CAAC,YAAY,EAAE;AACxB,YAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,
MAAM,CAAC,KAAK,CAAC;AACpC,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;AACvD,gBAAA,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;AAClD,aAAA;AACF,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;AAEG;IACI,QAAQ,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;KAC5D;AAED;;AAEG;IACI,KAAK,GAAA;QACV,MAAM,sBAAsB,GAAmB,EAAE,CAAC;AAClD,QAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,sBAAsB,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;AACpD,SAAA;AACD,QAAA,OAAO,IAAI,WAAW,CAAC,sBAAsB,CAAC,CAAC;KAChD;AACF;;AC5PD;AACA;AAEA;;;AAGG;AACG,SAAU,YAAY,CAAC,KAAa,EAAA;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;AAGG;AACG,SAAU,eAAe,CAAC,KAAiB,EAAA;;;IAG/C,MAAM,WAAW,GAAG,KAAK,YAAY,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;AAC/F,IAAA,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;AAGG;AACG,SAAU,YAAY,CAAC,KAAa,EAAA;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC;;AC5BA;AACA;AACA;;AAEG;AACU,MAAA,SAAS,GAAG;AACvB;;AAEG;AACH,IAAA,eAAe,EAAE,OAAO;AAExB;;AAEG;AACH,IAAA,IAAI,EAAE,OAAO;AAEb;;AAEG;AACH,IAAA,KAAK,EAAE,QAAQ;AAEf;;AAEG;AACH,IAAA,UAAU,EAAE,YAAY;AAExB;;AAEG;AACH,IAAA,WAAW,EAAE,aAAa;AAE1B;;AAEG;AACH,IAAA,QAAQ,EAAE,UAAU;AAEpB;;AAEG;AACH,IAAA,SAAS,EAAE,WAAW;AAEtB,IAAA,aAAa,EAAE;AACb;;AAEG;AACH,QAAA,SAAS,EAAE;AACT,YAAA,GAAG,EAAE,KAAK;AACV,YAAA,GAAG,EAAE,KAAK;AACV,YAAA,MAAM,EAAE,QAAQ;AAChB,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,KAAK,EAAE,OAAO;AACd,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,KAAK,EAAE,OAAO;AACf,SAAA;AAED,QAAA,WAAW,EAAE;AACX,YAAA,eAAe,EAAE,GAAG;AACpB,YAAA,kBAAkB,EAAE,GAAG;AACxB,SAAA;AACF,KAAA;AAED;;AAEG;AACH,IAAA,eAAe,EAAE;AACf;;AAEG;AACH,QAAA,aAAa,EAAE,eAAe;AAE9B,QAAA,oBAAoB,EAAE,QAAQ;AAE9B;;;;AAIG;AACH,QAAA,WAAW,EAAE,aAAa;AAE1B;;AAEG;AACH,QAAA,UAAU,EAAE,YAAY;AACzB,KAAA;;;ACnFH;AACA;AAEA;;AAEG;AACI,MAAM,WAAW,GAAG,IAAI;AAC/B;;AAEG;AACI,MAAM,WAAW,GAAG;;ACV3B;AAUA,MAAM,cAAc,GAClB,gFAAgF,CAAC;AAYnF;;;;;AAKG;AACG,SAAU,SAAS,CAAC,GAAW,EAAA;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;AAC3B,SAAA,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;AACpB,SAAA,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;AACpB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;AACrB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;AACrB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;AAMG;AACG,SAAU,aAAa,CAAC,QAA+B,EAAA;IAC3D,MAAM,gBAAgB,GAAQ,EAAE,CAAC;AACjC,IAAA,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;AAC5C,IAAA,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;AAC5C,IAAA,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;AAC1C,IAAA,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;AAMG;AACG,SAAU,YAAY,CAAC,OAAwB,EAAA;AACnD,IAAA,MAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;AAC3B,QAAA,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;AACjD,KAAA;AACD,IAAA,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;AAKG;AACG,SAAU,WAAW,CAAC,IAAY,EAAA;AACtC,IAAA,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED;;;;AAIG;SACa,YAAY,GAAA;IAC1B,OAAOA,OAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;AAQG;AACa,SAAA,2BAA2B,CACzC,gBAA4B,EAC5B,SAAkB,EAAA;IAElB,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;AACxC,IAAA,gBAAgB,CAAC,OAAO,CAAC,CAAC,cAAc,KAAI;AAC1C,QAAA,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AACvC,KAAC,CAAC,CAAC;AACH,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAqBD;;;;;AAKG;AACH;AACM,SAAU,iBAAiB,CAAC,OAAqB,EAAA;AACrD,IAAA,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;AACtC,QAAA,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;AACzD,KAAA;;IAED,OAAO,CAAC,EAAY,KAAU;QAC5B,OAAO;
AACJ,aAAA,IAAI,CAAC,CAAC,IAAS,KAAI;;AAElB,YAAA,OAAO,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;AAC7B,SAAC,CAAC;AACD,aAAA,KAAK,CAAC,CAAC,GAAU,KAAI;;YAEpB,EAAE,CAAC,GAAG,CAAC,CAAC;AACV,SAAC,CAAC,CAAC;AACP,KAAC,CAAC;AACJ,CAAC;AAED;;;;AAIG;AACG,SAAU,wBAAwB,CACtC,OAAuC,EAAA;AAEvC,IAAA,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;AACtC,QAAA,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;AACzD,KAAA;IACD,OAAO,CAAC,EAAsB,KAAU;QACtC,OAAO;AACJ,aAAA,IAAI,CAAC,CAAC,IAA2B,KAAI;AACpC,YAAA,OAAO,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACnF,SAAC,CAAC;AACD,aAAA,KAAK,CAAC,CAAC,GAAU,KAAI;AACpB,YAAA,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;AAC5B,SAAC,CAAC,CAAC;AACP,KAAC,CAAC;AACJ,CAAC;AAEK,SAAU,kBAAkB,CAChC,GAAY,EACZ,WAAmB,EACnB,eAAwB,EACxB,YAAqB,EAAA;AAErB,IAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;AACvB,QAAA,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;AACb,KAAA;AAED,IAAA,IAAI,CAAC,eAAe,IAAI,CAAC,YAAY,EAAE;AACrC,QAAA,OAAO,EAAE,CAAC,WAAW,GAAG,GAAG,EAAE,CAAC;AAC/B,KAAA;IAED,MAAM,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG,GAAG,EAAE,CAAC;IACtC,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,eAAe,GAAG,YAAY,EAAE,CAAC;AAC1D,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;AAIG;AACa,SAAA,WAAW,CAAC,eAAwB,EAAE,WAAkB,EAAA;IACtE,MAAM,mBAAmB,GAAG,eAE3B,CAAC;AACF,IAAA,WAAW,CAAC,OAAO,CAAC,CAAC,UAAU,KAAI;AACjC,QAAA,MAAM,CAAC,mBAAmB,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,KAAI;AAChE,YAAA,mBAAmB,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;AACnE,SAAC,CAAC,CAAC;AACL,KAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,mBAAmB,GACvB,qKAAqK,CAAC;AAExK;;;;AAIG;AACG,SAAU,UAAU,CAAC,KAAa,EAAA;AACtC,IAAA,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;AAMG;SACa,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB,EAAA;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;AAKG;AACG,SAAU,eAAe,CAAC,KAAc,EAAA;AAC5C,IAAA,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,KAAK,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC;AAEK,SAAU,mBAAmB,CAAC,IAAY,EAAA;AAC9C,IAAA,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AACrB,QAAA,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;AAC1B,KAAA;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;AACxC,KAAA;AACD,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAOD;;;AAGG;AACG,SAAU,QAAQ,CAAC,KAAc,EAAA;AACrC,IAAA,QACE,OAAO,KAAK,KAAK,QAAQ;AACzB,QAAA,KAAK,KAAK,IAAI;AACd,QAAA,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;AACrB,QAAA,EAAE,KAAK,YAAY,MAAM,CAAC;AAC1B,QAAA,EAAE,KAAK,YAAY,IAAI,CAAC,EACxB;AACJ;;AC/QA;AAQA;AAEA;;;;AAIG;MACU,UAAU,CAAA;AACrB,IAAA,WAAA;AACE;;AAEG;AACa,IAAA,YAAA,GAAuC,EAAE;AACzD;;AAEG;IACa,KAAe,EAAA;QAJf,IAAY,CAAA,YAAA,GAAZ,YAAY,CAA6B;QAIzC,IAAK,CAAA,KAAA,GAAL,KAAK,CAAU;KAC7B;AAEJ;;;;;;AAMG;AACH,IAAA,mBAAmB,CAAC,MAAc,EAAE,KAAc,EAAE,UAAkB,EAAA;AACpE,QAAA,MAAM,cAAc,GAAG,CACrB,cAAuC,EACvC,eAAoB,KACX;AACT,YAAA,MAAM,IAAI,KAAK,CACb,CAAA,CAAA,EAAI,UAAU,CAAA,cAAA,EAAiB,KAAK,CAAA,iCAAA,EAAoC,cAAc,CAAA,GAAA,EAAM,eAAe,CAAA,CAAA,CAAG,CAC/G,CAAC;AACJ,SAAC,CAAC;AACF,QAAA,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YAC5C,MAAM,aAAa,GAAG,KAAe,CAAC;YACtC,MAAM,EACJ,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,SAAS,EACT,UAAU,EACV,OAAO,EACP,WAAW,GACZ,GAAG,MAAM,CAAC,WAAW,CAAC;AACvB,YAAA,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;AACtE,gBAAA,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACtD,aAAA;AACD,YAAA,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;AACtE,gBAAA,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACtD,aAAA;AACD,YAAA,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EA
AE;AACrE,gBAAA,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACtD,aAAA;AACD,YAAA,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;AACrE,gBAAA,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACtD,aAAA;YACD,MAAM,YAAY,GAAG,KAAc,CAAC;YACpC,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;AAC3D,gBAAA,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;AAC7D,gBAAA,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;AACxC,aAAA;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;AAC3D,gBAAA,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;AAC7D,gBAAA,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;AACxC,aAAA;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,aAAa,GAAG,UAAU,KAAK,CAAC,EAAE;AAC/D,gBAAA,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;AAC1C,aAAA;AACD,YAAA,IAAI,OAAO,EAAE;AACX,gBAAA,MAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;AACpF,gBAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;AAC9D,oBAAA,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,IACE,WAAW;gBACX,YAAY,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,CAAS,EAAE,EAAc,KAAK,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EACnF;AACA,gBAAA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;AAC5C,aAAA;AACF,SAAA;KACF;AAED;;;;;;;;AAQG;IACH,SAAS,CACP,MAAc,EACd,MAAe,EACf,UAAmB,EACnB,UAA6B,EAAE,EAAA;;AAE/B,QAAA,MAAM,cAAc,GAAgC;AAClD,YAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;AAChC,YAAA,WAAW,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;AACzC,YAAA,UAAU,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,OAAO,GAAQ,EAAE,CAAC;AACtB,QAAA,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;AACf,YAAA,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;AACrC,SAAA;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YAC5C,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;AACrB,YAAA,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;AAC9B,SAAA;;;;;;;;;;AAYD,QAAA,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,CAAC;AAEtC,QAAA,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;AAChD,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,qBAAA,CAAuB,CAAC,CAAC;AACvD,SAAA;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;AAChD,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,6BAAA,CAA+B,CAAC,CAAC;AAC/D,SAAA;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;AACtD,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,gBAAA,CAAkB,CAAC,CAAC;AAClD,SAAA;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;AAClB,SAAA;AAAM,aAAA;YACL,IAAI,UAAU,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,IAAI,EAAE;gBACvC,OAAO,GAAG,MAAM,CAAC;AAClB,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,+CAA+C,CAAC,KAAK,IAAI,EAAE;gBACrF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;AAC/D,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBAC/C,MAAM,UAAU,GAAe,MAAoB,CAAC;AACpD,gBAAA,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;AAChF,aAAA;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,sDAAsD,CAAC,KAAK,IAAI,EACjF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;AAC9D,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AACpD,gBAAA,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;AACpE,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AACpD,gBAAA,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;AACpE,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,qBAAqB,CAC7B,IAAI,EACJ,MAAwB,EACxB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;AACH,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,uBAAuB,CAC/B,
IAAI,EACJ,MAA0B,EAC1B,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;AACH,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAC9B,IAAI,EACJ,MAAyB,EACzB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAO,OAAO,CAAC;KAChB;AAED;;;;;;;;AAQG;IACH,WAAW,CACT,MAAc,EACd,YAAqB,EACrB,UAAkB,EAClB,UAA6B,EAAE,EAAA;;AAE/B,QAAA,MAAM,cAAc,GAAgC;AAClD,YAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;AAChC,YAAA,WAAW,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;AACzC,YAAA,UAAU,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,YAAY,IAAI,SAAS,EAAE;AAC7B,YAAA,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;;;gBAIzE,YAAY,GAAG,EAAE,CAAC;AACnB,aAAA;;AAED,YAAA,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;AACrC,gBAAA,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;AACpC,aAAA;AACD,YAAA,OAAO,YAAY,CAAC;AACrB,SAAA;AAED,QAAA,IAAI,OAAY,CAAC;AACjB,QAAA,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;AACf,YAAA,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;AACrC,SAAA;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AAC7C,YAAA,OAAO,GAAG,wBAAwB,CAChC,IAAI,EACJ,MAAyB,EACzB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;AACH,SAAA;AAAM,aAAA;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,gBAAA,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;gBAC7C,MAAM,gBAAgB,GAAG,YAAuC,CAAC;AACjE;;;;AAIG;AACH,gBAAA,IACE,gBAAgB,CAAC,WAAW,CAAC,IAAI,SAAS;AAC1C,oBAAA,gBAAgB,CAAC,UAAU,CAAC,IAAI,SAAS,EACzC;AACA,oBAAA,YAAY,GAAG,gBAAgB,CAAC,UAAU,CAAC,CAAC;AAC7C,iBAAA;AACF,aAAA;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;AAC1C,gBAAA,OAAO,GAAG,UAAU,CAAC,YAAsB,CAAC,CAAC;AAC7C,gBAAA,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;AACxB,iBAAA;AACF,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAClD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;AAChB,iBAAA;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;AACjB,iBAAA;AAAM,qBAAA;oBACL,OAAO,GAAG,YAAY,CAAC;AACxB,iBAAA;AACF,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,kDAAkD,CAAC,KAAK,IAAI,EAAE;gBACxF,OAAO,GAAG,YAAY,CAAC;AACxB,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,oCAAoC,CAAC,KAAK,IAAI,EAAE;AAC1E,gBAAA,OAAO,GAAG,IAAI,IAAI,CAAC,YAAsB,CAAC,CAAC;AAC5C,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACnD,gBAAA,OAAO,GAAG,cAAc,CAAC,YAAsB,CAAC,CAAC;AAClD,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AACpD,gBAAA,OAAO,GAAGC,YAAmB,CAAC,YAAsB,CAAC,CAAC;AACvD,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AACpD,gBAAA,OAAO,GAAG,oBAAoB,CAAC,YAAsB,CAAC,CAAC;AACxD,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACnD,gBAAA,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAAwB,EACxB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;AACH,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;AACrD,gBAAA,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;AACrB,YAAA,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;AAC/B,SAAA;AAED,QAAA,OAAO,OAAO,CAAC;KAChB;AACF,CAAA;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU,EAAA;AACtC,IAAA,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;AACrB,IAAA,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;AAC1C,QAAA,EAAE,GAAG,CAAC;AACP,KAAA;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW,EAAA;IACpC,IAAI,CAAC,MAAM,EAAE;AACX,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AACD,IAAA,IAAI,EAAE,MAAM,YAAY,UAAU,CAAC,EAAE;AACnC,QAAA,MAAM,IAAI,KAAK,CAAC,CAAA,uEAAA,CAAyE,CAAC,CAAC;AAC5F,KAAA;;IAED,MAAM,GAAG,GAAGC,eAAsB,CAAC,MAAM,
CAAC,CAAC;;IAE3C,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW,EAAA;IACvC,IAAI,CAAC,GAAG,EAAE;AACR,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AAC5C,QAAA,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;AACxF,KAAA;;AAED,IAAA,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;;AAEhD,IAAA,OAAOD,YAAmB,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB,EAAA;IAClD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;AACtB,IAAA,IAAI,IAAI,EAAE;QACR,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAEjC,QAAA,KAAK,MAAM,IAAI,IAAI,QAAQ,EAAE;AAC3B,YAAA,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;AACzC,gBAAA,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;AACvD,aAAA;AAAM,iBAAA;gBACL,YAAY,IAAI,IAAI,CAAC;AACrB,gBAAA,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;AACnB,aAAA;AACF,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB,EAAA;IACtC,IAAI,CAAC,CAAC,EAAE;AACN,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AAED,IAAA,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACnC,QAAA,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;AAC3B,KAAA;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS,EAAA;IAC/B,IAAI,CAAC,CAAC,EAAE;AACN,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AACD,IAAA,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU,EAAA;AAC3E,IAAA,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;AACxC,YAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAC,CAAA,EAAG,UAAU,CAAe,YAAA,EAAA,KAAK,CAA0B,wBAAA,CAAA,CAAC,CAAC;AAC9E,aAAA;AACF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;AAC/C,YAAA,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAC,CAAA,EAAG,UAAU,CAAgB,aAAA,EAAA,KAAK,CAA2B,yBAAA,CAAA,CAAC,CAAC;AAChF,aAAA;AACF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;AAC7C,YAAA,IAAI,EAAE,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAIE,WAAiB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACb,CAAA,EAAG,UAAU,CAAgB,aAAA,EAAA,KAAK,CAA4C,0CAAA,CAAA,CAC/E,CAAC;AACH,aAAA;AACF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;AAChD,YAAA,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAC,CAAA,EAAG,UAAU,CAAe,YAAA,EAAA,KAAK,CAA2B,yBAAA,CAAA,CAAC,CAAC;AAC/E,aAAA;AACF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;AAC/C,YAAA,MAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;AACvB,gBAAA,UAAU,KAAK,UAAU;AACzB,gBAAA,EAAE,KAAK,YAAY,WAAW,CAAC;AAC/B,gBAAA,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;AAC1B,gBAAA,EAAE,CAAC,OAAO,IAAI,KAAK,UAAU,IAAI,OAAO,IAAI,KAAK,QAAQ,KAAK,KAAK,YAAY,IAAI,CAAC,EACpF;AACA,gBAAA,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,CAAA,qGAAA,CAAuG,CACrH,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU,EAAA;IAClF,IAAI,CAAC,aAAa,EAAE;AAClB,QAAA,MAAM,IAAI,KAAK,CACb,qDAAqD,UAAU,CAAA,iBAAA,CAAmB,CACnF,CAAC;AACH,KAAA;IACD,MAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,KAAI;AAC5C,QAAA,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;AACnD,SAAA;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;AACxB,KAAC,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;AACd,QAAA,MAAM,IAAI,KAAK,CACb,CAAG,EAAA,KAAK,6BAA6B,UAAU,CAAA,wBAAA,EAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,CAAA,CAAA,CAAG,CACL,C
AAC;AACH,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB,EAAA;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;AACtB,QAAA,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;AAClC,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,4BAAA,CAA8B,CAAC,CAAC;AAC9D,SAAA;AACD,QAAA,WAAW,GAAGD,eAAsB,CAAC,KAAK,CAAC,CAAC;AAC7C,KAAA;AACD,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB,EAAA;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;AACtB,QAAA,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;AAClC,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,4BAAA,CAA8B,CAAC,CAAC;AAC9D,SAAA;AACD,QAAA,WAAW,GAAG,iBAAiB,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;AAC9C,KAAA;AACD,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB,EAAA;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;AACtC,YAAA,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;AACA,gBAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,0DAAA,CAA4D,CAAC,CAAC;AAC5F,aAAA;YACD,KAAK;AACH,gBAAA,KAAK,YAAY,IAAI;sBACjB,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;AACtC,sBAAE,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AACtD,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACjD,YAAA,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;AACA,gBAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,0DAAA,CAA4D,CAAC,CAAC;AAC5F,aAAA;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;AACrF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,oBAAoB,CAAC,KAAK,IAAI,EAAE;AACxD,YAAA,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;AACA,gBAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,2DAAA,CAA6D,CAAC,CAAC;AAC7F,aAAA;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;AACrF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACjD,YAAA,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;AACA,gBAAA,MAAM,IAAI,KAAK,CACb,CAAA,EAAG,UAAU,CAAqE,mEAAA,CAAA;AAChF,oBAAA,CAAA,iDAAA,CAAmD,CACtD,CAAC;AACH,aAAA;AACD,YAAA,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;AAC/B,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACjD,YAAA,IAAI,CAACE,UAAgB,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACb,CAAA,EAAG,UAAU,CAAsD,mDAAA,EAAA,KAAK,CAAI,EAAA,CAAA,CAC7E,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC,EAAA;AAEpC,IAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;AAC1B,QAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,uBAAA,CAAyB,CAAC,CAAC;AACzD,KAAA;AACD,IAAA,MAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;AACxC,IAAA,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,CAAwD,sDAAA,CAAA;YACtD,CAA0C,uCAAA,EAAA,UAAU,CAAG,CAAA,CAAA,CAC1D,CAAC;AACH,KAAA;IACD,MAAM,SAAS,GAAG,EAAE,CAAC;AACrB,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACtC,QAAA,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;AAE1F,QAAA,IAAI,KAAK,IAAI,WAAW,CAAC,YAAY,EAAE;AACrC,YAAA,MAAM,QAAQ,GAAG,WAAW,CAAC,kBAAkB;AAC7C,kBAAE,CAAA,MAAA,EAAS,WAAW,CAAC,kBAAkB,CAAE,CAAA;kBACzC,OAAO,CAAC;AACZ,YAAA,IAAI,WAAW,
CAAC,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;AACzC,gBAAA,SAAS,CAAC,CAAC,CAAC,GAAQ,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,eAAe,CAAE,CAAC;AACtC,gBAAA,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,WAAW,CAAC,YAAY,EAAE,CAAC;AACtE,aAAA;AAAM,iBAAA;AACL,gBAAA,SAAS,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;gBAClB,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;AACnD,gBAAA,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,WAAW,CAAC,YAAY,EAAE,CAAC;AACtE,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,SAAS,CAAC,CAAC,CAAC,GAAG,eAAe,CAAC;AAChC,SAAA;AACF,KAAA;AACD,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC,EAAA;AAEpC,IAAA,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC9B,QAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,wBAAA,CAA0B,CAAC,CAAC;AAC1D,KAAA;AACD,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;AACpC,IAAA,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,CAA2D,yDAAA,CAAA;YACzD,CAA0C,uCAAA,EAAA,UAAU,CAAG,CAAA,CAAA,CAC1D,CAAC;AACH,KAAA;IACD,MAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;AACrC,QAAA,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;;AAE1F,QAAA,cAAc,CAAC,GAAG,CAAC,GAAG,iBAAiB,CAAC,SAAS,EAAE,eAAe,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AACrF,KAAA;;AAGD,IAAA,IAAI,KAAK,IAAI,MAAM,CAAC,YAAY,EAAE;AAChC,QAAA,MAAM,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,CAAS,MAAA,EAAA,MAAM,CAAC,kBAAkB,CAAA,CAAE,GAAG,OAAO,CAAC;QAE5F,MAAM,MAAM,GAAG,cAAc,CAAC;AAC9B,QAAA,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,EAAE,CAAC;AAC1D,QAAA,OAAO,MAAM,CAAC;AACf,KAAA;AAED,IAAA,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;AAKG;AACH,SAAS,2BAA2B,CAClC,UAAsB,EACtB,MAAuB,EACvB,UAAkB,EAAA;AAElB,IAAA,MAAM,oBAAoB,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IAE9D,IAAI,CAAC,oBAAoB,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE;QAClD,MAAM,WAAW,GAAG,uBAAuB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC5E,OAAO,WAAW,aAAX,WAAW,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAX,WAAW,CAAE,IAAI,CAAC,oBAAoB,CAAC;AAC/C,KAAA;AAED,IAAA,OAAO,oBAAoB,CAAC;AAC9B,CAAC;AAED;;;;;AAKG;AACH,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAuB,EACvB,UAAkB,EAAA;AAElB,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;IACxC,IAAI,CAAC,SAAS,EAAE;AACd,QAAA,MAAM,IAAI,KAAK,CACb,yBAAyB,UAAU,CAAA,iCAAA,EAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,CAAA,EAAA,CAAI,CACN,CAAC;AACH,KAAA;AAED,IAAA,OAAO,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;AAC5C,CAAC;AAED;;;;AAIG;AACH,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB,EAAA;AAElB,IAAA,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,WAAW,GAAG,uBAAuB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC5E,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,CAAmD,gDAAA,EAAA,MAAM,CAAC,IAAI,CAAC,SAAS,CAAI,EAAA,CAAA,CAAC,CAAC;AAC/F,SAAA;QACD,UAAU,GAAG,WAAW,KAAA,IAAA,IAAX,WAAW,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAX,WAAW,CAAE,IAAI,CAAC,eAAe,CAAC;QAC/C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,CAAqD,mDAAA,CAAA;AACnD,gBAAA,CAAA,QAAA,EAAW,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CACpC,WAAA,EAAA,MAAM,CAAC,IAAI,CAAC,SACd,CAAA,cAAA,EAAiB,UAAU,CAAA,EAAA,CAAI,CAClC,CAAC;AACH,SAAA;AACF,KAAA;AAED,IAAA,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC,EAAA;AAEpC,IAAA,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;AACzE,KAAA;IAED,IAAI,MAAM,IAAI,SAAS,EAAE;QACvB,MAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,MAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;AACzC,YAA
A,MAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;AACV,aAAA;AAED,YAAA,IAAI,QAA4B,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/B,oBAAA,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;AACnC,iBAAA;AAAM,qBAAA;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;AACpE,iBAAA;AACF,aAAA;AAAM,iBAAA;gBACL,MAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;AACjE,gBAAA,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;AAEvB,gBAAA,KAAK,MAAM,QAAQ,IAAI,KAAK,EAAE;AAC5B,oBAAA,MAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IACE,WAAW,IAAI,SAAS;AACxB,yBAAC,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,CAAC,EACvE;AACA,wBAAA,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;AAC7B,qBAAA;AACD,oBAAA,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;AACvC,iBAAA;AACF,aAAA;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;AAC7B,gBAAA,IAAI,KAAK,IAAI,MAAM,CAAC,YAAY,EAAE;AAChC,oBAAA,MAAM,QAAQ,GAAG,MAAM,CAAC,kBAAkB;AACxC,0BAAE,CAAA,MAAA,EAAS,MAAM,CAAC,kBAAkB,CAAE,CAAA;0BACpC,OAAO,CAAC;AACZ,oBAAA,YAAY,CAAC,WAAW,CAAC,GACpB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,YAAY,CAAC,WAAW,CAAC,CAC5B,EAAA,EAAA,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAChC,CAAC;AACH,iBAAA;AACD,gBAAA,MAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;AAClC,sBAAE,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;sBAChD,UAAU,CAAC;AAEjB,gBAAA,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,MAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AAC5F,gBAAA,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;AACA,oBAAA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;AACrC,iBAAA;AAED,gBAAA,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,EAClB,OAAO,CACR,CAAC;AAEF,gBAAA,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;AAC1D,oBAAA,MAAM,KAAK,GAAG,iBAAiB,CAAC,cAAc,EAAE,eAAe,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AACjF,oBAAA,IAAI,KAAK,IAAI,cAAc,CAAC,cAAc,EAAE;;;;wBAI1C,YAAY,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC;wBAC5D,YAAY,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;AACvD,qBAAA;AAAM,yBAAA,IAAI,KAAK,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/C,wBAAA,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,cAAc,CAAC,cAAe,GAAG,KAAK,EAAE,CAAC;AACtE,qBAAA;AAAM,yBAAA;AACL,wBAAA,YAAY,CAAC,QAAQ,CAAC,GAAG,KAAK,CAAC;AAChC,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;QAED,MAAM,0BAA0B,GAAG,2BAA2B,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;AAC/F,QAAA,IAAI,0BAA0B,EAAE;YAC9B,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1C,YAAA,KAAK,MAAM,cAAc,IAAI,MAAM,EAAE;AACnC,gBAAA,MAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,cAAc,CAAC,CAAC;AAC5E,gBAAA,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,EACzC,OAAO,CACR,CAAC;AACH,iBAAA;AACF,aAAA;AACF,SAAA;AAED,QAAA,OAAO,OAAO,CAAC;AAChB,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,iBAAiB,CACxB,cAAsB,EACtB,eAAoB,EACpB,KAAc,EACd,OAAoC,EAAA;AAEpC,IAAA,IAAI,CAAC,KAAK,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;AAC1C,QAAA,OAAO,eAAe,CAAC;AACxB,KAAA;AAED,IAAA,MAAM,QAAQ,GAAG,cAAc,CAAC,kBAAkB;AAChD,UAAE,CAAA,MAAA,EAAS,cAAc,CAAC,kBAAkB,CAAE,CAAA;UAC5C,OAAO,CAAC;IACZ,MAAM,YAAY,GAAG,EAAE,CAAC,QAAQ,GAAG,cAAc,CAAC,YAAY,EAAE,CAAC;AAEjE,IAAA,IAAI,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AACpD,QAAA,IAAI,eAAe,CAAC,WAAW,CAAC,EAAE;AAChC,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,MAAM,GAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAa,eAAe,CAAE,CAAC;AAC3C,YAAA,MAAM,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC;AACnC,YAAA,OAAO,MAAM,CAAC;AACf,SAAA;AACF,KAAA;IACD,MAAM,MAAM,GAAQ,EAAE,CAAC;AACvB,IAAA,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;AAC7C,IAAA,MAAM,CAAC,WAAW,CAAC,GAAG,
YAAY,CAAC;AACnC,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB,EAAE,OAAoC,EAAA;AACtF,IAAA,OAAO,CAAC,WAAW,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAClE,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB,EAClB,OAAoC,EAAA;;IAEpC,MAAM,UAAU,GAAG,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,WAAW,CAAC;AACrD,IAAA,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;AACnF,KAAA;IAED,MAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,MAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;AACzC,QAAA,MAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,MAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QACpC,MAAM,EAAE,cAAc,EAAE,OAAO,EAAE,cAAc,EAAE,GAAG,cAAc,CAAC;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;AACpC,QAAA,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;AACzD,YAAA,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;AACxD,SAAA;AAED,QAAA,MAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;AAC3F,QAAA,IAAI,sBAAsB,EAAE;YAC1B,MAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAK,MAAM,SAAS,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;AACjD,gBAAA,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;AAChD,oBAAA,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,EAClB,OAAO,CACR,CAAC;AACH,iBAAA;AAED,gBAAA,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACtC,aAAA;AACD,YAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;AAC5B,SAAA;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,WAAW,CAAC,EAAE;gBAC9D,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,WAAW,CAAC,CAAC,OAAQ,CAAC,EACnC,kBAAkB,EAClB,OAAO,CACR,CAAC;AACH,aAAA;iBAAM,IAAI,cAAc,CAAC,WAAW,EAAE;AACrC,gBAAA,IAAI,YAAY,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;oBAC1C,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC;AAC1C,iBAAA;AAAM,qBAAA,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;;;AAG3C,oBAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC;AAC9B,iBAAA;AACF,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/B;;;;;;;;;;;;;AAaE;AACF,oBAAA,MAAM,OAAO,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;AACvC,oBAAA,MAAM,WAAW,GAAG,CAAA,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAG,cAAe,CAAC,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,EAAE,CAAC;AACrD,oBAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,WAAW,EACX,kBAAkB,EAClB,OAAO,CACR,CAAC;AACF,oBAAA,oBAAoB,CAAC,IAAI,CAAC,OAAQ,CAAC,CAAC;AACrC,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,QAAQ,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;AAC7C,oBAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,OAAO,CACR,CAAC;AACF,oBAAA,oBAAoB,CAAC,IAAI,CAAC,YAAa,CAAC,CAAC;AAC1C,iBAAA;AACF,aAAA;AACF,SAAA;AAAM,aAAA;;AAEL,YAAA,IAAI,gBAAgB,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;;AAEvB,YAAA,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;AACxB,gBAAA,IAAI,CAAC,GAAG;oBAAE,MAAM;AAChB,gBAAA,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;AACjB,aAAA;YACD,gBAAgB,GAAG,GAAG,CAAC;AACvB,YAAA,MAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;;;;;;;;;;AAUtE,YAAA,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;AACA,gBAAA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;AAC1C,aAAA;AAED,YAAA,IAAI,eAAe,CAAC;;AAEpB,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;AAC7E,gBAAA,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;AACrC,gBAAA,MAAM,aAAa,GAAG,U
AAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,OAAO,CACR,CAAC;;;AAGF,gBAAA,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;AAC7C,oBAAA,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE;AAC3D,wBAAA,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACtB,qBAAA;AACF,iBAAA;gBACD,QAAQ,GAAG,aAAa,CAAC;AAC1B,aAAA;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;AACtF,gBAAA,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,OAAO,CACR,CAAC;AACF,gBAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;AACjC,aAAA;AACF,SAAA;AACF,KAAA;AAED,IAAA,MAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;AACpE,IAAA,IAAI,0BAA0B,EAAE;AAC9B,QAAA,MAAM,oBAAoB,GAAG,CAAC,gBAAwB,KAAa;AACjE,YAAA,KAAK,MAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,MAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;AAC5E,gBAAA,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;AACjC,oBAAA,OAAO,KAAK,CAAC;AACd,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,IAAI,CAAC;AACd,SAAC,CAAC;AAEF,QAAA,KAAK,MAAM,gBAAgB,IAAI,YAAY,EAAE;AAC3C,YAAA,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,EAC3C,OAAO,CACR,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AAAM,SAAA,IAAI,YAAY,EAAE;QACvB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;AAC3C,YAAA,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;AAC3B,gBAAA,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;AACnC,gBAAA,CAAC,oBAAoB,CAAC,GAAG,EAAE,OAAO,CAAC,EACnC;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;AACnC,aAAA;AACF,SAAA;AACF,KAAA;AAED,IAAA,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB,EAClB,OAAoC,EAAA;AAEpC,IAAA,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;AAChC,IAAA,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,CAA2D,yDAAA,CAAA;YACzD,CAA0C,uCAAA,EAAA,UAAU,CAAE,CAAA,CACzD,CAAC;AACH,KAAA;AACD,IAAA,IAAI,YAAY,EAAE;QAChB,MAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;AAC3C,YAAA,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;AAC7F,SAAA;AACD,QAAA,OAAO,cAAc,CAAC;AACvB,KAAA;AACD,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB,EAClB,OAAoC,EAAA;AAEpC,IAAA,MAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;AACpC,IAAA,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,CAAwD,sDAAA,CAAA;YACtD,CAA0C,uCAAA,EAAA,UAAU,CAAE,CAAA,CACzD,CAAC;AACH,KAAA;AACD,IAAA,IAAI,YAAY,EAAE;AAChB,QAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;;AAEhC,YAAA,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;AAC/B,SAAA;QAED,MAAM,SAAS,GAAG,EAAE,CAAC;AACrB,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACnC,OAAO,EACP,YAAY,CAAC,CAAC,CAAC,EACf,CAAA,EAAG,UAAU,CAAA,CAAA,EAAI,CAAC,CAAG,CAAA,CAAA,EACrB,OAAO,CACR,CAAC;AACH,SAAA;AACD,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AACD,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD,EAAA;IAExD,MAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AAC5F,IAAA,IAAI,wBAAwB,EAAE;AAC5B,QAAA,MAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;AAClC,YAAA,MAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;AACnC,gBAAA,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;AACjE,gBAAA,MAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;AAC7B,sBAAE,kBAAkB;AACpB,sBAAE,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,MAAM,iBAAiB
,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;AACrF,gBAAA,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;AAC5B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB,EAAA;AAEvB,IAAA,QACE,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,EACpE;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB,EAAA;AAClF,IAAA,QACE,QAAQ;AACR,QAAA,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,EAC/D;AACJ,CAAC;AA6TD;;AAEG;AACG,SAAU,eAAe,CAAC,WAAoB,EAAA;IAClD,MAAM,eAAe,GAAG,WAAsC,CAAC;IAC/D,IAAI,WAAW,IAAI,SAAS;AAAE,QAAA,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;AACrC,QAAA,WAAW,GAAGF,eAAsB,CAAC,WAAW,CAAC,CAAC;AAClD,QAAA,OAAO,WAAW,CAAC;AACpB,KAAA;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;AACtC,QAAA,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,MAAM,KAAK,GAAG,EAAE,CAAC;AACjB,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;AAAM,SAAA,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,MAAM,UAAU,GAA2B,EAAE,CAAC;AAC9C,QAAA,KAAK,MAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC;AACnE,SAAA;AACD,QAAA,OAAO,UAAU,CAAC;AACnB,KAAA;AACD,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;AAEG;AACH,SAAS,OAAO,CAAmB,CAAW,EAAA;IAC5C,MAAM,MAAM,GAAQ,EAAE,CAAC;AACvB,IAAA,KAAK,MAAM,GAAG,IAAI,CAAC,EAAE;AACnB,QAAA,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;AACnB,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;AAEG;AACH;AACO,MAAM,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;AACX,CAAA;;ACl+CD;AAmKM,SAAU,iBAAiB,CAAC,MAAe,EAAA;AAC/C,IAAA,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAOlB,CAAC;AACF,QAAA,IACE,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;AAClC,YAAA,OAAO,UAAU,CAAC,MAAM,KAAK,QAAQ;AACrC,YAAA,OAAO,UAAU,CAAC,OAAO,KAAK,QAAQ;AACtC,YAAA,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC;AACrC,YAAA,OAAO,UAAU,CAAC,yBAAyB,KAAK,UAAU;AAC1D,YAAA,OAAO,UAAU,CAAC,OAAO,KAAK,UAAU;AACxC,YAAA,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU,EACtC;AACA,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AACF,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;AAKG;MACU,WAAW,CAAA;AAsGtB,IAAA,WAAA,CACE,GAAY,EACZ,MAAoB,EACpB,IAAc,EACd,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,kBAA4B,EAC5B,yBAAuC,EAAA;AAEvC,QAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,QAAA,IAAI,CAAC,yBAAyB,GAAG,yBAAyB,CAAC;AAC3D,QAAA,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;AACrB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;AAC9B,QAAA,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;AAC/E,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjB,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;AACnB,QAAA,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;AAC1B,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;AAChD,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;AAC5B,QAAA,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;AACzC,QAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACnC,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;AAC3B,QAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,QAAA,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,YAAY,EAAE,CAAC;KAC/E;AAED;;;;AAIG;IACH,yBAAyB,GAAA;AACvB,QAAA,IAAI,CAAC,IAAI,
CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;AACpD,SAAA;AACD,QAAA,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;AACb,YAAA,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;AACjD,SAAA;KACF;AAED;;;;AAIG;AACH,IAAA,OAAO,CAAC,OAA8B,EAAA;QACpC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;AAC/C,SAAA;AAED,QAAA,IACE,OAAO,CAAC,MAAM,KAAK,SAAS;YAC5B,OAAO,CAAC,MAAM,KAAK,IAAI;YACvB,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAC5C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;AACrD,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;AACvC,YAAA,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;AACH,SAAA;AAED,QAAA,IACE,CAAC,OAAO,CAAC,YAAY,KAAK,SAAS;YACjC,OAAO,CAAC,YAAY,KAAK,IAAI;YAC7B,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ;AACpD,aAAC,OAAO,CAAC,GAAG,KAAK,SAAS;gBACxB,OAAO,CAAC,GAAG,KAAK,IAAI;gBACpB,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EAC5C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,SAAA;;QAGD,IAAI,OAAO,CAAC,GAAG,EAAE;AACf,YAAA,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;AACnC,gBAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;AAC1D,aAAA;AACD,YAAA,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;AACxB,SAAA;;QAGD,IAAI,OAAO,CAAC,MAAM,EAAE;AAClB,YAAA,MAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;AAC3F,YAAA,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;AACrB,oBAAA,OAAO,CAAC,MAAM;oBACd,4CAA4C;AAC5C,oBAAA,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;AACH,aAAA;AACF,SAAA;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;;QAG1D,IAAI,OAAO,CAAC,YAAY,EAAE;AACxB,YAAA,MAAM,EAAE,YAAY,EAAE,cAAc,EAAE,GAAG,OAAO,CAAC;AACjD,YAAA,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;AACpC,gBAAA,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;AACnE,aAAA;AACD,YAAA,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;AACpB,gBAAA,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;AAClD,aAAA;AACD,YAAA,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,GAAG,GACL,OAAO;AACP,iBAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;iBACjC,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC;YACxE,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;AACpD,YAAA,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,cAAc,EAAE;AACnB,oBAAA,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,CAAA,wEAAA,CAA0E,CACxG,CAAC;AACH,iBAAA;AACD,gBAAA,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI,EAAA;oBAC7B,MAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AACxC,oBAAA,MAAM,SAAS,GAAI,cAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;AAClB,wBAAA,SAAS,KAAK,SAAS;wBACvB,EAAE,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;AACA,wBAAA,MAAM,yBAAyB,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC;AAC/E,wBAAA,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,CAAA,6BAAA,EAAgC,aAAa,CAAE,CAAA;AAC1E,4BAAA,CAAA,2CAAA,EAA8C,yBAAyB,CAAG,CAAA,CAAA;AAC1E,4BAAA,CAAA,uEAAA,EAA0E,aAAa,CAA6B,2BAAA,CAAA;4BACpH,CAAwC,qCAAA,EAAA,aAAa,CAA6D,2DAAA,CAAA,CACrH,CAAC;AACH,qBAAA;AAED,oBAAA,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AAC3C,wBAAA,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;AACxD,qBAAA;AAED,oBAAA,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AAC3C,wBAAA,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;AACpB,4BAAA,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,CAAA,iEAAA,CAAmE,CAC3G,CAAC;AACH,yBAAA;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;AAC1C,yBAAA;AAAM,6BAAA;AACL,4BAAA,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;AAC9D,yBAAA;AACF,qBAAA;AACH,iBAAC,CAAC,CAAC;AACJ,aAAA;AACD,YAAA,IAAI,CAAC,GAAG,GAAG,G
AAG,CAAC;AAChB,SAAA;;QAGD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;AAChD,YAAA,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,CAA6E,2EAAA,CAAA;oBAC3E,CAAqF,mFAAA,CAAA;AACrF,oBAAA,CAAA,yIAAA,CAA2I,CAC9I,CAAC;AACH,aAAA;;AAED,YAAA,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;AAC5C,gBAAA,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;AACjB,aAAA;;YAED,MAAM,WAAW,GAAG,EAAE,CAAC;;AAEvB,YAAA,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;AAChB,YAAA,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;AAC5C,gBAAA,MAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;AACxD,gBAAA,IAAI,UAAU,EAAE;AACd,oBAAA,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;AAClC,wBAAA,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;AAC7D,qBAAA;AAAM,yBAAA,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;AACzC,wBAAA,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;AACrB,4BAAA,MAAM,IAAI,KAAK,CACb,2BAA2B,cAAc,CAAA,iEAAA,CAAmE,CAC7G,CAAC;AACH,yBAAA;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;AAC/C,yBAAA;AAAM,6BAAA;AACL,4BAAA,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;AAC9E,4BAAA,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;AACnE,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;;YAED,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,SAAA;;QAGD,IAAI,OAAO,CAAC,OAAO,EAAE;AACnB,YAAA,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAK,MAAM,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AACrD,gBAAA,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;AACnD,aAAA;AACF,SAAA;;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;AAC9C,SAAA;;AAED,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;AAC5D,SAAA;;QAGD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;AACrE,SAAA;;AAGD,QAAA,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,IAAI,EAAE;;YAEvD,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;AAClD,iBAAA;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;AAC9D,iBAAA;AACF,aAAA;AAAM,iBAAA;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;AACH,iBAAA;AACD,gBAAA,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAC1C,iBAAA;AACF,aAAA;AACF,SAAA;QAED,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AACxC,SAAA;QAED,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,IAAI,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;AAC9C,SAAA;AAED,QAAA,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AACvC,QAAA,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;AACrD,QAAA,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;AAEjD,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;;AAGG;IACH,KAAK,GAAA;AACH,QAAA,MAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe
,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,yBAAyB,CAC/B,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;AACjB,YAAA,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;AACjC,SAAA;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;AAC3C,SAAA;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;AAC1B,YAAA,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;AACnD,SAAA;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;AAChC,YAAA,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;AAC/D,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AACF;;AChmBD;AAOA;;AAEG;MACU,QAAQ,CAAA;AAArB,IAAA,WAAA,GAAA;QACmB,IAAS,CAAA,SAAA,GAAwD,EAAE,CAAC;KAiItF;AA/HC;;AAEG;IACI,GAAG,GAAA;AACR,QAAA,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;KAC/C;AAED;;AAEG;IACI,IAAI,GAAA;QACT,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;KACpC;AAED;;;;AAIG;IACI,GAAG,CAAC,aAAqB,EAAE,cAAuB,EAAA;QACvD,MAAM,kBAAkB,GAAG,cAE1B,CAAC;AACF,QAAA,IAAI,aAAa,EAAE;AACjB,YAAA,IAAI,kBAAkB,KAAK,SAAS,IAAI,kBAAkB,KAAK,IAAI,EAAE;AACnE,gBAAA,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;AAChD,sBAAE,kBAAkB;AACpB,sBAAE,kBAAkB,CAAC,QAAQ,EAAE,CAAC;AAClC,gBAAA,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;AAC1C,aAAA;AAAM,iBAAA;AACL,gBAAA,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AACtC,aAAA;AACF,SAAA;KACF;AAED;;;AAGG;AACI,IAAA,GAAG,CAAC,aAAqB,EAAA;AAC9B,QAAA,OAAO,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,CAAC;KAClE;AAED;;AAEG;IACI,QAAQ,GAAA;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;AAChB,QAAA,KAAK,MAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;AAC1C,YAAA,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;AACf,aAAA;YACD,MAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AACrD,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,MAAM,gBAAgB,GAAG,EAAE,CAAC;AAC5B,gBAAA,KAAK,MAAM,qBAAqB,IAAI,cAAc,EAAE;oBAClD,gBAAgB,CAAC,IAAI,CAAC,CAAA,EAAG,aAAa,CAAI,CAAA,EAAA,qBAAqB,CAAE,CAAA,CAAC,CAAC;AACpE,iBAAA;AACD,gBAAA,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACtC,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,CAAG,EAAA,aAAa,CAAI,CAAA,EAAA,cAAc,EAAE,CAAC;AAChD,aAAA;AACF,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;AAEG;IACI,OAAO,KAAK,CAAC,IAAY,EAAA;AAC9B,QAAA,MAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;AAE9B,QAAA,IAAI,IAAI,EAAE;AACR,YAAA,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;AACxB,gBAAA,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AAC1B,aAAA;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;AACxB,YAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;AACpC,gBAAA,MAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;AACzC,gBAAA,QAAQ,YAAY;AAClB,oBAAA,KAAK,eAAe;AAClB,wBAAA,QAAQ,gBAAgB;AACtB,4BAAA,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;AAER,4BAAA,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;AAER,4BAAA;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;AACT,yBAAA;wBACD,MAAM;AAER,oBAAA,KAAK,gBAAgB;AACnB,wBAAA,QAAQ,gBAAgB;AACtB,4BAAA,KAAK,GAAG;AACN,gCAAA,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;AAER,4BAAA;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;AACT,yBAAA;wBACD,MAAM;AAER,oBAAA;AACE,wBAAA,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;AACzE,iBAAA;AACF,aAAA;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;AACrC,gBAAA,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;AAC3C,aAAA;AACF,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AACF,CAAA;AAED;;AAEG;MACU,UAAU,CAAA;AAOrB;;;AAGG;AACI,IAAA,SAAS,CAAC,MAA0B,EAAA;QACzC,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;AAC1B,SAAA;AAAM,aAAA;AAC
L,YAAA,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;AAC5B,SAAA;KACF;AAED;;AAEG;IACI,SAAS,GAAA;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,IAAwB,EAAA;QACrC,IAAI,CAAC,IAAI,EAAE;AACT,YAAA,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;AACxB,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;AAClC,SAAA;KACF;AAED;;AAEG;IACI,OAAO,GAAA;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,IAAiC,EAAA;QAC9C,IAAI,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;AACtD,YAAA,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;AACxB,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;AACnC,SAAA;KACF;AAED;;AAEG;IACI,OAAO,GAAA;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,IAAwB,EAAA;QACrC,IAAI,CAAC,IAAI,EAAE;AACT,YAAA,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;AACxB,SAAA;AAAM,aAAA;YACL,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACxC,YAAA,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;;;gBAGvD,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;AAC9E,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;AACxB,aAAA;AACF,SAAA;KACF;AAED;;;AAGG;AACI,IAAA,UAAU,CAAC,IAAwB,EAAA;AACxC,QAAA,IAAI,IAAI,EAAE;AACR,YAAA,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;AACrD,YAAA,IAAI,WAAW,EAAE;AACf,gBAAA,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GAAG,CAAC;AACpB,iBAAA;AAED,gBAAA,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;AACxB,oBAAA,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AAC1B,iBAAA;AAED,gBAAA,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;AAC3B,aAAA;AACD,YAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;AACxB,SAAA;KACF;AAED;;AAEG;IACI,OAAO,GAAA;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;AAEG;AACI,IAAA,QAAQ,CAAC,KAAyB,EAAA;QACvC,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;AACzB,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AACrC,SAAA;KACF;AAED;;;;AAIG;IACI,iBAAiB,CAAC,kBAA0B,EAAE,mBAA4B,EAAA;AAC/E,QAAA,IAAI,kBAAkB,EAAE;AACtB,YAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,gBAAA,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;AAC9B,aAAA;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;AAC1D,SAAA;KACF;AAED;;;AAGG;AACI,IAAA,sBAAsB,CAAC,kBAA0B,EAAA;AACtD,QAAA,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC;KACtE;AAED;;AAEG;IACI,QAAQ,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,SAAS,CAAC;KACzD;AAED;;AAEG;IACK,GAAG,CAAC,IAAY,EAAE,UAA6B,EAAA;QACrD,MAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;AAErD,QAAA,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;AACvB,YAAA,MAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;AACxD,YAAA,IAAI,SAA6B,CAAC;AAClC,YAAA,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI;AAChB,oBAAA,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;AAER,oBAAA,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;AAER,oBAAA,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;AAER,oBAAA,KAAK,MAAM;AACT,wBAAA,SAAS,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;AACpC,wBAAA,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;AAC1D,4BAAA,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;AACxB,yBAAA;wBACD,MAAM;AAER,oBAAA,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;AAER,oBAAA;wBACE,MAAM,IAAI,KAAK,CAAC,CAAA,2BAAA,EAA8B,KAAK,CAAC,IAAI,CAAE,CAAA,CAAC,CAAC;AAC/D,iBAAA;AACF,aAAA;AACF,SAAA;KACF;AAED;;;AAGG;IACI,QAAQ,GAAA;QACb,IAAI,MA
AM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,MAAM,IAAI,CAAG,EAAA,IAAI,CAAC,OAAO,KAAK,CAAC;AAChC,SAAA;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;AACtB,SAAA;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,MAAM,IAAI,CAAI,CAAA,EAAA,IAAI,CAAC,KAAK,EAAE,CAAC;AAC5B,SAAA;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;AACf,aAAA;AACD,YAAA,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;AACtB,SAAA;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAA,CAAE,CAAC;AACxC,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;;AAGG;IACI,UAAU,CAAC,WAAmB,EAAE,YAAoB,EAAA;AACzD,QAAA,IAAI,WAAW,EAAE;AACf,YAAA,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACxE,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACpE,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACpE,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACpE,YAAA,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACvE,SAAA;KACF;AAED;;AAEG;IACI,OAAO,KAAK,CAAC,IAAY,EAAA;AAC9B,QAAA,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;AAChC,QAAA,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;AACnC,QAAA,OAAO,MAAM,CAAC;KACf;AACF,CAAA;MAMY,QAAQ,CAAA;IACnB,WAAmC,CAAA,IAAY,EAAkB,IAAkB,EAAA;QAAhD,IAAI,CAAA,IAAA,GAAJ,IAAI,CAAQ;QAAkB,IAAI,CAAA,IAAA,GAAJ,IAAI,CAAc;KAAI;IAEhF,OAAO,MAAM,CAAC,IAAY,EAAA;AAC/B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;KACrC;IAEM,OAAO,IAAI,CAAC,IAAY,EAAA;AAC7B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEM,OAAO,IAAI,CAAC,IAAY,EAAA;AAC7B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEM,OAAO,IAAI,CAAC,IAAY,EAAA;AAC7B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEM,OAAO,KAAK,CAAC,IAAY,EAAA;AAC9B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;KACpC;AACF,CAAA;AAED;;;AAGG;AACG,SAAU,uBAAuB,CAAC,SAAiB,EAAA;IACvD,MAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AACtD,IAAA,QACE,CAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE;SACpD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC;AACtD,SAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,YACvD;AACJ,CAAC;AAED;;AAEG;MACU,YAAY,CAAA;IAMvB,WAA4B,CAAA,KAAa,EAAE,KAAyB,EAAA;QAAxC,IAAK,CAAA,KAAA,GAAL,KAAK,CAAQ;AACvC,QAAA,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;AAC5C,QAAA,IAAI,CAAC,aAAa,GAAG,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,GAAG,KAAK,GAAG,gBAAgB,CAAC;AACtF,QAAA,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;KACxB;AAED;;;AAGG;IACI,OAAO,GAAA;QACZ,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;AAED;;AAEG;IACI,IAAI,GAAA;AACT,QAAA,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;AAC9B,YAAA,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;AAChC,SAAA;AAAM,aAAA;YACL,QAAQ,IAAI,CAAC,aAAa;AACxB,gBAAA,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;AAER,gBAAA,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;AAER,gBAAA,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;AAER,gBAAA,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;AAER,gBAAA,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;AAER,gBAAA,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;AAER,gBAAA;oBACE,MAAM,IAAI,KAAK,CAAC,CAAA,gCAAA,EAAmC,IAAI,CAAC,aAAa,CAAE,CAAA,CAAC,CAAC;AAC5E,aAAA;AACF,SAAA;AACD,QAAA,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;KAC7B;AACF,CAAA;AAED;;AAEG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAA;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;AAChB,IAAA,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aA
Aa,CAAC,CAAC;AAC5D,QAAA,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACjD,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;AAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB,EAAA;AAClD,IAAA,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;AAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB,EAAA;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;AAGG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa,EAAA;AAC3D,IAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;AACV,SAAA;AACD,QAAA,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;AACjC,KAAA;AACH,CAAC;AAED;;;AAGG;AACH,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB,EAAA;AACvE,IAAA,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;AAClE,IAAA,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;AACpC,QAAA,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;AAClC,KAAA;AACD,IAAA,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;AAGG;AACH,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC,EAAA;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;AAEhB,IAAA,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACrC,QAAA,MAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;AAChE,QAAA,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;AACP,SAAA;AAAM,aAAA;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;AAC1B,SAAA;AACF,KAAA;AAED,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;AAGG;AACH,SAAS,sBAAsB,CAAC,SAAuB,EAAA;AACrD,IAAA,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC,SAAiB,KAAK,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACzF,CAAC;AAED;;;AAGG;AACH,SAAS,kBAAkB,CAAC,SAAuB,EAAE,GAAG,qBAA+B,EAAA;AACrF,IAAA,OAAO,SAAS,CACd,SAAS,EACT,CAAC,SAAiB,KAAK,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB,EAAA;AACzC,IAAA,MAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;AAClD,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACnC,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB,EAAA;AAC/C,IAAA,MAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;AAC1E,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AACtD,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;AACxD,YAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,SAAA;AAAM,aAAA;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AACtD,YAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,SAAA;AACF,KAAA;AAAM,SAAA;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AACtD,QAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;AAC1C,YAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,SAAA;AAAM,aAAA;AACL,YAAA,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;AACnC,SAAA;AACF,KAAA;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB,EAAA;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;AAC1C,QAAA,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;AAC7B,KAAA;AAED,IAAA,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAE9C,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACnC,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;AACjD,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;AACjD,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;AACnC,KAAA;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB,EAAA;AACvC,IAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;AAC1B,KAAA;IAED,MAAM
,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAE9C,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACnC,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;AACjD,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;AACnC,KAAA;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB,EAAA;IACvC,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAE9C,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACnC,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;AACnC,KAAA;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB,EAAA;AACxC,IAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;AAC1B,KAAA;AAED,IAAA,MAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AAChD,IAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC;;ACtqBA;SAWgB,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB,EAAA;AAEzB,IAAA,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY,CAAC;IACtE,IAAI,CAAC,IAAI,EAAE;AACT,QAAA,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;AAClE,KAAA;AACD,IAAA,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,IAAI,CAAC,EAAE;AACpC,QAAA,MAAM,IAAI,KAAK,CAAC,6EAA6E,CAAC,CAAC;AAChG,KAAA;AACD,IAAA,MAAM,aAAa,GAAiC;AAClD,QAAA,KAAK,EAAE;AACL,YAAA,IAAI,EAAE,IAAI;YACV,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE;AACjD,SAAA;KACF,CAAC;AAEF,IAAA,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;AACpD,QAAA,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,CAAG,EAAA,aAAa,CAAC,QAAQ,CAAI,CAAA,EAAA,aAAa,CAAC,QAAQ,EAAE,CAAC;AACxF,KAAA;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,GAAG,aAAa,CAAC,QAAQ,CAAA,CAAE,CAAC;AAC9D,KAAA;AAED,IAAA,MAAM,cAAc,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;IAC9C,MAAM,YAAY,GAAG,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;AAEpD,IAAA,MAAM,UAAU,GAAG;AACjB,QAAA,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;AAEF,IAAA,OAAO,UAAU,CAAC;AACpB,CAAC;AAEK,SAAU,UAAU,CAAC,GAAW,EAAA;AACpC,IAAA,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;AAC1D,IAAA,OAAO,SAAS,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;AAC7C,CAAC;SAEe,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAA2C,EAAA;IAE3C,IAAI,cAAc,IAAI,YAAY,EAAE;AAClC,QAAA,OAAOG,iBAAM,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;AAC7C,KAAA;AAAM,SAAA,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;AAC1C,QAAA,OAAOA,iBAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;AAC5C,KAAA;AAAM,SAAA,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;AAC1C,QAAA,OAAOA,iBAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;AAC5C,KAAA;AAAM,SAAA;AACL,QAAA,OAAOA,iBAAM,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC;AAC3C,KAAA;AACH,CAAC;AAED,SAAS,WAAW,CAAC,IAAY,EAAA;;;AAG/B,IAAA,OAAO,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,KAAK,CAAC;AACpC;;ACzEA;AAsBA,MAAM,cAAc,GAAG,UAAU,CAAC;AAElC,MAAM,yBAAyB,GAAG;IAChC,wBAAwB;IACxB,+BAA+B;IAC/B,gBAAgB;IAChB,6BAA6B;IAC7B,iBAAiB;IACjB,mBAAmB;IACnB,OAAO;IACP,0BAA0B;IAC1B,aAAa;IAEb,kCAAkC;IAClC,8BAA8B;IAC9B,8BAA8B;IAC9B,6BAA6B;IAC7B,+BAA+B;IAC/B,wBAAwB;IACxB,gCAAgC;IAChC,+BAA+B;IAC/B,QAAQ;IAER,QAAQ;IACR,iBAAiB;IACjB,eAAe;IACf,YAAY;IACZ,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,MAAM;IACN,SAAS;IACT,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,eAAe;IACf,QAAQ;IACR,YAAY;IACZ,aAAa;IACb,QAAQ;IACR,mBAAmB;IACnB,YAAY;IACZ,kBAAkB;CACnB,CAAC;AAEF,MAAM,6BAA6B,GAAa,CAAC,aAAa,CAAC,CAAC;MAEnD,SAAS,CAAA;IAIpB,WAAY,CAAA,EAAE,kBAAkB,GAAG,EAAE,EAAE,sBAAsB,GAAG,EAAE,EAAA,GAAuB,EAAE,EAAA;AACzF,QAAA,kBAAkB,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;AACpD,cAAE,yBAAyB,CAAC,MAAM,CAAC,kBAAkB,CAAC;cACpD,yB
AAyB,CAAC;AAE9B,QAAA,sBAAsB,GAAG,KAAK,CAAC,OAAO,CAAC,sBAAsB,CAAC;AAC5D,cAAE,6BAA6B,CAAC,MAAM,CAAC,sBAAsB,CAAC;cAC5D,6BAA6B,CAAC;QAElC,IAAI,CAAC,kBAAkB,GAAG,IAAI,GAAG,CAAC,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;QAClF,IAAI,CAAC,sBAAsB,GAAG,IAAI,GAAG,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;KAC3F;AAEM,IAAA,QAAQ,CAAC,GAAY,EAAA;AAC1B,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,EAAW,CAAC;QAChC,OAAO,IAAI,CAAC,SAAS,CACnB,GAAG,EACH,CAAC,GAAW,EAAE,KAAc,KAAI;;YAE9B,IAAI,KAAK,YAAY,KAAK,EAAE;AAC1B,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACK,KAAK,CAAA,EAAA,EACR,IAAI,EAAE,KAAK,CAAC,IAAI,EAChB,OAAO,EAAE,KAAK,CAAC,OAAO,EACtB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,GAAG,KAAK,aAAa,EAAE;AACzB,gBAAA,OAAO,IAAI,CAAC,eAAe,CAAC,KAAsB,CAAC,CAAC;AACrD,aAAA;iBAAM,IAAI,GAAG,KAAK,KAAK,EAAE;AACxB,gBAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAe,CAAC,CAAC;AAC1C,aAAA;iBAAM,IAAI,GAAG,KAAK,OAAO,EAAE;AAC1B,gBAAA,OAAO,IAAI,CAAC,aAAa,CAAC,KAAsB,CAAC,CAAC;AACnD,aAAA;iBAAM,IAAI,GAAG,KAAK,MAAM,EAAE;;AAEzB,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,GAAG,KAAK,UAAU,EAAE;;AAE7B,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,GAAG,KAAK,eAAe,EAAE;;;AAGlC,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE;AAClD,gBAAA,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;AACnB,oBAAA,OAAO,YAAY,CAAC;AACrB,iBAAA;AACD,gBAAA,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;AACjB,aAAA;AAED,YAAA,OAAO,KAAK,CAAC;SACd,EACD,CAAC,CACF,CAAC;KACH;AAEO,IAAA,eAAe,CAAC,KAAoB,EAAA;QAC1C,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;KAClF;AAEO,IAAA,aAAa,CAAC,KAAoB,EAAA;QACxC,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;KAChF;AAEO,IAAA,cAAc,CACpB,KAAoB,EACpB,WAAwB,EACxB,QAA0C,EAAA;QAE1C,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;AAC/C,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,SAAS,GAAkB,EAAE,CAAC;QAEpC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;YAClC,IAAI,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACpC,SAAS,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AACnC,aAAA;AAAM,iBAAA;AACL,gBAAA,SAAS,CAAC,CAAC,CAAC,GAAG,cAAc,CAAC;AAC/B,aAAA;AACF,SAAA;AAED,QAAA,OAAO,SAAS,CAAC;KAClB;AAEO,IAAA,WAAW,CAAC,KAAa,EAAA;QAC/B,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;AAC/C,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AAC3C,QAAA,MAAM,WAAW,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;QAE1C,IAAI,CAAC,WAAW,EAAE;AAChB,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC1C,QAAA,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE;AAC5B,YAAA,IAAI,CAAC,IAAI,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;AACrD,gBAAA,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAC9B,aAAA;AACF,SAAA;QAED,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,QAAA,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;KAC9B;AACF;;ACtLD;AAKO,MAAM,MAAM,GAAGC,YAAO,CAAC,MAAM;;ACLpC;AAQA,MAAM,cAAc,GAAG,IAAI,SAAS,EAAE,CAAC;AAEvC;;AAEG;AACG,MAAO,SAAU,SAAQ,KAAK,CAAA;IA8BlC,WACE,CAAA,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAAA;QAEhC,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AACxB,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QAEzB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;KAClD;AAED;;AAEG;AACH,IAAA,CAAC,MAAM,CAAC,GAAA;AACN,QAAA,OAAO,CAAc,WAAA,EAAA,IAAI,CAAC,OAAO,CAAO,IAAA,
EAAA,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA,CAAE,CAAC;KACzE;;AAnDD;;AAEG;AACa,SAAkB,CAAA,kBAAA,GAAW,oBAAoB,CAAC;AAClE;;AAEG;AACa,SAAW,CAAA,WAAA,GAAW,aAAa;;ACrBrD;AAGO,MAAM,MAAM,GAAGC,2BAAkB,CAAC,WAAW,CAAC;;ACHrD;AAsBA,SAAS,cAAc,CACrB,OAAgB,EAChB,UAAsB,EAAA;AAEtB,IAAA,OAAO,OAAO,GAAG,UAAU,CAAC,UAAU,GAAG,UAAU,CAAC,SAAS,CAAC;AAChE,CAAC;AA+BK,MAAO,eAAgB,SAAQC,gBAAS,CAAA;AAS5C,IAAA,WAAA,CAAoB,gBAA2D,EAAA;AAC7E,QAAA,KAAK,EAAE,CAAC;QADU,IAAgB,CAAA,gBAAA,GAAhB,gBAAgB,CAA2C;QARvE,IAAW,CAAA,WAAA,GAAW,CAAC,CAAC;KAU/B;AATD,IAAA,UAAU,CAAC,KAAsB,EAAE,SAAiB,EAAE,QAA4B,EAAA;AAChF,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACjB,QAAA,IAAI,CAAC,WAAW,IAAI,KAAK,CAAC,MAAM,CAAC;QACjC,IAAI,CAAC,gBAAiB,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAC1D,QAAQ,CAAC,SAAS,CAAC,CAAC;KACrB;AAKF,CAAA;AAED,SAAS,gBAAgB,CAAC,IAAS,EAAA;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB,EAAE,OAAyB,EAAA;AACnE,IAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;AAC7B,QAAA,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,MAAK;AACxB,YAAA,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,KAAK,EAAE,CAAC;AACjB,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;AACH,QAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;AAC5B,QAAA,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAChC,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;AAEG;AACG,SAAU,YAAY,CAAC,OAAgB,EAAA;AAC3C,IAAA,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,KAAI;AAC7B,QAAA,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;AAC9B,KAAC,CAAC,CAAC;AAEH,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;AAEG;MACU,mBAAmB,CAAA;AAAhC,IAAA,WAAA,GAAA;;AAkMU,QAAA,IAAA,CAAA,aAAa,GAA4B,IAAI,GAAG,EAAE,CAAC;QACnD,IAAe,CAAA,eAAA,GAAe,EAAE,CAAC;KAmF1C;AArRC;;;;AAIG;IACH,MAAM,WAAW,CAAC,WAA4B,EAAA;;AAC5C,QAAA,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;AACnD,YAAA,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;AACH,SAAA;AAED,QAAA,MAAMC,iBAAe,GAAG,IAAIC,+BAAe,EAAE,CAAC;AAC9C,QAAA,IAAI,aAAiD,CAAC;QACtD,IAAI,WAAW,CAAC,WAAW,EAAE;AAC3B,YAAA,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;AACnC,gBAAA,MAAM,IAAIC,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,aAAa,GAAG,CAAC,KAAY,KAAI;AAC/B,gBAAA,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oBAC1BF,iBAAe,CAAC,KAAK,EAAE,CAAC;AACzB,iBAAA;AACH,aAAC,CAAC;YACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;AAClE,SAAA;QAED,IAAI,WAAW,CAAC,OAAO,EAAE;YACvB,UAAU,CAAC,MAAK;gBACdA,iBAAe,CAAC,KAAK,EAAE,CAAC;AAC1B,aAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;AACzB,SAAA;QAED,IAAI,WAAW,CAAC,QAAQ,EAAE;AACxB,YAAA,MAAM,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;AAC3C,YAAA,MAAM,WAAW,GAAG,IAAIG,4BAAQ,EAAE,CAAC;AACnC,YAAA,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,KAAU;;AAExD,gBAAA,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oBAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;AACjB,iBAAA;AACD,gBAAA,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;AACA,oBAAA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;AACrD,iBAAA;AAAM,qBAAA;AACL,oBAAA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;AAChC,iBAAA;AACH,aAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;AAC3C,gBAAA,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;AACpC,gBAAA,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;AAC5B,oBAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,qBAAA;AACF,iBAAA;AAAM,qBAAA;AACL,oBAAA,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AACrC,iBAAA;AACF,aAAA;AAED,YAAA,WAAW,CAAC,IAAI,GAAG,WAAW,CAAC;AAC/B,YAAA,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;YACjC,MAAM,WAAW,GAAG,WAAW,CAAC,OAAO,CAA
C,GAAG,CAAC,cAAc,CAAC,CAAC;YAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;AACpE,gBAAA,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;AACjD,oBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,CAAiC,8BAAA,EAAA,WAAW,CAAC,WAAW,EAAE,CAAA,CAAE,CAC7D,CAAC;AACH,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AAC5C,iBAAA;AACF,aAAA;AACF,SAAA;AAED,QAAA,IAAI,IAAI,GAAG,WAAW,CAAC,IAAI;AACzB,cAAE,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;AACtC,kBAAE,WAAW,CAAC,IAAI,EAAE;kBAClB,WAAW,CAAC,IAAI;cAClB,SAAS,CAAC;AACd,QAAA,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;AACpD,YAAA,MAAM,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAC;AACtD,YAAA,MAAM,kBAAkB,GAAG,IAAI,eAAe,CAAC,gBAAgB,CAAC,CAAC;AACjE,YAAA,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;AAC1B,gBAAA,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA;AACL,gBAAA,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;AAC9B,aAAA;YAED,IAAI,GAAG,kBAAkB,CAAC;AAC3B,SAAA;QAED,MAAM,2BAA2B,GAAyB,MAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,CAAC;AAEF,QAAA,MAAM,WAAW,GACf,MAAA,CAAA,MAAA,CAAA,EAAA,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM;;;;YAI1B,MAAM,EAAEH,iBAAe,CAAC,MAAa,EACrC,QAAQ,EAAE,QAAQ,EAAA,EACf,2BAA2B,CAC/B,CAAC;AAEF,QAAA,IAAI,iBAAoD,CAAC;QACzD,IAAI;AACF,YAAA,MAAM,QAAQ,GAAmB,MAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEhF,MAAM,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AAE/C,YAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,WAAW,CAAC,yBAAyB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAC3D,WAAW,CAAC,kBAAkB,CAAC;AAEjC,YAAA,iBAAiB,GAAG;AAClB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,OAAO,EAAE,WAAW;gBACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;AACvB,gBAAA,kBAAkB,EAAE,SAAS;sBACxB,QAAQ,CAAC,IAAyC;AACrD,sBAAE,SAAS;AACb,gBAAA,UAAU,EAAE,CAAC,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,GAAG,SAAS;aAC3D,CAAC;AAEF,YAAA,MAAM,kBAAkB,GAAG,WAAW,CAAC,kBAAkB,CAAC;AAC1D,YAAA,IAAI,kBAAkB,EAAE;AACtB,gBAAA,MAAM,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;AAExF,gBAAA,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;AAClC,oBAAA,MAAM,oBAAoB,GAAG,IAAI,eAAe,CAAC,kBAAkB,CAAC,CAAC;AACrE,oBAAA,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACxC,oBAAA,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;AAC7D,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;AACrE,oBAAA,IAAI,MAAM,EAAE;;AAEV,wBAAA,kBAAkB,CAAC,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC,CAAC;AAC7C,qBAAA;AACF,iBAAA;AACF,aAAA;AAED,YAAA,MAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAE7C,YAAA,OAAO,iBAAiB,CAAC;AAC1B,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,UAAU,GAAe,KAAK,CAAC;AACrC,YAAA,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;AACnC,gBAAA,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;AACH,aAAA;AAAM,iBAAA,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;AACxC,gBAAA,MAAM,IAAIE,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,MAAM,UAAU,CAAC;AAClB,SAAA;AAAS,gBAAA;;AAER,YAAA,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;AAC5C,gBAAA,IAAI,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;AACzC,gBAAA,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;AAC1B,oBAAA,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;AAC3C,iBAAA;AACD,gBAAA,IAAI,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBAC3C,IAAI,gBAAgB,CAAC,iBAAiB,KAAjB,IAAA,IAAA,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;oBAC3D,kBAAkB,GAAG,gBAAgB,CACnC,iBAAkB,CAAC,kBAAkB,EACrCF,iBAAe,CAChB,CAAC;AACH,iBAAA;gBAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;qBAChD,IAAI,CAAC,MAAK;;oBACT,CAAA,EAAA,GAAA,WAAW,CAAC,WAAW,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,mBAAmB,CAAC,OAAO,EAAE,aAAc,CAAC,CAAC;oBACtE,OAAO;AACT,iBAAC,CAAC;AACD,qBAAA,KAAK,CAAC,CA
AC,CAAC,KAAI;AACX,oBAAA,MAAM,CAAC,OAAO,CAAC,qDAAqD,EAAE,CAAC,CAAC,CAAC;AAC3E,iBAAC,CAAC,CAAC;AACN,aAAA;AACF,SAAA;KACF;AAMO,IAAA,gBAAgB,CAAC,WAA4B,EAAA;;QACnD,MAAM,OAAO,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;;;;QAK5C,IAAI,WAAW,CAAC,aAAa,EAAE;AAC7B,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC,aAAa,CAAC;YACrE,MAAM,GAAG,GAAG,CAAA,EAAG,IAAI,CAAA,CAAA,EAAI,IAAI,CAAA,CAAA,EAAI,QAAQ,CAAA,CAAA,EAAI,QAAQ,CAAA,CAAE,CAAC;AACtD,YAAA,MAAM,WAAW,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,CAAC,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,EAAE,CAAC;YAEtD,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACjD,YAAA,IAAI,KAAK,EAAE;AACT,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAED,YAAA,MAAM,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;AAEF,YAAA,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;YACrB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,gBAAA,WAAW,CAAC,UAAU,GAAG,MAAM,CAAC,KAAoB,CAAC;AACtD,aAAA;AAAM,iBAAA;AACL,gBAAA,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;AACtC,aAAA;YACD,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;AAEzC,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;aAAM,IAAI,WAAW,CAAC,SAAS,EAAE;YAChC,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;AAC1D,YAAA,IAAI,KAAK,EAAE;AACT,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAED,YAAA,MAAM,YAAY,GAA2C;gBAC3D,SAAS,EAAE,WAAW,CAAC,SAAS;aACjC,CAAC;AAEF,YAAA,IAAI,OAAO,EAAE;AACX,gBAAA,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,GAAG,IAAII,gBAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACzE,aAAA;AAAM,iBAAA;AACL,gBAAA,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,GAAG,IAAIC,eAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACvE,aAAA;AAED,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,OAAO,GAAGD,gBAAK,CAAC,WAAW,GAAGC,eAAI,CAAC,WAAW,CAAC;AACvD,SAAA;KACF;AAED;;AAEG;;AAEH,IAAA,MAAM,KAAK,CAAC,KAAwB,EAAE,IAAwB,EAAA;AAC5D,QAAA,OAAOC,8BAAU,CAAC,KAAK,EAAE,IAAI,CAAuC,CAAC;KACtE;AAED;;AAEG;IACH,MAAM,cAAc,CAAC,WAA4B,EAAA;QAC/C,MAAM,WAAW,GAA+D,EAAE,CAAC;;QAGnF,WAAW,CAAC,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;AAEvD,QAAA,WAAW,CAAC,QAAQ,GAAG,WAAW,CAAC,kBAAkB,CAAC;AAEtD,QAAA,OAAO,WAAW,CAAC;KACpB;AAED;;AAEG;IACH,MAAM,cAAc,CAAC,kBAAyC,EAAA;;KAE7D;AACF;;AC7XD;AACA;AAEA;;AAEG;AACSC,sCAoBX;AApBD,CAAA,UAAY,oBAAoB,EAAA;AAC9B;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,KAAA,CAAA,GAAA,CAAA,CAAA,GAAA,KAAG,CAAA;AAEH;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AAEL;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,SAAA,CAAA,GAAA,CAAA,CAAA,GAAA,SAAO,CAAA;AAEP;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,MAAA,CAAA,GAAA,CAAA,CAAA,GAAA,MAAI,CAAA;AACN,CAAC,EApBWA,4BAAoB,KAApBA,4BAAoB,GAoB/B,EAAA,CAAA,CAAA;;AC1BD;AAwDA;;;;AAIG;AACG,SAAU,oCAAoC,CAClD,IAAO,EAAA;AAEP,IAAA,MAAM,EAAE,cAAc,EAAE,cAAc,EAA2B,GAAA,IAAI,EAA1B,iBAAiB,GAAKC,YAAA,CAAA,IAAI,EAA/D,CAAA,gBAAA,EAAA,gBAAA,CAAwD,CAAO,CAAC;IAEtE,IAAI,MAAM,GAAuB,iBAAiB,CAAC;AAEnD,IAAA,IAAI,cAAc,EAAE;AAClB,QAAA,MAAM,GAAQ,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,MAAM,CAAK,EAAA,cAAc,CAAE,CAAC;AAC3C,KAAA;AAED,IAAA,IAAI,cAAc,EAAE;AAClB,QAAA,MAAM,CAAC,cAAc,GAAG,cAAc,CAAC,cAAc,CAAC;;QAEtD,MAAM,CAAC,WAAW,GAAI,cAAsB,KAAA,IAAA,IAAtB,cAAc,KAAd,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,cAAc,CAAU,WAAW,CAAC;AAC3D,KAAA;AAED,IAAA,OAAO,MAAM,CAAC;AAChB;;AC/EA;AA0BA;;AAEG;MACmB,iBAAiB,CAAA;AACrC;;AAEG;AACH,IAAA,WAAA;AACE;;AAEG;IACM,WAA0B;AACnC;;AAEG;IACM,QAAkC,EAAA;QAJlC,IAAW,CAAA,WAAA,GAAX,WAAW,CAAe;QAI1B,IAAQ,CAAA,QAAA,GAAR,QAAQ,CAA0B;KACzC;AAQJ;;;;AAIG;AACI,IAAA,SAAS,CAAC,QAA8B,EAAA;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;KAC1C;AAED;;;;;AAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe,EAAA;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;KAC
tC;AACF,CAAA;AAsBD;;AAEG;MACU,oBAAoB,CAAA;AAC/B,IAAA,WAAA,CAAoB,OAA4B,EAAA;QAA5B,IAAO,CAAA,OAAA,GAAP,OAAO,CAAqB;KAAI;AAEpD;;;;AAIG;AACI,IAAA,SAAS,CAAC,QAA8B,EAAA;AAC7C,QAAA,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAKD,4BAAoB,CAAC,GAAG;AACrC,YAAA,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;KACH;AAED;;;;;AAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe,EAAA;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;AACrC,SAAA;KACF;AACF;;ACxHD;AAMA;AACA;AACA;AACA;AACA,MAAM,sBAAsB,GAAqB;AAC/C,IAAA,eAAe,EAAE,KAAK;AACtB,IAAA,IAAI,EAAE,KAAK;AACX,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,aAAa,EAAE,KAAK;AACpB,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,aAAa,EAAE,IAAI;AACnB,IAAA,WAAW,EAAE,KAAK;AAClB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,YAAY,EAAE,IAAI;AAClB,IAAA,SAAS,EAAE,SAAS;AACpB,IAAA,KAAK,EAAE,KAAK;AACZ,IAAA,gBAAgB,EAAE,KAAK;AACvB,IAAA,qBAAqB,EAAE,KAAK;AAC5B,IAAA,QAAQ,EAAE,IAAI;AACd,IAAA,eAAe,EAAE,KAAK;AACtB,IAAA,iBAAiB,EAAE,KAAK;AACxB,IAAA,KAAK,EAAE,KAAK;AACZ,IAAA,MAAM,EAAE,IAAI;AACZ,IAAA,kBAAkB,EAAE,SAAS;AAC7B,IAAA,mBAAmB,EAAE,SAAS;AAC9B,IAAA,iBAAiB,EAAE,SAAS;AAC5B,IAAA,eAAe,EAAE,SAAS;AAC1B,IAAA,QAAQ,EAAE,MAAM;AAChB,IAAA,MAAM,EAAE;AACN,QAAA,OAAO,EAAE,KAAK;AACd,QAAA,QAAQ,EAAE,OAAO;AACjB,QAAA,UAAU,EAAE,IAAI;AACjB,KAAA;AACD,IAAA,OAAO,EAAE,SAAS;AAClB,IAAA,UAAU,EAAE;AACV,QAAA,MAAM,EAAE,IAAI;AACZ,QAAA,MAAM,EAAE,IAAI;AACZ,QAAA,OAAO,EAAE,IAAI;AACd,KAAA;AACD,IAAA,QAAQ,EAAE,KAAK;AACf,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,QAAQ,EAAE,EAAE;AACZ,IAAA,KAAK,EAAE,KAAK;CACb,CAAC;AAEF;AACA,MAAM,oBAAoB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC5E,oBAAoB,CAAC,aAAa,GAAG,KAAK,CAAC;AAE3C;AACA,MAAM,qBAAqB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC7E,qBAAqB,CAAC,aAAa,GAAG,KAAK,CAAC;AAC5C,qBAAqB,CAAC,UAAU,GAAG;AACjC,IAAA,MAAM,EAAE,KAAK;CACd,CAAC;AAEF;;;;AAIG;SACa,YAAY,CAAC,GAAY,EAAE,OAA0B,EAAE,EAAA;;AACrE,IAAA,qBAAqB,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IAC/C,qBAAqB,CAAC,OAAO,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;IAC/D,MAAM,OAAO,GAAG,IAAIE,iBAAM,CAAC,OAAO,CAAC,qBAAqB,CAAC,CAAC;AAC1D,IAAA,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED;;;;AAIG;SACa,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE,EAAA;;IAChE,oBAAoB,CAAC,YAAY,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;IACvD,oBAAoB,CAAC,OAAO,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;IAC9D,MAAM,SAAS,GAAG,IAAIA,iBAAM,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;IAC1D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAI;QACrC,IAAI,CAAC,GAAG,EAAE;AACR,YAAA,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;AACxC,SAAA;AAAM,aAAA;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAQ,EAAE,GAAQ,KAAI;AAChD,gBAAA,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;AACb,iBAAA;AAAM,qBAAA;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;AACd,iBAAA;AACH,aAAC,CAAC,CAAC;AACJ,SAAA;AACH,KAAC,CAAC,CAAC;AACL;;AChGA;AA+CA;;;AAGG;AACa,SAAA,qBAAqB,CACnC,2BAAyD,EACzD,cAAkC,EAAA;IAElC,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,qBAAqB,CAC9B,UAAU,EACV,OAAO,EACP,2BAA2B,EAC3B,cAAc,CACf,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAEM,MAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AAClE,MAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAE3E,MAAM,6BAA6B,GAA2B;AACnE,IAAA,oBAAoB,EAAE;AACpB,QAAA,IAAI,EAAE,uBAAuB;AAC7B,QAAA,GAAG,EAAE,sBAAsB;AAC5B,KAAA;CACF,CAAC;AAEF;;;AAGG;AACG,MAAO,qBAAsB,SAAQ,iBAAiB,CAAA;AAK1D,IAAA,WAAA,CACE,UAAyB,EACzB,oBAA0C,EAC1C,2BAAyD,EACzD,iBAAoC,EAAE,EAAA;;AAEtC,QAAA,KAAK,CAAC,UAAU,EAAE,oBAAoB,CAAC,CAAC;AAExC,QAAA,IAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;AAC/F,QAAA,
IAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;QAC7F,IAAI,CAAC,UAAU,GAAG,CAAA,EAAA,GAAA,cAAc,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;KAC5D;IAEM,MAAM,WAAW,CAAC,OAAwB,EAAA;QAC/C,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAA+B,KAChF,uBAAuB,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,eAAe,EAAE,QAAQ,EAAE;YAC7E,UAAU,EAAE,IAAI,CAAC,UAAU;AAC5B,SAAA,CAAC,CACH,CAAC;KACH;AACF,CAAA;AAED,SAAS,oBAAoB,CAC3B,cAAqC,EAAA;AAErC,IAAA,IAAI,MAAqC,CAAC;AAC1C,IAAA,MAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;AACxD,IAAA,MAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;AACvE,IAAA,IAAI,aAAa,EAAE;AACjB,QAAA,MAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;AACzD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;AACjE,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC,EAAA;AACtE,IAAA,MAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;AAC3C,IAAA,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;AACf,KAAA;AAAM,SAAA,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;AAC5B,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;AAC5C,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;AAOG;AACG,SAAU,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B,EAC/B,OAAA,GAA6B,EAAE,EAAA;;AAE/B,IAAA,MAAM,cAAc,GAAgC;AAClD,QAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;AAChC,QAAA,WAAW,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;AACzC,QAAA,UAAU,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;KAC9C,CAAC;AACF,IAAA,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,EAAE,cAAc,CAAC,CAAC,IAAI,CAC5E,CAAC,cAAc,KAAI;AACjB,QAAA,IAAI,CAAC,yBAAyB,CAAC,cAAc,CAAC,EAAE;AAC9C,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;AAED,QAAA,MAAM,aAAa,GAAG,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;AAC3D,QAAA,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,SAAS,EAAE;AAC9C,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;AAED,QAAA,MAAM,YAAY,GAAG,oBAAoB,CAAC,cAAc,CAAC,CAAC;AAE1D,QAAA,MAAM,EAAE,KAAK,EAAE,oBAAoB,EAAE,GAAG,mBAAmB,CACzD,cAAc,EACd,aAAa,EACb,YAAY,CACb,CAAC;AACF,QAAA,IAAI,KAAK,EAAE;AACT,YAAA,MAAM,KAAK,CAAC;AACb,SAAA;AAAM,aAAA,IAAI,oBAAoB,EAAE;AAC/B,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;;;AAID,QAAA,IAAI,YAAY,EAAE;YAChB,IAAI,YAAY,CAAC,UAAU,EAAE;AAC3B,gBAAA,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;AACxD,gBAAA,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBACpF,kBAAkB;wBAChB,OAAO,kBAAkB,KAAK,QAAQ;8BAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;8BAC3D,EAAE,CAAC;AACV,iBAAA;gBACD,IAAI;AACF,oBAAA,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,EACzB,OAAO,CACR,CAAC;AACH,iBAAA;AAAC,gBAAA,OAAO,UAAe,EAAE;oBACxB,MAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,SAAS,UAAU,CAAA,8CAAA,EAAiD,cAAc,CAAC,UAAU,CAAA,CAAE,EAC/F,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;AACF,oBAAA,MAAM,SAAS,CAAC;AACjB,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;AAE9C,gBAAA,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;AAC7E,aAAA;YAED,IAAI,YAAY,CAAC,aAAa,EAAE;gBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,MAAM,EAAE,EAC/B,4BAA4B,EAC5B,OAAO,CACR,CAAC;AACH,aAAA;AACF,SAAA;AAED,QAAA,OAAO,cAAc,CAAC;AACxB,KAAC,CACF,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAAC,aAA4B,EAAA;IACxD,MAAM,mBAAmB,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;AACjE,IAAA,QACE,mBAAmB,CAAC,MAAM,KAAK,CAAC;AAChC,SAAC,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,EAC1E;AACJ,CAAC;AAED,SAAS,m
BAAmB,CAC1B,cAAqC,EACrC,aAA4B,EAC5B,YAA2C,EAAA;;AAE3C,IAAA,MAAM,iBAAiB,GAAG,GAAG,IAAI,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,MAAM,GAAG,GAAG,CAAC;AACtF,IAAA,MAAM,oBAAoB,GAAY,oBAAoB,CAAC,aAAa,CAAC;AACvE,UAAE,iBAAiB;AACnB,UAAE,CAAC,CAAC,YAAY,CAAC;AAEnB,IAAA,IAAI,oBAAoB,EAAE;AACxB,QAAA,IAAI,YAAY,EAAE;AAChB,YAAA,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE;gBACzB,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AACrD,aAAA;AACF,SAAA;AAAM,aAAA;YACL,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AACrD,SAAA;AACF,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,YAAY,KAAA,IAAA,IAAZ,YAAY,KAAA,KAAA,CAAA,GAAZ,YAAY,GAAI,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;AAC1E,IAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,cAAc,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,cAAc,CAAC,MAAM,CAAC;AAC5E,QAAA,cAAc,CAAC,OAAO,CAAC,kBAAkB,CAAC;IAC5C,MAAM,mBAAmB,GAAG,SAAS;AACnC,UAAE,CAAA,wBAAA,EAA2B,cAAc,CAAC,MAAM,CAAE,CAAA;AACpD,UAAG,cAAc,CAAC,UAAqB,CAAC;AAE1C,IAAA,MAAM,KAAK,GAAG,IAAI,SAAS,CACzB,mBAAmB,EACnB,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;;;IAIF,IAAI,CAAC,iBAAiB,EAAE;AACtB,QAAA,MAAM,KAAK,CAAC;AACb,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,iBAAiB,CAAC,UAAU,CAAC;AACvD,IAAA,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,aAAa,CAAC;IAE7D,IAAI;;;QAGF,IAAI,cAAc,CAAC,UAAU,EAAE;AAC7B,YAAA,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;AAC7C,YAAA,IAAI,WAAW,CAAC;AAChB,YAAA,IAAI,iBAAiB,EAAE;gBACrB,IAAI,kBAAkB,GAAQ,UAAU,CAAC;AACzC,gBAAA,IAAI,aAAa,CAAC,KAAK,IAAI,iBAAiB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBAC9E,kBAAkB;AAChB,wBAAA,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,CAAC,iBAAiB,CAAC,cAAe,CAAC,GAAG,EAAE,CAAC;AACvF,iBAAA;AACD,gBAAA,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAChD,iBAAiB,EACjB,kBAAkB,EAClB,2BAA2B,CAC5B,CAAC;AACH,aAAA;YAED,MAAM,aAAa,GAAQ,UAAU,CAAC,KAAK,IAAI,WAAW,IAAI,UAAU,CAAC;AACzE,YAAA,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;YAChC,IAAI,aAAa,CAAC,OAAO,EAAE;AACzB,gBAAA,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;AACvC,aAAA;AAED,YAAA,IAAI,iBAAiB,EAAE;AACrB,gBAAA,KAAK,CAAC,QAAS,CAAC,UAAU,GAAG,WAAW,CAAC;AAC1C,aAAA;AACF,SAAA;;AAGD,QAAA,IAAI,cAAc,CAAC,OAAO,IAAI,oBAAoB,EAAE;YAClD,KAAK,CAAC,QAAS,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAClE,oBAAoB,EACpB,cAAc,CAAC,OAAO,CAAC,MAAM,EAAE,EAC/B,4BAA4B,CAC7B,CAAC;AACH,SAAA;AACF,KAAA;AAAC,IAAA,OAAO,YAAiB,EAAE;AAC1B,QAAA,KAAK,CAAC,OAAO,GAAG,CAAA,OAAA,EAAU,YAAY,CAAC,OAAO,CAAA,gDAAA,EAAmD,cAAc,CAAC,UAAU,CAAA,2BAAA,CAA6B,CAAC;AACzJ,KAAA;AAED,IAAA,OAAO,EAAE,KAAK,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AAChD,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC,EACxC,IAAiC,EAAA;;AAEjC,IAAA,MAAM,YAAY,GAAG,CAAC,GAA6B,KAAoB;QACrE,MAAM,GAAG,GAAG,CAAU,OAAA,EAAA,GAAG,gDAAgD,iBAAiB,CAAC,UAAU,CAAA,CAAA,CAAG,CAAC;QACzG,MAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;AAClD,QAAA,MAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,CAClB,CAAC;AACF,QAAA,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAC3B,KAAC,CAAC;AAEF,IAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,iBAAiB,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,iBAAiB,CAAC,MAAM,CAAC;AAClF,QAAA,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,CAAC;AAC/C,IAAA,IAAI,CAAC,SAAS,IAAI,iBAAiB,CAAC,UAAU,EAAE;AAC9C,QAAA,MAAM,IAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;AAC1C,QAAA,MAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,MAAM,iBAAiB,GAAa,CAAC,WAAW;AAC9C,cAAE,EAAE;cACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,KAAK,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC;AACvE,QAAA,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;AAC9B,YAAA,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACjF;AACA,YAAA,OAAO,IAAI,OAAO,CAAwB,CAAC,OAAO,KAAI;gBACpD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,
CAAC,IAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC7B,aAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACxB,SAAA;AAAM,aAAA,IAAI,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;AAC3F,YAAA,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC;AACxB,iBAAA,IAAI,CAAC,CAAC,IAAI,KAAI;AACb,gBAAA,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;AACpC,gBAAA,OAAO,iBAAiB,CAAC;AAC3B,aAAC,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;AACxB,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C;;AC9XA;AAwBA;;AAEG;AACI,MAAM,uBAAuB,GAAqB;AACvD,IAAA,MAAM,EAAE,IAAI;CACb,CAAC;AAEF;;;;AAIG;AACG,SAAU,eAAe,CAAC,gBAAmC,EAAA;IACjE,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,gBAAgB,IAAI,uBAAuB,CAAC,CAAC;SAC9F;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,eAAgB,SAAQ,iBAAiB,CAAA;AACpD;;;;;;AAMG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACZ,gBAAkC,EAAA;AAEnD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFV,IAAgB,CAAA,gBAAA,GAAhB,gBAAgB,CAAkB;KAGpD;AAED;;;;;AAKG;IACI,MAAM,WAAW,CAAC,OAAwB,EAAA;QAC/C,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;QACjD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACzED;AAaA;;AAEG;AACH,MAAM,eAAe,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AAkBjC,MAAM,sBAAsB,GAAoB;AACrD,IAAA,eAAe,EAAE,IAAI;AACrB,IAAA,UAAU,EAAE,EAAE;CACf,CAAC;AAEF;;;;AAIG;AACa,SAAA,cAAc,CAAC,cAAc,GAAG,EAAE,EAAA;IAChD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAChE;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,cAAe,SAAQ,iBAAiB,CAAA;AACnD,IAAA,WAAA,CAAY,UAAyB,EAAE,OAA6B,EAAW,aAAa,EAAE,EAAA;AAC5F,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QADkD,IAAU,CAAA,UAAA,GAAV,UAAU,CAAK;KAE7F;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;AACpB,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAK,cAAc,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC;KAC1D;AACF,CAAA;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB,EAAA;AAEtB,IAAA,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC;IACrC,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;AACxD,IAAA,IACE,cAAc;SACb,MAAM,KAAK,GAAG;AACb,aAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;AAC5D,aAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aAC3D,MAAM,KAAK,GAAG,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;SAChB,CAAC,MAAM,CAAC,UAAU,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,EAC1D;QACA,MAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AAC9C,QAAA,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;AAChC,QAAA,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;QAIjC,IAAI,MAAM,KAAK,GAAG,EAAE;AAClB,YAAA,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;AACrB,SAAA;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;AACpB,aAAA,IAAI,CAAC,CAAC,GAAG,KAAK,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,CAAC,CAAC;AACnE,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC;;ACpGA;AACA;AAIO,MAAM,0BAA0B,GAAG,CAAC,CAAC;AAC5C;AACO,MAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,MAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEpD,SAAU,QAAQ,CAAC,CAAU,EAAA;AACjC,IAAA,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;AAC/B,CAAC;AAaD;;;;;;;;AAQG;AACG,SAAU,WAAW,CACzB,UAAkB,EAClB,SAA4E,EAC5E,SAAoB,EACpB,QAAgC,EAChC,KAAkB,EAAA;AAElB,IAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,KAAK,CAAC,EAAE;AAC/B,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;AAED,IAAA,OAAO,SAAS,CAAC,UAAU,GAAG,UAAU,CAAC;AAC3C,CAAC;AAED;;;;;;;AAOG;SACa,eAAe,CAC7B,YAA2F,EAC3F,YAAuB,EAAE,UAAU,EAAE,CAAC,EAAE,aAAa,EAAE,CAAC,EAAE,EAC1D,GAAgB,EAAA;AAEhB,IAAA,IAAI,GAAG,EAAE;QACP,IAAI
,SAAS,CAAC,KAAK,EAAE;AACnB,YAAA,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;AAClC,SAAA;AAED,QAAA,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;AACvB,KAAA;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;AAGvB,IAAA,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;AAC/D,IAAA,MAAM,gBAAgB,GACpB,YAAY,CAAC,aAAa,GAAG,GAAG;AAChC,QAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,YAAY,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACjE,cAAc,IAAI,gBAAgB,CAAC;AAEnC,IAAA,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,YAAY,CAAC,gBAAgB,GAAG,cAAc,EAC9C,YAAY,CAAC,gBAAgB,CAC9B,CAAC;AAEF,IAAA,OAAO,SAAS,CAAC;AACnB;;ACtFA;AA0BA;;;;;AAKG;SACa,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EAAA;IAEzB,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACSC,2BAMX;AAND,CAAA,UAAY,SAAS,EAAA;AACnB;;;AAGG;AACH,IAAA,SAAA,CAAA,SAAA,CAAA,aAAA,CAAA,GAAA,CAAA,CAAA,GAAA,aAAW,CAAA;AACb,CAAC,EANWA,iBAAS,KAATA,iBAAS,GAMpB,EAAA,CAAA,CAAA,CAAA;AA8BM,MAAM,mBAAmB,GAAiB;AAC/C,IAAA,UAAU,EAAE,0BAA0B;AACtC,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,iBAAiB,EAAE,iCAAiC;CACrD,CAAC;AAEF;;AAEG;AACG,MAAO,sBAAuB,SAAQ,iBAAiB,CAAA;AAc3D;;;;;;;AAOG;IACH,WACE,CAAA,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EAAA;AAEzB,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;AACjF,QAAA,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;AAC7F,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;KACvC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAKC,OAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aAClD,KAAK,CAAC,CAAC,KAAK,KAAKA,OAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC;KAC7E;AACF,CAAA;AAED,eAAeA,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB,EAAA;IAEzB,SAAS,iBAAiB,CAAC,aAAqC,EAAA;QAC9D,MAAM,UAAU,GAAG,aAAa,KAAA,IAAA,IAAb,aAAa,KAAb,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,aAAa,CAAE,MAAM,CAAC;QACzC,IAAI,UAAU,KAAK,GAAG,KAAI,QAAQ,KAAR,IAAA,IAAA,QAAQ,uBAAR,QAAQ,CAAE,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA,EAAE;AACtF,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,IACE,UAAU,KAAK,SAAS;AACxB,aAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;AACxC,YAAA,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;AACA,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;IAED,SAAS,GAAG,eAAe,CACzB;QACE,aAAa,EAAE,MAAM,CAAC,aAAa;AACnC,QAAA,gBAAgB,EAAE,CAAC;QACnB,gBAAgB,EAAE,MAAM,CAAC,gBAAgB;AAC1C,KAAA,EACD,SAAS,EACT,YAAY,CACb,CAAC;IAEF,MAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;AAC1F,IAAA,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE;QACxF,MAAM,CAAC,IAAI,CAAC,CAAA,oBAAA,EAAuB,SAAS,CAAC,aAAa,CAAE,CAAA,CAAC,CAAC;QAC9D,IAAI;AACF,YAAA,MAAMC,cAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AACrC,YAAA,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;YAClE,OAAOD,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC;AAC/C,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,OAAOA,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;AACzD,SAAA;AACF,KAAA;AAAM,SAAA,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;AAEjD,QAAA,MAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;AACJ,QAAA,MAAM,GAAG,CAAC;AACX,KAAA;AAAM,SAAA;AACL,QAAA,OAAO,QAAQ,CAAC;AACjB,KAAA;AACH
;;AC1MA;AA8CA;;;;AAIG;AACa,SAAA,SAAS,CAAC,cAAA,GAAmC,EAAE,EAAA;IAC7D,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAC3D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,SAAU,SAAQ,iBAAiB,CAAA;AA4C9C,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,UACEE,QAAM,GAAGC,MAAU,CAAC,IAAI,EACxB,kBAAkB,GAAG,EAAE,EACvB,sBAAsB,GAAG,EAAE,MACP,EAAE,EAAA;AAExB,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,MAAM,GAAGD,QAAM,CAAC;AACrB,QAAA,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,CAAC,CAAC;KAChF;AApDD;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC;KAC1C;AAED;;;;;;AAMG;IACH,IAAW,kBAAkB,CAAC,kBAA+B,EAAA;AAC3D,QAAA,IAAI,CAAC,SAAS,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;KACxD;AAED;;;;AAIG;AACH,IAAA,IAAW,sBAAsB,GAAA;AAC/B,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,sBAAsB,CAAC;KAC9C;AAED;;;;AAIG;IACH,IAAW,sBAAsB,CAAC,sBAAmC,EAAA;AACnE,QAAA,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,sBAAsB,CAAC;KAChE;AAgBM,IAAA,WAAW,CAAC,OAAwB,EAAA;AACzC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;YAAE,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAEvE,QAAA,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QACzB,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,KAAK,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;KAC7F;AAEO,IAAA,UAAU,CAAC,OAAwB,EAAA;AACzC,QAAA,IAAI,CAAC,MAAM,CAAC,CAAA,SAAA,EAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA,CAAE,CAAC,CAAC;KAC7D;AAEO,IAAA,WAAW,CAAC,QAA+B,EAAA;QACjD,IAAI,CAAC,MAAM,CAAC,CAAA,sBAAA,EAAyB,QAAQ,CAAC,MAAM,CAAE,CAAA,CAAC,CAAC;AACxD,QAAA,IAAI,CAAC,MAAM,CAAC,CAAY,SAAA,EAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA,CAAE,CAAC,CAAC;AACrE,QAAA,OAAO,QAAQ,CAAC;KACjB;AACF;;ACxID;AACA;AAqDA;;;;AAIG;AACG,SAAU,0BAA0B,CAAC,SAA6B,EAAA;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAEe,SAAA,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc,EAAA;AAEd,IAAA,IAAI,MAAc,CAAC;AACnB,IAAA,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;AACxB,KAAA;AAAM,SAAA,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;AACvC,QAAA,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;AACjC,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB;;AC5EA;AAiGA;;;AAGG;AACG,SAAU,4BAA4B,CAAC,aAA4B,EAAA;AACvE,IAAA,MAAM,MAAM,GAAG,IAAI,GAAG,EAAU,CAAC;AACjC,IAAA,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,MAAM,iBAAiB,GAAG,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QAC9D,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AAChC,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB;;ACjHA;SAOgB,sBAAsB,GAAA;AACpC,IAAA,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;SAEe,uBAAuB,GAAA;AACrC,IAAA,MAAM,WAAW,GAAG;AAClB,QAAA,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;AAEF,IAAA,MAAM,MAAM,GAAG;AACb,QAAA,GAAG,EAAE,IAAI;AACT,QAAA,KAAK,EAAE,CAAI,CAAA,EAAAE,aAAE,CAAC,IAAI,EAAE,CAAI,CAAA,EAAAA,aAAE,CAAC,IAAI,EAAE,CAAI,CAAA,EAAAA,aAAE,CAAC,OAAO,EAAE,CAAG,CAAA,CAAA;KACrD,CAAC;AAEF,IAAA,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B;;ACvBA;AA+BA,SAAS,cAAc,GAAA;AACrB,IAAA,MAAM,aAAa,GAAG;AACpB,QAAA,GAAG,EAAE,WAAW;QAChB,KAAK,EAAE,SAAS,CAAC,eAAe;KACjC,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAY,GAAG,GAAG,EAClB,cAAc,GAAG,GAAG,EAAA;AAEpB,IAAA,OAAO,aAAa;AACjB,SAAA,GAAG,CAAC,CAAC,IAAI,KAAI;AACZ,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,CAAA,EAAG,cAAc,CAAG,EAAA,IAAI,CAAC,KAAK,CAAA,CAAE,GAAG,EAAE,CAAC;AACjE,QAAA,OAAO,GAAG,IAAI,CAAC,GAAG,CAAG,EAAA,KAAK,EAAE,CAAC;AAC/B,KAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC
;AACxB,CAAC;AAEM,MAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE;;;AAGG;SACa,wBAAwB,GAAA;AACtC,IAAA,MAAM,WAAW,GAAG,cAAc,EAAE,CAAC;AACrC,IAAA,MAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,MAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;AAC/E,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;AAIG;AACG,SAAU,eAAe,CAAC,aAA6B,EAAA;AAC3D,IAAA,MAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,KAAK,IAAI;UAC3E,sBAAsB,EAAE;AAC1B,UAAE,aAAa,CAAC,GAAG,CAAC;AACxB,IAAA,MAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,KAAK,SAAS,IAAI,aAAa,CAAC,KAAK,KAAK,IAAI;UAC/E,wBAAwB,EAAE;AAC5B,UAAE,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,eAAgB,SAAQ,iBAAiB,CAAA;AACpD,IAAA,WAAA,CACW,WAA0B,EAC1B,QAA8B,EAC7B,SAAiB,EACjB,WAAmB,EAAA;AAE7B,QAAA,KAAK,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;QALpB,IAAW,CAAA,WAAA,GAAX,WAAW,CAAe;QAC1B,IAAQ,CAAA,QAAA,GAAR,QAAQ,CAAsB;QAC7B,IAAS,CAAA,SAAA,GAAT,SAAS,CAAQ;QACjB,IAAW,CAAA,WAAA,GAAX,WAAW,CAAQ;KAG9B;AAED,IAAA,WAAW,CAAC,OAAwB,EAAA;AAClC,QAAA,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AAED;;AAEG;AACH,IAAA,kBAAkB,CAAC,OAAwB,EAAA;AACzC,QAAA,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;AACpB,YAAA,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;AACrC,SAAA;AAED,QAAA,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;AAC5D,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AACvD,SAAA;KACF;AACF;;ACtHD;AACA;AAEA;;AAEG;AACSC,uCAqBX;AArBD,CAAA,UAAY,qBAAqB,EAAA;AAC/B;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,GAAS,CAAA;AACT;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,GAAS,CAAA;AACT;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,IAAU,CAAA;AACV;;AAEG;AACH,IAAA,qBAAA,CAAA,OAAA,CAAA,GAAA,GAAW,CAAA;AACX;;AAEG;AACH,IAAA,qBAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACjB,CAAC,EArBWA,6BAAqB,KAArBA,6BAAqB,GAqBhC,EAAA,CAAA,CAAA;;AC3BD;AA8CA;AACO,MAAM,sBAAsB,GAAuB;AACxD,IAAA,uBAAuB,EAAE,IAAI;AAC7B,IAAA,iBAAiB,EAAE,IAAI;AACvB,IAAA,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC;CACjC,CAAC;AAEF;;;;;;;;;;;;AAYG;AACH,eAAe,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB,EAAA;;;AAInB,IAAA,eAAe,iBAAiB,GAAA;AAC9B,QAAA,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;AAC/B,aAAA;YAAC,OAAM,EAAA,EAAA;AACN,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;;YAG1C,IAAI,UAAU,KAAK,IAAI,EAAE;AACvB,gBAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,OAAO,UAAU,CAAC;AACnB,SAAA;KACF;AAED,IAAA,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;AACrB,QAAA,MAAMJ,cAAK,CAAC,iBAAiB,CAAC,CAAC;AAE/B,QAAA,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;AACnC,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;AAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD,EAAA;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;AAErC,IAAA,MAAM,OAAO,GACR,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,sBAAsB,CACtB,EAAA,kBAAkB,CACtB,CAAC;AAEF;;;AAGG;AACH,IAAA,MAAM,MAAM,GAAG;AACb;;AAEG;AACH,QAAA,IAAI,YAAY,GAAA;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;SAC/B;AACD;;;AAGG;AACH,QAAA,IAAI,aAAa,GAAA;;AACf,YAAA,QACE,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,CAAA,EAAA,GAAA,KAAK,KAAL,IAAA,IAAA,KAAK,uBAAL,KAAK,CAAE,kBAAkB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,CAAC,IAAI,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzE;SACH;AACD;;;AAGG;AACH,QAAA,IAAI,WAAW,GAAA;AACb,YAAA,QACE,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC
,GAAG,EAAE,EACzF;SACH;KACF,CAAC;AAEF;;;AAGG;IACH,SAAS,OAAO,CAAC,eAAgC,EAAA;;AAC/C,QAAA,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;AAExB,YAAA,MAAM,iBAAiB,GAAG,MACxB,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;;;AAI/C,YAAA,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;;AAEzB,YAAA,CAAA,EAAA,GAAA,KAAK,KAAA,IAAA,IAAL,KAAK,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAL,KAAK,CAAE,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,IAAI,CAAC,GAAG,EAAE,CACxC;AACE,iBAAA,IAAI,CAAC,CAAC,MAAM,KAAI;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;AACf,gBAAA,OAAO,KAAK,CAAC;AACf,aAAC,CAAC;AACD,iBAAA,KAAK,CAAC,CAAC,MAAM,KAAI;;;;gBAIhB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;AACb,gBAAA,MAAM,MAAM,CAAC;AACf,aAAC,CAAC,CAAC;AACN,SAAA;AAED,QAAA,OAAO,aAAqC,CAAC;KAC9C;AAED,IAAA,OAAO,OAAO,YAA6B,KAA0B;;;;;;;;;;QAWnE,IAAI,MAAM,CAAC,WAAW;AAAE,YAAA,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;AACvB,SAAA;AAED,QAAA,OAAO,KAAoB,CAAC;AAC9B,KAAC,CAAC;AACJ,CAAC;AAED;AAEA;;;;;;AAMG;AACa,SAAA,+BAA+B,CAC7C,UAA2B,EAC3B,MAAyB,EAAA;;IAGzB,MAAM,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,iBAAiB,CAAC;IAEvE,MAAM,+BAAgC,SAAQ,iBAAiB,CAAA;QAC7D,WAAmB,CAAA,UAAyB,EAAE,OAA6B,EAAA;AACzE,YAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC5B;QAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,QAAQ,CAAC;gBAC/B,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,gBAAA,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,iBAAA;AACF,aAAA,CAAC,CAAC;AACH,YAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,CAAA,OAAA,EAAU,KAAK,CAAA,CAAE,CAAC,CAAC;YACpF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;SAClD;AACF,KAAA;IAED,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,+BAA+B,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACjE;KACF,CAAC;AACJ;;ACxQA;AAYA;;;AAGG;SACa,kCAAkC,GAAA;IAChD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,kCAAkC,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAED;;;AAGG;AACG,MAAO,kCAAmC,SAAQ,iBAAiB,CAAA;AACvE;;;;;AAKG;;;IAGH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;;;;AAKG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,OAAO,CAAC,kBAAkB,GAAG,KAAK,CAAC;QACnC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACnDD;AAYA;;;AAGG;AACa,SAAA,6BAA6B,CAC3C,mBAAmB,GAAG,wBAAwB,EAAA;IAE9C,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAEK,MAAO,6BAA8B,SAAQ,iBAAiB,CAAA;AAClE,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACrB,oBAA4B,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFnB,IAAoB,CAAA,oBAAA,GAApB,oBAAoB,CAAQ;KAGrC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;AACxD,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;AACnE,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACzCD;AAMA,IAAI,gBAAwC,CAAC;SAE7B,0BAA0B,GAAA;IACxC,IAAI,CAAC,gBAAgB,EAAE;AACrB,QAAA,gBAAgB,GAAG,IAAIK,mBAAiB,EAAE,CAAC;AAC5C,KAAA;AAED,IAAA,OAAO,gBAAgB,CAAC;AAC1B;;ACdA;SAegB,YAAY,GAAA;IAC1B,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC9C;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACH,MAAM,YAAa,SAAQ,iBAAiB,CAAA;AAC1C;;AAEG;IACH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;AAEG;IACI,MAAM,WAAW,CAAC,OAAwB,
EAAA;;AAE/C,QAAA,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACpE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACtC,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBACvB,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AACzE,aAAA;AACF,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;AC/CD;AAgBA;;;AAGG;AACI,MAAM,iBAAiB,GAAa,EAAE,CAAC;AAC9C,IAAI,iBAAiB,GAAY,KAAK,CAAC;AAEvC;AACA,MAAM,iBAAiB,GAAyB,IAAI,GAAG,EAAE,CAAC;AAE1D,SAAS,yBAAyB,GAAA;IAChC,IAAI,CAAC,OAAO,EAAE;AACZ,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,MAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;AAE5D,IAAA,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;;;;AAIG;AACH,SAAS,UAAU,CACjB,GAAW,EACX,WAAqB,EACrB,WAAkC,EAAA;AAElC,IAAA,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5B,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;IACD,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,WAAW,KAAX,IAAA,IAAA,WAAW,KAAX,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,WAAW,CAAE,GAAG,CAAC,IAAI,CAAC,EAAE;AAC1B,QAAA,OAAO,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;AAC9B,KAAA;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;AAC3B,IAAA,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;AACjC,QAAA,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;;;AAGtB,YAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;AACvB,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;AACvB,iBAAA;AACF,aAAA;AACF,SAAA;AAAM,aAAA;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;AACvB,aAAA;AACF,SAAA;AACF,KAAA;IACD,WAAW,KAAA,IAAA,IAAX,WAAW,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAX,WAAW,CAAE,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;AACvC,IAAA,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;AAEG;SACa,WAAW,GAAA;IACzB,MAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,iBAAiB,GAAG,IAAI,CAAC;AACzB,IAAA,IAAI,OAAO,EAAE;AACX,QAAA,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,EAAE,CAAC;aAC1B,MAAM,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,MAAM,CAAC,CAAC;AAClC,KAAA;AAED,IAAA,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;AAIG;AACG,SAAU,uBAAuB,CAAC,QAAiB,EAAA;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;AACb,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;AACF,KAAA;AAED,IAAA,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,cAAc,EAAE,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5E,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;AACnD,IAAA,MAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,GAAG,EAAE,CAAC;IAC1E,OAAO;AACL,QAAA,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ;QACR,QAAQ;KACT,CAAC;AACJ,CAAC;AAED;;;;;;AAMG;AACa,SAAA,WAAW,CACzB,aAA6B,EAC7B,OAGC,EAAA;IAED,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;AAC3C,KAAA;IACD,IAAI,CAAC,iBAAiB,EAAE;AACtB,QAAA,iBAAiB,CAAC,IAAI,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;AAC1C,KAAA;IACD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,oBAA0C,KAAI;AAChF,YAAA,OAAO,IAAI,WAAW,CACpB,UAAU,EACV,oBAAoB,EACpB,aAAc,EACd,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,iBAAiB,CAC3B,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,GAAW,EAAA;IAKrC,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACjC,IAAA,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;AAClB,QAAA,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;AAChC,KAAA;IAED,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACvC,IAAA,MAAM,SAAS,GAA
G,WAAW,KAAK,CAAC,CAAC,GAAG,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC;IAC3D,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACrC,IAAA,MAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;AACtC,IAAA,MAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,GAAG,IAAI,CAAC;AACpE,IAAA,MAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;AAC1E,IAAA,MAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ;QACR,QAAQ;QACR,cAAc;KACf,CAAC;AACJ,CAAC;AAEK,MAAO,WAAY,SAAQ,iBAAiB,CAAA;AAChD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACtB,aAA4B,EAC3B,iBAA4B,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAHpB,IAAa,CAAA,aAAA,GAAb,aAAa,CAAe;QAC3B,IAAiB,CAAA,iBAAA,GAAjB,iBAAiB,CAAW;KAGrC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;;QACzC,IACE,CAAC,OAAO,CAAC,aAAa;YACtB,CAAC,UAAU,CACT,OAAO,CAAC,GAAG,EACX,CAAA,EAAA,GAAA,IAAI,CAAC,iBAAiB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,iBAAiB,EAC3C,IAAI,CAAC,iBAAiB,GAAG,SAAS,GAAG,iBAAiB,CACvD,EACD;AACA,YAAA,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;AAC5C,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACrMD;AAcgB,SAAA,oBAAoB,CAAC,YAAY,GAAG,EAAE,EAAA;IACpD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAEK,MAAO,oBAAqB,SAAQ,iBAAiB,CAAA;AACzD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACpB,gBAAgB,EAAE,EAAA;AAE3B,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFlB,IAAa,CAAA,aAAA,GAAb,aAAa,CAAK;KAG5B;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAK,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;KAClE;AACF,CAAA;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B,EAAA;AAE/B,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;AACxE,QAAA,IAAI,MAAM,EAAE;YACV,MAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;AAI3C,iBAAA,KAAK,CAAC,MAAM,KAAK,CAAC;AAClB,iBAAA,IAAI,CAAC,CAAC,kBAAkB,KAAI;AAC3B,gBAAA,IAAI,kBAAkB,EAAE;;;AAGtB,oBAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEC,YAAkB,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;AACxD,iBAAA;AACD,gBAAA,OAAO,QAAQ,CAAC;aACjB,CAAC,EACJ;AACH,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;AAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAW,GAAG,KAAK,EAAA;AAEnB,IAAA,MAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;AAC5D,IAAA,IAAI,WAAW,EAAE;AACf,QAAA,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;AACtC,KAAA;;;AAID,IAAA,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;IAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;AAE1E,IAAA,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;AAKG;AACH,SAAS,yBAAyB,CAAC,IAAY,EAAA;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;AACzB,IAAA,IAAI,IAAI,EAAE;QACR,IAAI;AACF,YAAA,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;;AAElB,SAAA;AACD,QAAA,IACE,YAAY;AACZ,YAAA,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;AACvB,YAAA,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;AACA,YAAA,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC/D,YAAA,IAAI,QAAQ,EAAE;AACZ,gBAAA,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;AACzB,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;AAKG;AACH,SAAS,sBAAsB,CAAC,GAAW,EAAA;AACzC,IAAA,IAAI,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG
,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;AAChE,IAAA,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;AAC3B,QAAA,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;AACtB,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,GAAG,CAAA,CAAA,CAAG,CAAC,CAAC;AAClF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;AAOG;AACH,eAAe,UAAU,CACvB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC,EAAA;AAEhC,IAAA,MAAM,OAAO,GAAG,CAAA,EAAG,SAAS,CAAa,UAAA,EAAA,QAAQ,kCAAkC,CAAC;AACpF,IAAA,MAAM,MAAM,GAAG,CAAA,EAAG,SAAS,CAAa,UAAA,EAAA,QAAQ,yBAAyB,CAAC;AAC1E,IAAA,MAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;AACzD,IAAA,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;AAC3B,IAAA,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AAClE,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;AAC3B,QAAA,MAAM,IAAI,KAAK,CAAC,uBAAuB,QAAQ,CAAA,yCAAA,CAA2C,CAAC,CAAC;AAC7F,KAAA;IACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;AAChE,CAAC;AAED;;;;;;;;AAQG;AACH,eAAe,qBAAqB,CAClC,MAA4B,EAC5B,GAAW,EACX,eAAgC,EAAA;AAEhC,IAAA,MAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;AAC9D,IAAA,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;AACrB,IAAA,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AAC7D,IAAA,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,CAAC;AAC3B,IAAA,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;AACrF,QAAA,OAAO,IAAI,CAAC;AACb,KAAA;AAAM,SAAA;QACL,MAAMN,cAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,CAAC;QACzC,OAAO,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;AAC5D,KAAA;AACH;;AClMA;AAaA;;;;AAIG;AACG,SAAU,aAAa,CAC3B,sBAAgD,EAAA;IAEhD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;SACvE;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,aAAc,SAAQ,iBAAiB,CAAA;AAClD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACtB,sBAAgD,EAAA;AAEvD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFpB,IAAsB,CAAA,sBAAA,GAAtB,sBAAsB,CAA0B;KAGxD;AAED,IAAA,WAAW,CAAC,OAAwB,EAAA;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KACzD;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,KAChD,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAC1C,CAAC;KACH;AACF;;ACjDD;AAwBA;;;;;;;AAOG;AACG,SAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB,EAAA;IAEzB,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;;;;;AAMG;AACG,MAAO,sBAAuB,SAAQ,iBAAiB,CAAA;IAM3D,WACE,CAAA,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB,EAAA;AAEzB,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;AACjF,QAAA,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;AAC7F,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;AACtC,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;KACvC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,KAAK,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;KAClE;AACF,CAAA;AAED,eAAe,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB,EAAA;IAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;AAEpD,IAAA,SAAS,iBAAiB,CAAC,SAAiC,EAAE,KAAkB,EAAA;AAC9E,QAAA,IACE,KAAK;AACL,YAAA,KAAK,CAAC,IAAI;AACV,aAAC,KAAK,CAAC,IAAI,KAAK,WAAW;gBACzB,KAAK,CAAC,IAAI,KAAK,iBAAiB;gBAChC,KAAK,CAAC,IAAI,KAAK,cAAc;gBAC7B,KAAK,CAAC,IAAI,KAAK,YAAY;AA
C3B,gBAAA,KAAK,CAAC,IAAI,KAAK,QAAQ,CAAC,EAC1B;AACA,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AAED,IAAA,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,iBAAiB,EAAE,GAAG,CAAC,EAAE;;QAExF,IAAI;AACF,YAAA,MAAMA,cAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;AACxD,SAAA;AAAC,QAAA,OAAO,SAAc,EAAE;AACvB,YAAA,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;AACxE,SAAA;AACF,KAAA;AAAM,SAAA;AACL,QAAA,IAAI,GAAG,EAAE;;YAEP,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;AACxC,SAAA;AACD,QAAA,OAAO,iBAAiB,CAAC;AAC1B,KAAA;AACH;;AClIA;AACA;AAEA;;AAEG;AACI,MAAM,8BAA8B,GAAG,CAAC;;ACN/C;AAoBA,MAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AAExD;;;;;;;;;AASG;SACa,qBAAqB,GAAA;IACnC,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACvD;KACF,CAAC;AACJ,CAAC;AAED,MAAM,oBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;AAQG;AACG,MAAO,qBAAsB,SAAQ,iBAAiB,CAAA;AAI1D,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,eAAiC,EAAA;AAEjC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAPrB,IAAe,CAAA,eAAA,GAAG,CAAC,CAAC;QAQ1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,IAAI,CAAC,uBAAuB,CAAC;KACxE;IAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,QAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC;AACzE,QAAA,IACE,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe;AAC/C,YAAA,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,kBAAkB,EAClD;AACA,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAM,aAAA;YACL,OAAO,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;AACpD,SAAA;KACF;AAEO,IAAA,MAAM,uBAAuB,CACnC,WAA4B,EAC5B,YAAmC,EAAA;;AAEnC,QAAA,MAAM,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;AAEF,QAAA,IAAI,gBAAgB,EAAE;YACpB,MAAM,SAAS,GACb,qBAAqB,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,CAAC;AAChE,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;gBAE1B,MAAMA,cAAK,CAAC,SAAS,EAAE;oBACrB,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,oBAAA,aAAa,EAAE,oBAAoB;AACpC,iBAAA,CAAC,CAAC;AAEH,gBAAA,IAAI,MAAA,WAAW,CAAC,WAAW,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,OAAO,EAAE;AACpC,oBAAA,MAAM,IAAIV,0BAAU,CAAC,oBAAoB,CAAC,CAAC;AAC5C,iBAAA;AAED,gBAAA,IAAI,IAAI,CAAC,eAAe,GAAG,8BAA8B,EAAE;AACzD,oBAAA,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AACtC,iBAAA;AAAM,qBAAA;oBACL,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAClD,iBAAA;AACF,aAAA;AACF,SAAA;AAED,QAAA,OAAO,YAAY,CAAC;KACrB;IAEM,OAAO,qBAAqB,CAAC,WAAmB,EAAA;AACrD,QAAA,MAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;AAChD,QAAA,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;AACrC,YAAA,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;AACrE,SAAA;AAAM,aAAA;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;AACnC,SAAA;KACF;IAEM,OAAO,yBAAyB,CAAC,WAAmB,EAAA;QACzD,IAAI;AACF,YAAA,MAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,MAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC7C,YAAA,MAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;AAExB,YAAA,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;AAC9C,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;KACF;AACF;;AClID;AAqBA,MAAM,UAAU,GAAGiB,8BAAkB,CAAC;AACpC,IAAA,aAAa,EAAE,EAAE;AACjB,IAAA,SAAS,EAAE,EAAE;AACd,CAAA,CAAC,CAAC;AAYH;;;;AAIG;AACa,SAAA,aAAa,CAAC,cAAA,GAAuC,EAAE,EAAA;IACrE,OAAO;QACL,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;YAC7D,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAC/D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,aAAc,SAAQ,iBAAiB,CAAA;AAGlD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,cAAoC,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,SAAS,GAAG,cAAc,CAAC,SAAS,CAAC;KAC3C;IAEM,MAAM,WAAW,CAAC,OAAwB,EAAA;AAC/C,QAAA,IAAI,CAAC,OAAO,CA
AC,cAAc,EAAE;YAC3B,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;QAEzC,IAAI,CAAC,IAAI,EAAE;YACT,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC7D,YAAA,IAAI,CAAC,kBAAkB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;AACxC,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;AAChC,YAAA,MAAM,GAAG,CAAC;AACX,SAAA;KACF;AAED,IAAA,aAAa,CAAC,OAAwB,EAAA;;QACpC,IAAI;;;YAGF,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC,CAAA,KAAA,EAAQ,OAAO,CAAC,MAAM,CAAA,CAAE,EAAE;AACpD,gBAAA,cAAc,EAAE;oBACd,WAAW,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACL,OAAe,CAAC,WAAW,CAAA,EAAA,EAC/B,IAAI,EAAEC,oBAAQ,CAAC,MAAM,EACtB,CAAA;oBACD,cAAc,EAAE,OAAO,CAAC,cAAc;AACvC,iBAAA;AACF,aAAA,CAAC,CAAC;;AAGH,YAAA,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACvB,IAAI,CAAC,GAAG,EAAE,CAAC;AACX,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;AAED,YAAA,MAAM,oBAAoB,GAAG,CAAA,EAAA,GAAA,OAAO,CAAC,cAAc,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC;AAE1F,YAAA,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;AAC5C,gBAAA,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,oBAAoB,CAAC,CAAC;AACzD,aAAA;YAED,IAAI,CAAC,aAAa,CAAC;gBACjB,aAAa,EAAE,OAAO,CAAC,MAAM;gBAC7B,UAAU,EAAE,OAAO,CAAC,GAAG;gBACvB,SAAS,EAAE,OAAO,CAAC,SAAS;AAC7B,aAAA,CAAC,CAAC;YAEH,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;AACtD,aAAA;;AAGD,YAAA,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;AACvC,YAAA,MAAM,iBAAiB,GAAGC,gCAAoB,CAAC,WAAW,CAAC,CAAC;AAC5D,YAAA,IAAI,iBAAiB,IAAIC,8BAAkB,CAAC,WAAW,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,iBAAiB,CAAC,CAAC;AACtD,gBAAA,MAAM,UAAU,GAAG,WAAW,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC;;AAEhF,gBAAA,IAAI,UAAU,EAAE;oBACd,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;AAC/C,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACrF,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;KACF;IAEO,eAAe,CAAC,IAAU,EAAE,GAAQ,EAAA;QAC1C,IAAI;YACF,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,GAAG,CAAC,OAAO;AACrB,aAAA,CAAC,CAAC;YAEH,IAAI,GAAG,CAAC,UAAU,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;AACvD,aAAA;YACD,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACtF,SAAA;KACF;IAEO,kBAAkB,CAAC,IAAU,EAAE,QAA+B,EAAA;QACpE,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;YACvD,MAAM,gBAAgB,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;AACjE,YAAA,IAAI,gBAAgB,EAAE;AACpB,gBAAA,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACzD,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,EAAE;AACxB,aAAA,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACtF,SAAA;KACF;AACF;;AC1KD;AAkKA;;AAEG;MACU,aAAa,CAAA;AAsBxB;;;;AAIG;AACH,IAAA,WAAA,CACE,WAAwD;;IAExD,OAA8B,EAAA;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,CAAC;QACtE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;AAElF,QAAA,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;AACjD,YAAA,MAAM,CAAC,IAAI,CAAC,8CAA8C,CAAC,CAAC;AAC5D,YAA
A,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;AACzD,SAAA;AAAM,aAAA;YACL,IAAI,iBAAiB,GAAqC,SAAS,CAAC;AACpE,YAAA,IAAIC,0BAAiB,CAAC,WAAW,CAAC,EAAE;AAClC,gBAAA,MAAM,CAAC,IAAI,CACT,sFAAsF,CACvF,CAAC;;;;;;;gBAOF,MAAM,oBAAoB,GAA+B,MAAK;oBAC5D,IAAI,wBAAwB,GAAqC,SAAS,CAAC;;oBAE3E,MAAM,aAAa,GAAG,IAAI,CAAC;oBAC3B,MAAM,oBAAoB,GAAG,OAAO,CAAC;oBACrC,OAAO;wBACL,MAAM,CAAC,UAAyB,EAAE,aAAmC,EAAA;4BACnE,MAAM,gBAAgB,GAAG,mBAAmB,CAC1C,oBAAoB,EACpB,aAAa,CAAC,OAAO,CACtB,CAAC;4BAEF,IAAI,CAAC,gBAAgB,EAAE;AACrB,gCAAA,MAAM,IAAI,KAAK,CACb,CAAA,iKAAA,CAAmK,CACpK,CAAC;AACH,6BAAA;AAED,4BAAA,IAAI,wBAAwB,KAAK,SAAS,IAAI,wBAAwB,KAAK,IAAI,EAAE;AAC/E,gCAAA,wBAAwB,GAAG,+BAA+B,CACxD,WAAW,EACX,gBAAgB,CACjB,CAAC;AACH,6BAAA;4BAED,OAAO,wBAAwB,CAAC,MAAM,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;yBACnE;qBACF,CAAC;AACJ,iBAAC,CAAC;gBAEF,iBAAiB,GAAG,oBAAoB,EAAE,CAAC;AAC5C,aAAA;iBAAM,IAAI,WAAW,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;AACvE,gBAAA,MAAM,CAAC,IAAI,CAAC,kEAAkE,CAAC,CAAC;AAChF,gBAAA,iBAAiB,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC;AAChD,aAAA;AAAM,iBAAA,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;AAC5D,gBAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,aAAA;AAED,YAAA,MAAM,CAAC,IAAI,CAAC,+CAA+C,CAAC,CAAC;AAC7D,YAAA,sBAAsB,GAAG,mCAAmC,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;YACzF,IAAI,OAAO,CAAC,sBAAsB,EAAE;;;gBAGlC,MAAM,yBAAyB,GAC7B,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;AACzD,gBAAA,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;AACpD,iBAAA;AACF,aAAA;AACF,SAAA;AACD,QAAA,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;KACvD;AAED;;AAEG;AACH,IAAA,WAAW,CAAC,OAAgD,EAAA;AAC1D,QAAA,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;AAC5E,YAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,SAAA;AAED,QAAA,IAAI,WAA4B,CAAC;QACjC,IAAI;AACF,YAAA,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;AACvB,aAAA;AAAM,iBAAA;AACL,gBAAA,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;AAChC,gBAAA,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAC5C,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAC9B,SAAA;AAED,QAAA,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3E,YAAA,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;AACjE,gBAAA,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,MAAM,oBAAoB,CACxB,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B,EAAA;;AAE/B,QAAA,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;AACpD,YAAA,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;AACtC,YAAA,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;AACxC,SAAA;QAED,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,iBAAiB,CAAC;AACxE,QAAA,MAAM,WAAW,GAAoB,IAAI,WAAW,EAAE,CAAC;AAEvD,QAAA,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,MAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;AACZ,gBAAA,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;AACH,aAAA;AAED,YAAA,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;AAC9C,YAAA,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,MAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;AACtB,gBAAA,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;AAC3C,aAAA;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;AACzE,gBAAA,KAAK,MAAM,YAAY,IAAI,aAAa,CAAC,aAAa,EAAE;AACtD,oBAAA,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACj
B,0BAA0B,CAAC,YAAY,CAAC,EACxC,iBAAiB,CAClB,CAAC;AACF,oBAAA,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;AAC9B,wBAAA,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;AAC3D,qBAAA;AACD,oBAAA,UAAU,CAAC,UAAU,CACnB,IAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,GAAG,EACrF,iBAAiB,CAClB,CAAC;AACH,iBAAA;AACF,aAAA;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;AAC7E,gBAAA,KAAK,MAAM,cAAc,IAAI,aAAa,CAAC,eAAe,EAAE;AAC1D,oBAAA,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,oBAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,mBAAmB,KAAK,IAAI,EAAE;wBACrE,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,EAC1C,iBAAiB,CAClB,CAAC;AACF,wBAAA,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;AAC7C,4BAAA,cAAc,CAAC,gBAAgB,KAAK,IAAI,EACxC;AACA,4BAAA,IAAI,cAAc,CAAC,gBAAgB,KAAKR,6BAAqB,CAAC,KAAK,EAAE;AACnE,gCAAA,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;;oCAEpC,SAAS;AACV,iCAAA;AAAM,qCAAA;AACL,oCAAA,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;AACvC,wCAAA,MAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC;AACxB,4CAAA,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;AAC9D,qCAAA;AACF,iCAAA;AACF,6BAAA;AAAM,iCAAA,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;AAC7D,gCAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACjF,6BAAA;AACF,yBAAA;AACD,wBAAA,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;AAChC,4BAAA,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;AACtC,gCAAA,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;AACvC,oCAAA,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;AACxC,wCAAA,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7E,qCAAA;AACF,iCAAA;AACF,6BAAA;AAAM,iCAAA;AACL,gCAAA,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;AAC/D,6BAAA;AACF,yBAAA;AACD,wBAAA,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI;AACxC,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;AAC/D,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;AAC7D,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACjF,yBAAA;AACD,wBAAA,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;AACH,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,MAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;AACzE,YAAA,IAAI,WAAW,IAAI,aAAa,CAAC,WAAW,EAAE;gBAC5C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;AACtD,aAAA;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;AAClC,gBAAA,KAAK,MAAM,eAAe,IAAI,aAAa,CAAC,gBAAgB,EAAE;AAC5D,oBAAA,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,oBAAA,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;wBACrD,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,EAC3C,iBAAiB,CAClB,CAAC;AACF,wBAAA,MAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;AACxE,6BAAA,sBAAsB,CAAC;AAC1B,wBAAA,IAAI,sBAAsB,EAAE;4BAC1B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AAC1C,gCAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;AACzE,6BAAA;AACF,yBAAA;AAAM,6BAAA;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;AACnC,gCAAA,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;AACH,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AAED,YAAA,MAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;AAC3E,YAAA,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;AACzB,oBAAA,KAAK,MAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;AACpD,wBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CA
AC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;AACpF,qBAAA;AACF,iBAAA;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,oBAAA,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AAC/C,iBAAA;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;AACnB,oBAAA,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACvC,iBAAA;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,oBAAA,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;AACzD,iBAAA;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;AAC9B,oBAAA,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;AAC7D,iBAAA;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;;AAEtB,oBAAA,WAAmB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AACxD,iBAAA;gBAED,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,oBAAA,WAAW,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;AACrD,iBAAA;gBAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,SAAS,IAAI,OAAO,CAAC,iBAAiB,KAAK,IAAI,EAAE;AACjF,oBAAA,WAAW,CAAC,iBAAiB,GAAG,OAAO,CAAC,iBAAiB,CAAC;AAC3D,iBAAA;AACF,aAAA;AAED,YAAA,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;AAE3E,YAAA,IAAI,WAAW,CAAC,yBAAyB,KAAK,SAAS,EAAE;AACvD,gBAAA,WAAW,CAAC,yBAAyB,GAAG,4BAA4B,CAAC,aAAa,CAAC,CAAC;AACrF,aAAA;AAED,YAAA,IAAI,WAAkC,CAAC;AACvC,YAAA,IAAI,gBAAgB,CAAC;YACrB,IAAI;gBACF,WAAW,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AACnD,aAAA;AAAC,YAAA,OAAO,KAAU,EAAE;gBACnB,gBAAgB,GAAG,KAAK,CAAC;AAC1B,aAAA;AACD,YAAA,IAAI,gBAAgB,EAAE;gBACpB,IAAI,gBAAgB,CAAC,QAAQ,EAAE;AAC7B,oBAAA,gBAAgB,CAAC,OAAO,GAAG,eAAe,CACxC,gBAAgB,CAAC,QAAQ,EACzB,aAAa,CAAC,SAAS,CAAC,gBAAgB,CAAC,UAAU,CAAC;AAClD,wBAAA,aAAa,CAAC,SAAS,CAAC,SAAS,CAAC,CACrC,CAAC;AACH,iBAAA;AACD,gBAAA,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC;AAC3C,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,GAAG,OAAO,CAAC,OAAO,CACtB,eAAe,CAAC,WAAY,EAAE,aAAa,CAAC,SAAS,CAAC,WAAY,CAAC,MAAM,CAAC,CAAC,CAC5E,CAAC;AACH,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAChC,SAAA;QAED,MAAM,EAAE,GAAG,QAAQ,CAAC;AACpB,QAAA,IAAI,EAAE,EAAE;YACN,MAAM;iBACH,IAAI,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC;iBACvF,KAAK,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5B,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AACF,CAAA;AAEK,SAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAAA;;IAE5B,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,iBAAiB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,EAAE,CAAC;AAC9E,IAAA,MAAM,cAAc,GAAgC;AAClD,QAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,QAAQ,mCAAI,EAAE;AAC1C,QAAA,WAAW,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,WAAW,mCAAI,KAAK;AACnD,QAAA,UAAU,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,UAAU,mCAAI,WAAW;KACxD,CAAC;AAEF,IAAA,MAAM,UAAU,GAAG,iBAAiB,CAAC,UAAU,CAAC;IAChD,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;AACjE,QAAA,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;AAEF,QAAA,MAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;AACpD,QAAA,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,YAAY,EAAE,kBAAkB,EAAE,GAC3F,UAAU,CAAC;AACb,QAAA,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QAEtC,IAAI;AACF,YAAA,IAAI,CAAC,WAAW,CAAC,IAAI,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,IAAI,KAAK,QAAQ,EAAE;gBAC7E,MAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;AACF,gBAAA,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,EAC9B,cAAc,CACf,CAAC;AAEF,gBAAA,MAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAEhD,IAAI,aAAa,CAAC,KAAK,EAAE;AACvB,oBAAA,MAAM,QAAQ,GAAG,kBAAkB,GAAG,CAAS,MAAA,EAAA,kBAAkB,CAAE,CAAA,GAAG,OAAO,CAAC;AAC9E,oBAAA,MAAM,KAAK,GAAG,wBAAwB,CACpC,YAAY,EACZ,QAAQ,EACR,QAAQ,EACR,WA
AW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACF,oBAAA,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BS,kBAAwB,CACtB,KAAK,EACL,cAAc,IAAI,OAAO,IAAI,cAAe,EAC5C,QAAQ,EACR,YAAY,CACb,EACD;4BACE,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;AACX,yBAAA,CACF,CAAC;AACH,qBAAA;yBAAM,IAAI,CAAC,QAAQ,EAAE;AACpB,wBAAA,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,KAAK,EAAE;4BACrC,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;AACX,yBAAA,CAAC,CAAC;AACJ,qBAAA;AACF,iBAAA;AAAM,qBAAA,IACL,QAAQ,KAAK,UAAU,CAAC,MAAM;AAC9B,qBAAC,CAAA,CAAA,EAAA,GAAA,aAAa,CAAC,WAAW,0CAAE,KAAK,CAAC,YAAY,CAAC,KAAI,aAAa,CAAC,SAAS,KAAK,MAAM,CAAC,EACtF;;;oBAGA,OAAO;AACR,iBAAA;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;AACrD,iBAAA;AACF,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CACb,CAAA,OAAA,EAAU,KAAK,CAAC,OAAO,2CAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,CAAA,CAAA,CAAG,CACL,CAAC;AACH,SAAA;AACF,KAAA;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;AAC1F,QAAA,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;AAC1B,QAAA,KAAK,MAAM,iBAAiB,IAAI,aAAa,CAAC,kBAAkB,EAAE;AAChE,YAAA,MAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,YAAA,IAAI,sBAAsB,KAAK,SAAS,IAAI,sBAAsB,KAAK,IAAI,EAAE;AAC3E,gBAAA,MAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,EAC7C,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AACH,CAAC;AAED;;AAEG;AACH,SAAS,wBAAwB,CAC/B,YAAgC,EAChC,QAAgB,EAChB,QAAgB,EAChB,eAAoB,EACpB,OAAoC,EAAA;;;AAIpC,IAAA,IAAI,YAAY,IAAI,CAAC,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;QAC/E,MAAM,MAAM,GAAQ,EAAE,CAAC;AACvB,QAAA,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,YAAY,EAAE,CAAC;AACnD,QAAA,OAAO,MAAM,CAAC;AACf,KAAA;AAED,IAAA,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC,EAAA;AAEjC,IAAA,IAAI,MAAc,CAAC;AACnB,IAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;AAChB,KAAA;AAAM,SAAA;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;AAC/B,QAAA,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;AAC/B,YAAA,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;AACxB,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,iBAAmD,EACnD,OAA6B,EAAA;IAE7B,MAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;AAClF,KAAA;AAED,IAAA,IAAI,iBAAiB,EAAE;AACrB,QAAA,SAAS,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;AACnC,KAAA;IAED,MAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,MAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;AAC/C,QAAA,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;AAC5F,KAAA;AACD,IAAA,SAAS,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC;IACjC,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;AAEzE,IAAA,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;AAC1B,QAAA,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;AACzC,QAAA,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;AACzC,QAAA,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;AACzC,KAAA;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;AAE3E,IAAA,IAAIC,eAAM,EAAE;QACV,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;AACpD,KAAA;AAED,IAAA,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;AAEnD,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;;AAKG;AACa,SAAA,yBAAyB,CACvC,eAAwC,
EACxC,iBAAwC,EAAA;IAExC,MAAM,sBAAsB,GAA2B,EAAE,CAAC;IAE1D,IAAI,eAAe,CAAC,iBAAiB,EAAE;AACrC,QAAA,sBAAsB,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;AAC7C,KAAA;IAED,IAAI,cAAc,GAAG,SAAS,CAAC;IAC/B,IAAI,eAAe,CAAC,gBAAgB,IAAI,eAAe,CAAC,gBAAgB,CAAC,eAAe,EAAE;QACxF,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAAC;;;AAIrE,QAAA,MAAM,oBAAoB,GAAG,wBAAwB,EAAE,CAAC;QACxD,IAAI,aAAa,CAAC,OAAO,CAAC,oBAAoB,CAAC,KAAK,CAAC,CAAC,EAAE;AACtD,YAAA,aAAa,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AAC1C,SAAA;AAED,QAAA,cAAc,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1C,KAAA;IAED,MAAM,gBAAgB,mCACjB,uBAAuB,CAAA,EACvB,eAAe,CAAC,gBAAgB,CACpC,CAAC;IAEF,MAAM,YAAY,mCACb,mBAAmB,CAAA,EACnB,eAAe,CAAC,YAAY,CAChC,CAAC;IAEF,MAAM,eAAe,mCAChB,sBAAsB,CAAA,EACtB,eAAe,CAAC,eAAe,CACnC,CAAC;AAEF,IAAA,IAAIA,eAAM,EAAE;QACV,sBAAsB,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;AACxE,KAAA;IAED,MAAM,sBAAsB,mCACvB,6BAA6B,CAAA,EAC7B,eAAe,CAAC,sBAAsB,CAC1C,CAAC;AAEF,IAAA,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,eAAe,CAAC,cAAc,CAClC,CAAC;IAEF,sBAAsB,CAAC,IAAI,CACzB,aAAa,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,EAC5C,eAAe,CAAC,gBAAgB,CAAC,EACjC,eAAe,CAAC,EAAE,KAAK,EAAE,cAAc,EAAE,CAAC,EAC1C,6BAA6B,EAAE,EAC/B,qBAAqB,CAAC,sBAAsB,CAAC,oBAAoB,CAAC,EAClE,qBAAqB,EAAE,EACvB,sBAAsB,EAAE,EACxB,sBAAsB,CACpB,YAAY,CAAC,UAAU,EACvB,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB,CAC/B,CACF,CAAC;IAEF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,sBAAsB,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;AACzE,KAAA;AAED,IAAA,IAAI,iBAAiB,EAAE;AACrB,QAAA,sBAAsB,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;AAChD,KAAA;IAED,sBAAsB,CAAC,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,CAAC;AAEvD,IAAA,IAAIA,eAAM,IAAI,eAAe,CAAC,kBAAkB,KAAK,KAAK,EAAE;AAC1D,QAAA,sBAAsB,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;AACnE,KAAA;IAED,OAAO;QACL,UAAU,EAAE,eAAe,CAAC,UAAU;QACtC,sBAAsB;KACvB,CAAC;AACJ,CAAC;AAsBD,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB,EAAA;AAEtB,IAAA,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAEK,SAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB,EAAA;;AAEtB,IAAA,IAAI,KAAU,CAAC;AACf,IAAA,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;AACrC,QAAA,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;AACjC,KAAA;IACD,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,iBAAiB,CAAC;AACxE,IAAA,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;AAChC,QAAA,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;AAC9B,gBAAA,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;AACtC,aAAA;AAAM,iBAAA;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;AACF,gBAAA,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;AACvC,oBAAA,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;AACnF,iBAAA;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;AAC5B,gBAAA,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;AACb,wBAAA,eAAe,CAAC,QAAQ;AACxB,6BAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;AAClE,iBAAA;AACD,gBAAA,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;AAC7F,aAAA;;YAGD,MAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;AACtF,SAAA;AACF,KAAA;AAAM,SAAA;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;AACZ,SAAA;AAED,QAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,MAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;AACF,YAAA,MAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;AAChE,YAAA,MAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;YAEF,MAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YA
CF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,EAAE,iBAAiB,CAAC,CAAC;AAC3F,YAAA,IAAI,aAAa,KAAK,SAAS,IAAI,aAAa,KAAK,IAAI,EAAE;gBACzD,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;AACZ,iBAAA;AACD,gBAAA,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;AACrC,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB,EAAA;AAEvB,IAAA,MAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;AACpC,QAAA,MAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;QAEnD,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,IAAI,IAAI,iBAAiB,IAAI,MAAM,EAAE;AAC1E,YAAA,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;AACpC,SAAA;AAAM,aAAA;YACL,MAAM;AACP,SAAA;AACF,KAAA;AACD,IAAA,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;AAC9B,QAAA,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;AAC9B,QAAA,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;AAC7B,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;AAKG;AACa,SAAA,eAAe,CAC7B,SAAgC,EAChC,YAA2C,EAAA;AAE3C,IAAA,MAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;AAC9C,IAAA,MAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;AAE3D,IAAA,MAAM,oBAAoB,GAAG,CAC3B,GAAM,KAGJ;AACF,QAAA,OAAO,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;AAC7C,YAAA,KAAK,EAAE,SAAS;AACjB,SAAA,CAEA,CAAC;AACJ,KAAC,CAAC;AAEF,IAAA,IAAI,UAAU,EAAE;AACd,QAAA,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;AACzB,YAAA,OAAO,oBAAoB,CACtB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,aAAa,CAChB,EAAA,EAAA,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;AACJ,SAAA;AAED,QAAA,MAAM,eAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;QAC3F,MAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,IAAI,CAC1D,CAAC,CAAC,KAAK,eAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,CAChD,CAAC;AACF,QAAA,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;AACjD,YAAA,MAAM,aAAa,GAAG,CAAC,IAAI,SAAS,CAAC,UAAU,IAAI,EAAE,CAAC,CAAyB,CAAC;YAEhF,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE;AAC9C,gBAAA,IAAI,eAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChD,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,aAAa,EAAE;gBACjB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE;oBAC5C,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AACzC,iBAAA;AACF,aAAA;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;AACpC,YAAA,OAAO,aAAa,CAAC;AACtB,SAAA;AAED,QAAA,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,iCACtB,aAAa,CAAA,EACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACJ,SAAA;AACF,KAAA;AAED,IAAA,IACE,UAAU;AACV,QAAA,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;AACnC,QAAAC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;QAEA,OAAO,oBAAoB,CACtB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,aAAa,CAChB,EAAA,EAAA,IAAI,EAAE,SAAS,CAAC,UAAU,EAAA,CAAA,CAC1B,CAAC;AACJ,KAAA;IAED,OAAO,oBAAoB,iCACtB,aAAa,CAAA,EACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,OAA8B,EAC9B,OAAgB,EAAA;AAEhB,IAAA,IAAI,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;QAC7B,OAAO,OAAO,CAAC,gBAAgB,CAAC;AACjC,KAAA;AAED,IAAA,IAAI,OAAO,EAAE;QACX,OAAO,CAAA,EAAG,OAAO,CAAA,SAAA,CAAW,CAAC;AAC9B,KAAA;AACD,IAAA,OAAO,SAAS,CAAC;AACnB;;ACnjCA;AA2BA;;;;;;;;AAQG;AACG,SAAU,kBAAkB,CAChC,IAAgB,EAAA;AAKhB,IAAA,OAAOC,8BAA6B,CAAC,IAAI,CAAC,CAAC;AAC7C;;AC3CA;AACA;AAIA;;AAEG;AACI,MAAM,oBAAoB,GAAG,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC;AAqBlD;;;;;;AAMG;MACU,wBAAwB,CAAA;AAInC;;;AAGG;AACH,IAAA,WAAA,CAAY,uBAA+B,oBAAoB,EAAA;QANvD,IAAW,CAAA,WAAA,GAAiB,SAAS,CAAC;AAO5C,QAAA,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;KAClD;AAED;;;AAGG;AACH,IAAA,cAAc,CAAC,WAAoC,EAAA;AACjD,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;KAChC;AAED;;AAEG;IACH,cAAc,GAAA;QACZ,IACE,IAAI,CAAC,WAAW;AAChB,YAA
A,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAC7E;AACA,YAAA,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;AAC9B,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC;KACzB;AACF;;ACrED;AACA;AAIA;;;;AAIG;MACU,oBAAoB,CAAA;AAI/B,IAAA,WAAA,CACU,UAA2B,EAC3B,MAAyB,EACzB,uCAA+C,KAAK,EAAA;QAFpD,IAAU,CAAA,UAAA,GAAV,UAAU,CAAiB;QAC3B,IAAM,CAAA,MAAA,GAAN,MAAM,CAAmB;QACzB,IAAoC,CAAA,oCAAA,GAApC,oCAAoC,CAAgB;QALtD,IAAU,CAAA,UAAA,GAAG,CAAC,CAAC;KAMnB;AAEJ;;;AAGG;IACI,OAAO,GAAA;;AAEZ,QAAA,QACE,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,oCAAoC,EAC5F;KACH;AAED;;;;;AAKG;IACK,MAAM,QAAQ,CAAC,OAAwB,EAAA;AAC7C,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;AAC7B,QAAA,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACnE,QAAA,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;QACzB,OAAO,KAAK,IAAI,SAAS,CAAC;KAC3B;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,OAAwB,EAAA;AACrC,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AACvC,SAAA;QAED,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AACF;;ACvDD;AASA,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAE7C;;AAEG;MACU,8BAA8B,CAAA;AAiBzC;;;;;;AAMG;AACH,IAAA,WAAA,CACE,QAAgB,EAChB,QAAgB,EAChB,sBAA8B,4BAA4B,EAAA;AAhB5D;;;AAGG;QACH,IAAmB,CAAA,mBAAA,GAAW,4BAA4B,CAAC;AAczD,QAAA,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACzF,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACzF,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;AAED;;;;;AAKG;AACH,IAAA,WAAW,CAAC,WAA4B,EAAA;QACtC,MAAM,WAAW,GAAG,CAAA,EAAG,IAAI,CAAC,QAAQ,CAAA,CAAA,EAAI,IAAI,CAAC,QAAQ,CAAA,CAAE,CAAC;AACxD,QAAA,MAAM,kBAAkB,GAAG,CAAG,EAAA,IAAI,CAAC,mBAAmB,CAAA,CAAA,EAAIC,YAAmB,CAAC,WAAW,CAAC,EAAE,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;AAAE,YAAA,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;AAC3E,QAAA,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;AACF;;ACpED;AAqBA;;AAEG;MACU,iBAAiB,CAAA;AAU5B;;AAEG;AACH,IAAA,WAAA,CAAY,OAAgC,EAAA;AAC1C,QAAA,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AAClE,YAAA,MAAM,IAAI,KAAK,CACb,CAAA,wHAAA,CAA0H,CAC3H,CAAC;AACH,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AACjC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;KAChC;AAED;;;;;AAKG;AACH,IAAA,WAAW,CAAC,WAA4B,EAAA;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,CAAA,qEAAA,CAAuE,CAAC,CACnF,CAAC;AACH,SAAA;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;AACjB,YAAA,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;AACxB,gBAAA,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;AACzC,aAAA;AACD,YAAA,KAAK,MAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;AACtC,gBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;AAChE,aAAA;AACF,SAAA;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAA,yCAAA,CAA2C,CAAC,CAAC,CAAC;AAC/E,aAAA;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;AACpC,gBAAA,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;AACxB,aAAA;AACD,YAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;AAClC,oBAAA,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;AACxB,iBAAA;AACD,gBAAA,WAAW,CAAC,GAAG,IAAI,CAAA,EAAG,GAAG,CAAA,CAAA,EAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC;AAClD,aAAA;AAC
F,SAAA;AAED,QAAA,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;AACF;;ACtFD;AAKA;;AAEG;AACG,MAAO,gBAAiB,SAAQ,iBAAiB,CAAA;AACrD;;;;AAIG;AACH,IAAA,WAAA,CAAY,QAAgB,EAAA;QAC1B,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;AAC3D,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,MAAM,OAAO,GAA4B;AACvC,YAAA,QAAQ,EAAE;AACR,gBAAA,aAAa,EAAE,QAAQ;AACxB,aAAA;SACF,CAAC;QACF,KAAK,CAAC,OAAO,CAAC,CAAC;KAChB;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dom-shim.d.ts b/node_modules/@azure/core-http/dom-shim.d.ts new file mode 100644 index 0000000..cb4ac4d --- /dev/null +++ b/node_modules/@azure/core-http/dom-shim.d.ts @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// d.ts shims provide types for things we use internally but are not part +// of this package's surface area. + +interface Request {} +interface RequestInit {} +interface Response {} +interface Headers {} diff --git a/node_modules/@azure/core-http/node_modules/form-data/License b/node_modules/@azure/core-http/node_modules/form-data/License new file mode 100644 index 0000000..c7ff12a --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/@azure/core-http/node_modules/form-data/README.md.bak b/node_modules/@azure/core-http/node_modules/form-data/README.md.bak new file mode 100644 index 0000000..298a1a2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/README.md.bak @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@azure/core-http/node_modules/form-data/Readme.md b/node_modules/@azure/core-http/node_modules/form-data/Readme.md new file mode 100644 index 0000000..298a1a2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/Readme.md @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
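+
+If you need the response body rather than just draining it, note that `res` in the callback is a plain `http.IncomingMessage`, so you can collect it as with any readable stream. A minimal sketch (the URL is illustrative):
+
+``` javascript
+var FormData = require('form-data');
+
+var form = new FormData();
+form.append('my_field', 'my value');
+
+form.submit('http://example.org/', function(err, res) {
+  if (err) throw err;
+
+  var body = '';
+  res.on('data', function(chunk) { body += chunk; });
+  res.on('end', function() {
+    console.log(res.statusCode, body);
+  });
+});
+```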
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@azure/core-http/node_modules/form-data/index.d.ts b/node_modules/@azure/core-http/node_modules/form-data/index.d.ts new file mode 100644 index 0000000..295e9e9 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/index.d.ts @@ -0,0 +1,62 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +// Extracted because @types/node doesn't export interfaces. 
+interface ReadableOptions { + highWaterMark?: number; + encoding?: string; + objectMode?: boolean; + read?(this: stream.Readable, size: number): void; + destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean; +} + +interface Options extends ReadableOptions { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(userHeaders?: FormData.Headers): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + setBoundary(boundary: string): void; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js b/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000..09e7c70 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js b/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000..18dc819 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js @@ -0,0 +1,501 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var Stream = require('stream').Stream; +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } + + // add content length + if (length) { + request.setHeader('Content-Length', length); + } + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js b/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000..4d35738 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/@azure/core-http/node_modules/form-data/package.json b/node_modules/@azure/core-http/node_modules/form-data/package.json new file mode 100644 index 0000000..0f20240 --- /dev/null +++ 
b/node_modules/@azure/core-http/node_modules/form-data/package.json @@ -0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", + "version": "4.0.0", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 6" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "puppeteer": "^1.19.0", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/@azure/core-http/package.json b/node_modules/@azure/core-http/package.json new file mode 100644 index 0000000..b1ae84d --- /dev/null +++ b/node_modules/@azure/core-http/package.json @@ -0,0 +1,177 @@ +{ + "name": "@azure/core-http", + "sdk-type": "client", + "author": "Microsoft Corporation", + "version": "3.0.4", + "description": "Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "engines": { + "node": ">=14.0.0" + }, + "keywords": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime", + "azure", + "cloud" + ], + "main": "dist/index.js", + "module": "./dist-esm/src/index.js", + "types": "./types/latest/src/index.d.ts", + "typesVersions": { + "<3.6": { + "types/latest/src/*": [ + "types/3.1/src/*" + ] + } + }, + "files": [ + "dist/", + "dist-esm/src/", + "dom-shim.d.ts", + "types/*/src/**/*.d.ts", + "types/*/src/**/*.d.ts.map", + "README.md", + "LICENSE" + ], + "browser": { + "./dist-esm/src/policies/msRestUserAgentPolicy.js": "./dist-esm/src/policies/msRestUserAgentPolicy.browser.js", + 
"./dist-esm/src/policies/disableResponseDecompressionPolicy.js": "./dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js", + "./dist-esm/src/policies/proxyPolicy.js": "./dist-esm/src/policies/proxyPolicy.browser.js", + "./dist-esm/src/util/base64.js": "./dist-esm/src/util/base64.browser.js", + "./dist-esm/src/util/xml.js": "./dist-esm/src/util/xml.browser.js", + "./dist-esm/src/defaultHttpClient.js": "./dist-esm/src/defaultHttpClient.browser.js", + "./dist-esm/src/util/inspect.js": "./dist-esm/src/util/inspect.browser.js" + }, + "react-native": { + "./dist/index.js": "./dist-esm/src/index.js", + "./dist-esm/src/util/xml.js": "./dist-esm/src/util/xml.js", + "./dist-esm/src/policies/msRestUserAgentPolicy.js": "./dist-esm/src/policies/msRestUserAgentPolicy.native.js" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-http/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p tsconfig.es.json && dev-tool run bundle", + "build:types": "downlevel-dts types/latest/ types/3.1/", + "build": "npm run clean && tsc -p tsconfig.es.json && dev-tool run bundle && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p tsconfig.es.json && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p tsconfig.es.json && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p tsconfig.es.json && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "cross-env TS_NODE_FILES=true mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 500000 --full-trace --exclude \"test/**/*.browser.ts\" \"test/**/*.ts\"" + }, + "sideEffects": false, + "nyc": { + "extension": [ + ".ts" + ], + "exclude": [ + "coverage/**/*", + "**/*.d.ts", + "**/*.js" + ], + "reporter": [ + "text", + "html", + "cobertura" + ], + "all": true + }, + "//metadata": { + "constantPaths": [ + { + "path": "src/util/constants.ts", + "prefix": "coreHttpVersion" + } + ] + }, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + 
"@azure/core-tracing": "1.0.0-preview.13", + "@azure/core-util": "^1.1.1", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.5.0" + }, + "devDependencies": { + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/dev-tool": "^1.0.0", + "@azure/logger-js": "^1.0.2", + "@azure/test-utils": "^1.0.0", + "@microsoft/api-extractor": "^7.31.1", + "@opentelemetry/api": "^1.4.0", + "@types/chai": "^4.1.6", + "@types/express": "^4.16.0", + "@types/mocha": "^7.0.2", + "@types/node": "^14.0.0", + "@types/sinon": "^9.0.4", + "@types/trusted-types": "^2.0.0", + "@types/uuid": "^8.0.0", + "@types/xml2js": "^0.4.11", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "downlevel-dts": "^0.10.0", + "eslint": "^8.0.0", + "express": "^4.16.3", + "fetch-mock": "^9.10.1", + "karma": "^6.2.0", + "karma-chai": "^0.1.0", + "karma-chrome-launcher": "^3.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-firefox-launcher": "^1.1.0", + "karma-mocha": "^2.0.1", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "npm-run-all": "^4.1.5", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "puppeteer": "^19.2.2", + "regenerator-runtime": "^0.13.3", + "rimraf": "^3.0.0", + "shx": "^0.3.2", + "sinon": "^9.0.2", + "ts-node": "^10.0.0", + "typescript": "~4.8.0", + "uglify-js": "^3.4.9", + "xhr-mock": "^2.4.1" + } +} diff --git a/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts b/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts new file mode 100644 index 0000000..68a4bdd --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts @@ -0,0 +1,32 @@ +import { Span } from "@azure/core-tracing"; +import { OperationOptions } from "./operationOptions"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use core-tracing instead. + * @hidden + */ +export interface SpanConfig { + /** + * Package name prefix + */ + packagePrefix: string; + /** + * Service namespace + */ + namespace: string; +} +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +export declare function createSpanFunction(args: SpanConfig): (operationName: string, operationOptions: T) => { + span: Span; + updatedOptions: T; +}; +//# sourceMappingURL=createSpanLegacy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts new file mode 100644 index 0000000..7ea7d81 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts @@ -0,0 +1,48 @@ +import { AccessToken } from "@azure/core-auth"; +/** + * Defines the default token refresh buffer duration. + */ +export declare const TokenRefreshBufferMs: number; +/** + * Provides a cache for an AccessToken that was that + * was returned from a TokenCredential. + */ +export interface AccessTokenCache { + /** + * Sets the cached token. 
+ * + * @param accessToken - The {@link AccessToken} to be cached or null to + * clear the cached token. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached {@link AccessToken} or undefined if nothing is cached. + */ + getCachedToken(): AccessToken | undefined; +} +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class ExpiringAccessTokenCache implements AccessTokenCache { + private tokenRefreshBufferMs; + private cachedToken?; + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs?: number); + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. + */ + getCachedToken(): AccessToken | undefined; +} +//# sourceMappingURL=accessTokenCache.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts new file mode 100644 index 0000000..38451d5 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts @@ -0,0 +1,32 @@ +import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-auth"; +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class AccessTokenRefresher { + private credential; + private scopes; + private requiredMillisecondsBeforeNewRefresh; + private promise; + private lastCalled; + constructor(credential: TokenCredential, scopes: string | string[], requiredMillisecondsBeforeNewRefresh?: number); + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady(): boolean; + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + private getToken; + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options: GetTokenOptions): Promise; +} +//# sourceMappingURL=accessTokenRefresher.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts new file mode 100644 index 0000000..9da8922 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,44 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. 
+ */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=apiKeyCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts new file mode 100644 index 0000000..8ba0538 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts @@ -0,0 +1,36 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +export declare class BasicAuthenticationCredentials implements ServiceClientCredentials { + /** + * Username + */ + userName: string; + /** + * Password + */ + password: string; + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + authorizationScheme: string; + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName: string, password: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts new file mode 100644 index 0000000..4be8a9f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts @@ -0,0 +1,6 @@ +/** + * A function that receives a challenge and resolves a promise with a string token. + * @deprecated The Authenticator type is not currently in use. 
+ */ +export declare type Authenticator = (challenge: unknown) => Promise; +//# sourceMappingURL=credentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts new file mode 100644 index 0000000..5897a2e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts @@ -0,0 +1,14 @@ +import { WebResourceLike } from "../webResource"; +/** + * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header). + */ +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike/request to be signed. + * @returns The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts new file mode 100644 index 0000000..a96bb0f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts @@ -0,0 +1,13 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts new file mode 100644 index 0000000..26fcaef --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts new file mode 100644 index 0000000..c466858 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts new file mode 100644 index 0000000..6f85302 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. 
+ */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts new file mode 100644 index 0000000..40c4cf1 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts @@ -0,0 +1,3 @@ +import { HttpClient } from "./httpClient"; +export declare function getCachedDefaultHttpClient(): HttpClient; +//# sourceMappingURL=httpClientCache.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts new file mode 100644 index 0000000..1c033c6 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts @@ -0,0 +1,136 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders implements HttpHeadersLike { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. 
+ */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts new file mode 100644 index 0000000..70acd55 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts @@ -0,0 +1,78 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; +} +/** + * The flattened response to a REST call. + * Contains the underlying {@link HttpOperationResponse} as well as + * the merged properties of the `parsedBody`, `parsedHeaders`, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. 
+ */ + _response: HttpOperationResponse; + /** + * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body. + */ + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts new file mode 100644 index 0000000..26520c4 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. + */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts new file mode 100644 index 0000000..1cf5284 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/index.d.ts b/node_modules/@azure/core-http/types/3.1/src/index.d.ts new file mode 100644 index 0000000..ef3578b --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/index.d.ts @@ -0,0 +1,52 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, } from "./webResource"; +export { CommonResponse, CommonRequestInit, CommonRequestInfo } from "./nodeFetchHttpClient"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { OperationOptions, OperationRequestOptions, operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { ServiceClient, ServiceClientOptions, flattenResponse, createPipelineFromOptions, ProxySettings, ProxyOptions, } from "./serviceClient"; +export { PipelineOptions, InternalPipelineOptions } from "./pipelineOptions"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { LogPolicyOptions, logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryOptions, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy, RedirectOptions } from "./policies/redirectPolicy"; +export { keepAlivePolicy, KeepAliveOptions } from "./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, UserAgentOptions, TelemetryInfo, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, DeserializationOptions, deserializeResponseBody, DeserializationContentTypes, } from "./policies/deserializationPolicy"; +export { tracingPolicy, TracingPolicyOptions } from "./policies/tracingPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, 
serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isDuration, } from "./util/utils"; +export { isNode } from "@azure/core-util"; +export { URLBuilder, URLQuery } from "./url"; +export { AbortSignalLike } from "@azure/abort-controller"; +export { delay } from "@azure/core-util"; +export { createSpanFunction, SpanConfig } from "./createSpanLegacy"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from "@azure/core-auth"; +export { AccessTokenCache, ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from "./util/serializer.common"; +//# sourceMappingURL=index.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/log.d.ts b/node_modules/@azure/core-http/types/3.1/src/log.d.ts new file mode 100644 index 0000000..c757fef --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/log.d.ts @@ -0,0 +1,2 @@ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=log.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts new file mode 100644 index 0000000..4ca2122 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts @@ -0,0 +1,63 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { Transform } from "stream"; +import { TransferProgressEvent, WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * String URLs used when calling to `fetch()`. + */ +export declare type CommonRequestInfo = string; +/** + * An object containing information about the outgoing HTTP request. + */ +export declare type CommonRequestInit = Pick> & { + body?: any; + headers?: any; + signal?: any; +}; +/** + * An object containing information about the incoming HTTP response. + */ +export declare type CommonResponse = Pick> & { + body: any; + trailer: any; + formData: any; +}; +export declare class ReportTransform extends Transform { + private progressCallback; + private loadedBytes; + _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void; + constructor(progressCallback: (progress: TransferProgressEvent) => void); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +/** + * An HTTP client that uses `node-fetch`. + */ +export declare class NodeFetchHttpClient implements HttpClient { + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. 
+ * @returns An object representing the incoming HTTP response. + */ + sendRequest(httpRequest: WebResourceLike): Promise; + private proxyAgentMap; + private keepAliveAgents; + private getOrCreateAgent; + /** + * Uses `node-fetch` to perform the request. + */ + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + /** + * Prepares a request based on the provided web resource. + */ + prepareRequest(httpRequest: WebResourceLike): Promise>; + /** + * Process an HTTP response. + */ + processRequest(_operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts new file mode 100644 index 0000000..88327d2 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts new file mode 100644 index 0000000..d193c98 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts @@ -0,0 +1,56 @@ +import { RequestOptionsBase, TransferProgressEvent } from "./webResource"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationTracingOptions } from "@azure/core-tracing"; +/** + * The base options type for all operations. + */ +export interface OperationOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: OperationRequestOptions; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options that allow configuring the handling of HTTP requests made by an SDK operation. + */ +export interface OperationRequestOptions { + /** + * User defined custom request headers that will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. 
+ */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); +} +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +export declare function operationOptionsToRequestOptionsBase(opts: T): RequestOptionsBase; +//# sourceMappingURL=operationOptions.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts new file mode 100644 index 0000000..4b79aae --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts @@ -0,0 +1,54 @@ +import { Mapper } from "./serializer"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +/** + * A path which describes how to access a particular property in a given object data source. May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values. + */ +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts new file mode 100644 index 0000000..bfe4a1f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts @@ -0,0 +1,19 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. 
+ */ + bodyMapper?: Mapper; + /** + * Indicates if this is an error response + */ + isError?: boolean; +} +//# sourceMappingURL=operationResponse.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts new file mode 100644 index 0000000..ff8bd2a --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts @@ -0,0 +1,77 @@ +import { Serializer } from "./serializer"; +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { HttpMethods } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +/** + * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The media type of the request body. + * This value can be used to aide in serialization if it is provided. + */ + readonly mediaType?: "json" | "xml" | "form" | "binary" | "multipart" | "text" | "unknown" | string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. + */ + readonly headerParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + /** + * The different types of responses that this operation can return based on what status code is + * returned. + */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +/** + * Gets the list of status codes for streaming responses. 
+ * @internal + */ +export declare function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set; +//# sourceMappingURL=operationSpec.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts b/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts new file mode 100644 index 0000000..edc81c5 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts @@ -0,0 +1,63 @@ +import { DeserializationOptions } from "./policies/deserializationPolicy"; +import { HttpClient } from "./httpClient"; +import { KeepAliveOptions } from "./policies/keepAlivePolicy"; +import { LogPolicyOptions } from "./policies/logPolicy"; +import { ProxyOptions } from "./serviceClient"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RetryOptions } from "./policies/exponentialRetryPolicy"; +import { UserAgentOptions } from "./policies/userAgentPolicy"; +/** + * Defines options that are used to configure the HTTP pipeline for + * an SDK client. + */ +export interface PipelineOptions { + /** + * The HttpClient implementation to use for outgoing HTTP requests. Defaults + * to DefaultHttpClient. + */ + httpClient?: HttpClient; + /** + * Options that control how to retry failed requests. + */ + retryOptions?: RetryOptions; + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for how HTTP connections should be maintained for future + * requests. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; +} +/** + * Defines options that are used to configure internal options of + * the HTTP pipeline for an SDK client. + */ +export interface InternalPipelineOptions extends PipelineOptions { + /** + * Options to configure API response deserialization. + */ + deserializationOptions?: DeserializationOptions; + /** + * Options to configure request/response logging. + */ + loggingOptions?: LogPolicyOptions; + /** + * Configure whether to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Send JSON Array payloads as NDJSON. + */ + sendStreamingJson?: boolean; +} +//# sourceMappingURL=pipelineOptions.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts new file mode 100644 index 0000000..2f06728 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts @@ -0,0 +1,33 @@ +import { TokenCredential } from "@azure/core-auth"; +import { RequestPolicyFactory } from "../policies/requestPolicy"; +interface TokenCyclerOptions { + /** + * The window of time before token expiration during which the token will be + * considered unusable due to risk of the token expiring before sending the + * request. + * + * This will only become meaningful if the refresh fails for over + * (refreshWindow - forcedRefreshWindow) milliseconds. + */ + forcedRefreshWindowInMs: number; + /** + * Interval in milliseconds to retry failed token refreshes. + */ + retryIntervalInMs: number; + /** + * The window of time before token expiration during which + * we will attempt to refresh the token. 
+ */ + refreshWindowInMs: number; +} +export declare const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions; +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export declare function bearerTokenAuthenticationPolicy(credential: TokenCredential, scopes: string | string[]): RequestPolicyFactory; +export {}; +//# sourceMappingURL=bearerTokenAuthenticationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts new file mode 100644 index 0000000..e088109 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts @@ -0,0 +1,59 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { SerializerOptions } from "../util/serializer.common"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to configure API response deserialization. + */ +export interface DeserializationOptions { + /** + * Configures the expected content types for the deserialization of + * JSON and XML response bodies. + */ + expectedContentTypes: DeserializationContentTypes; +} +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +export declare const DefaultDeserializationOptions: DeserializationOptions; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + readonly xmlCharKey: string; + constructor(nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions, deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions); + sendRequest(request: WebResourceLike): Promise; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. 
+ * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse, options?: SerializerOptions): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts new file mode 100644 index 0000000..72def6e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts @@ -0,0 +1,13 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts new file mode 100644 index 0000000..73710e6 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts @@ -0,0 +1,29 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 0000000..2662d91 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,73 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export declare enum RetryMode { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + Exponential = 0 +} +/** + * Options that control how to retry failed requests. + */ +export interface RetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; + /** + * The amount of delay in milliseconds between retry attempts. Defaults to 30000 + * (30 seconds). The delay increases exponentially with each retry up to a maximum + * specified by maxRetryDelayInMs. + */ + retryDelayInMs?: number; + /** + * The maximum delay in milliseconds allowed before retrying an operation. Defaults + * to 90000 (90 seconds). + */ + maxRetryDelayInMs?: number; + /** + * Currently supporting only Exponential mode. + */ + mode?: RetryMode; +} +export declare const DefaultRetryOptions: RetryOptions; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. 
+ */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 0000000..62203ad --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,14 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts new file mode 100644 index 0000000..dd195c4 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts @@ -0,0 +1,46 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how HTTP connections should be maintained for future + * requests. + */ +export interface KeepAliveOptions { + /** + * When true, connections will be kept alive for multiple requests. + * Defaults to true. + */ + enable: boolean; +} +/** + * By default, HTTP connections are maintained for future requests. + */ +export declare const DefaultKeepAliveOptions: KeepAliveOptions; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export declare function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory; +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export declare class KeepAlivePolicy extends BaseRequestPolicy { + private readonly keepAliveOptions; + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, keepAliveOptions: KeepAliveOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=keepAlivePolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts new file mode 100644 index 0000000..c6561af --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts @@ -0,0 +1,77 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Debugger } from "@azure/logger"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Sanitizer } from "../util/sanitizer"; +import { WebResourceLike } from "../webResource"; +/** + * Options to pass to the {@link logPolicy}. + * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged. + */ +export interface LogPolicyOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to: + * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id, + * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials, + * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers, + * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding, + * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match, + * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent. + * + * Any headers specified in this field will be added to that list. + * Any other values will be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; + /** + * The Debugger (logger) instance to use for writing pipeline logs. + */ + logger?: Debugger; +} +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +export declare function logPolicy(loggingOptions?: LogPolicyOptions): RequestPolicyFactory; +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export declare class LogPolicy extends BaseRequestPolicy { + logger: Debugger; + sanitizer: Sanitizer; + /* + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + + + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + allowedHeaderNames: Set; + /* + * Query string names whose values will be logged when logging is enabled. 
By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + + + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + allowedQueryParameters: Set; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, { logger, allowedHeaderNames, allowedQueryParameters, }?: LogPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + private logRequest; + private logResponse; +} +//# sourceMappingURL=logPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 0000000..09a257f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 0000000..2e50b8b --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts new file mode 100644 index 0000000..57c6b43 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.native.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts new file mode 100644 index 0000000..72e4ed6 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts @@ -0,0 +1,3 @@ +import { RequestPolicyFactory } from "./requestPolicy"; +export declare function ndJsonPolicy(): RequestPolicyFactory; +//# sourceMappingURL=ndJsonPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts new file mode 100644 index 0000000..963560d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +export declare function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | 
undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts new file mode 100644 index 0000000..f202890 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts @@ -0,0 +1,37 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export declare const globalNoProxyList: string[]; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export declare function proxyPolicy(proxySettings?: ProxySettings, options?: { + /** a list of patterns to override those loaded from NO_PROXY environment variable. */ + customNoProxyList?: string[]; +}): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + private customNoProxyList?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, proxySettings: ProxySettings, customNoProxyList?: string[] | undefined); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts new file mode 100644 index 0000000..dcf0aa9 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts @@ -0,0 +1,33 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /** + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + /** + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. 
+ */ + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts new file mode 100644 index 0000000..3fb5d15 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts @@ -0,0 +1,102 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. + */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +/** + * The underlying structure of a request policy. + */ +export interface RequestPolicy { + /** + * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made. + * @param httpRequest - {@link WebResourceLike} describing the request to be made. + */ + sendRequest(httpRequest: WebResourceLike): Promise; +} +/** + * The base class from which all request policies derive. + */ +export declare abstract class BaseRequestPolicy implements RequestPolicy { + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + readonly _nextPolicy: RequestPolicy; + /** + * The options that can be passed to a given request policy. + */ + readonly _options: RequestPolicyOptionsLike; + /** + * The main method to implement that manipulates a request/response. + */ + protected constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy: RequestPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options: RequestPolicyOptionsLike); + /** + * Sends a network request based on the given web resource. + * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made. + */ + abstract sendRequest(webResource: WebResourceLike): Promise; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. 
If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 0000000..f17bc67 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts new file mode 100644 index 0000000..577d72f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts @@ -0,0 +1,20 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts new file mode 100644 index 0000000..cdb8a54 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. 
+ * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 0000000..193dc25 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,35 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +declare type ResponseHandler = (httpRequest: WebResourceLike, response: HttpOperationResponse) => Promise; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export declare function throttlingRetryPolicy(): RequestPolicyFactory; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private _handleResponse; + private numberOfRetries; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _handleResponse?: ResponseHandler); + sendRequest(httpRequest: WebResourceLike): Promise; + private _defaultResponseHandler; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +export {}; +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts new file mode 100644 index 0000000..793ef1c --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts @@ -0,0 +1,31 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Span } from "@azure/core-tracing"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to customize the tracing policy. + */ +export interface TracingPolicyOptions { + /** + * User agent used to better identify the outgoing requests traced by the tracing policy. + */ + userAgent?: string; +} +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export declare function tracingPolicy(tracingOptions?: TracingPolicyOptions): RequestPolicyFactory; +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export declare class TracingPolicy extends BaseRequestPolicy { + private userAgent?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, tracingOptions: TracingPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + tryCreateSpan(request: WebResourceLike): Span | undefined; + private tryProcessError; + private tryProcessResponse; +} +//# sourceMappingURL=tracingPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts new file mode 100644 index 0000000..6a4b2f7 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts @@ -0,0 +1,49 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Telemetry information. Key/value pairs to include inside the User-Agent string. + */ +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +/** + * Options for adding user agent details to outgoing requests. + */ +export interface UserAgentOptions { + /** + * String prefix to add to the user agent for outgoing requests. + * Defaults to an empty string. 
+ */ + userAgentPrefix?: string; +} +export declare const getDefaultUserAgentHeaderName: typeof getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export declare function getDefaultUserAgentValue(): string; +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptions; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts b/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts new file mode 100644 index 0000000..0d1cf99 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts @@ -0,0 +1,14 @@ +/// +import * as http from "http"; +import * as https from "https"; +import * as tunnel from "tunnel"; +import { HttpHeadersLike } from "./httpHeaders"; +import { ProxySettings } from "./serviceClient"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export declare function isUrlHttps(url: string): boolean; +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: tunnel.HttpsOverHttpsOptions): http.Agent | https.Agent; +//# sourceMappingURL=proxyAgent.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts b/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts new file mode 100644 index 0000000..b68b15d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts @@ -0,0 +1,26 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + /** + * CSV: Each pair of segments joined by a single comma. + */ + Csv = ",", + /** + * SSV: Each pair of segments joined by a single space character. + */ + Ssv = " ", + /** + * TSV: Each pair of segments joined by a single tab character. + */ + Tsv = "\t", + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + Pipes = "|", + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. 
`?queryParam=value1&queryParam=value2` + */ + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/restError.d.ts b/node_modules/@azure/core-http/types/3.1/src/restError.d.ts new file mode 100644 index 0000000..960d2ff --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/restError.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +/** + * An error resulting from an HTTP request to a service endpoint. + */ +export declare class RestError extends Error { + /** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ + static readonly REQUEST_SEND_ERROR: string; + /** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ + static readonly PARSE_ERROR: string; + /** + * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT). + */ + code?: string; + /** + * The HTTP status code of the response, if one was returned. + */ + statusCode?: number; + /** + * Outgoing request. + */ + request?: WebResourceLike; + /** + * Incoming response. + */ + response?: HttpOperationResponse; + /** + * Any additional details. In the case of deserialization errors, can be the processed response. + */ + details?: unknown; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse); +} +//# sourceMappingURL=restError.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts b/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts new file mode 100644 index 0000000..1c3b025 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts @@ -0,0 +1,361 @@ +import { SerializerOptions } from "./util/serializer.common"; +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export declare class Serializer { + /** + * The provided model mapper. + */ + readonly modelMappers: { + [key: string]: any; + }; + /** + * Whether the contents are XML or not. + */ + readonly isXML?: boolean | undefined; + constructor( + /** + * The provided model mapper. + */ + modelMappers?: { + [key: string]: any; + }, + /** + * Whether the contents are XML or not. + */ + isXML?: boolean | undefined); + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + * @deprecated Removing the constraints validation on client side. + */ + validateConstraints(mapper: Mapper, value: unknown, objectName: string): void; + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. 
+ * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper: Mapper, object: unknown, objectName?: string, options?: SerializerOptions): any; + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper: Mapper, responseBody: unknown, objectName: string, options?: SerializerOptions): any; +} +/** + * Description of various value constraints such as integer ranges and string regex. + */ +export interface MapperConstraints { + /** + * The value should be less than or equal to the `InclusiveMaximum` value. + */ + InclusiveMaximum?: number; + /** + * The value should be less than the `ExclusiveMaximum` value. + */ + ExclusiveMaximum?: number; + /** + * The value should be greater than or equal to the `InclusiveMinimum` value. + */ + InclusiveMinimum?: number; + /** + * The value should be greater than the `InclusiveMinimum` value. + */ + ExclusiveMinimum?: number; + /** + * The length should be smaller than the `MaxLength`. + */ + MaxLength?: number; + /** + * The length should be bigger than the `MinLength`. + */ + MinLength?: number; + /** + * The value must match the pattern. + */ + Pattern?: RegExp; + /** + * The value must contain fewer items than the MaxItems value. + */ + MaxItems?: number; + /** + * The value must contain more items than the `MinItems` value. + */ + MinItems?: number; + /** + * The value must contain only unique items. + */ + UniqueItems?: true; + /** + * The value should be exactly divisible by the `MultipleOf` value. + */ + MultipleOf?: number; +} +/** + * Type of the mapper. Includes known mappers. + */ +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +/** + * The type of a simple mapper. + */ +export interface SimpleMapperType { + /** + * Name of the type of the property. + */ + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +/** + * Helps build a mapper that describes how to map a set of properties of an object based on other mappers. + * + * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`. + */ +export interface CompositeMapperType { + /** + * Name of the composite mapper type. + */ + name: "Composite"; + /** + * Use `className` to reference another type definition. + */ + className?: string; + /** + * Use `modelProperties` when the reference to the other type has been resolved. + */ + modelProperties?: { + [propertyName: string]: Mapper; + }; + /** + * Used when a model has `additionalProperties: true`. Allows the generic processing of unnamed model properties on the response object. + */ + additionalProperties?: Mapper; + /** + * The name of the top-most parent scheme, the one that has no parents. + */ + uberParent?: string; + /** + * A polymorphic discriminator. 
+ */ + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +/** + * Helps build a mapper that describes how to parse a sequence of mapped values. + */ +export interface SequenceMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Sequence"; + /** + * The mapper to use to map each one of the properties of the sequence. + */ + element: Mapper; +} +/** + * Helps build a mapper that describes how to parse a dictionary of mapped values. + */ +export interface DictionaryMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Dictionary"; + /** + * The mapper to use to map the value of each property in the dictionary. + */ + value: Mapper; +} +/** + * Helps build a mapper that describes how to parse an enum value. + */ +export interface EnumMapperType { + /** + * Name of the enum type mapper. + */ + name: "Enum"; + /** + * Values allowed by this mapper. + */ + allowedValues: any[]; +} +/** + * The base definition of a mapper. Can be used for XML and plain JavaScript objects. + */ +export interface BaseMapper { + /** + * Name for the xml element + */ + xmlName?: string; + /** + * Xml element namespace + */ + xmlNamespace?: string; + /** + * Xml element namespace prefix + */ + xmlNamespacePrefix?: string; + /** + * Determines if the current property should be serialized as an attribute of the parent xml element + */ + xmlIsAttribute?: boolean; + /** + * Determines if the current property should be serialized as the inner content of the xml element + */ + xmlIsMsText?: boolean; + /** + * Name for the xml elements when serializing an array + */ + xmlElementName?: string; + /** + * Whether or not the current property should have a wrapping XML element + */ + xmlIsWrapped?: boolean; + /** + * Whether or not the current property is readonly + */ + readOnly?: boolean; + /** + * Whether or not the current property is a constant + */ + isConstant?: boolean; + /** + * Whether or not the current property is required + */ + required?: boolean; + /** + * Whether or not the current property allows mull as a value + */ + nullable?: boolean; + /** + * The name to use when serializing + */ + serializedName?: string; + /** + * Type of the mapper + */ + type: MapperType; + /** + * Default value when one is not explicitly provided + */ + defaultValue?: any; + /** + * Constraints to test the current value against + */ + constraints?: MapperConstraints; +} +/** + * Mappers are definitions of the data models used in the library. + * These data models are part of the Operation or Client definitions in the responses or parameters. + */ +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +/** + * Used to disambiguate discriminated type unions. + * For example, if response can have many shapes but also includes a 'kind' field (or similar), + * that field can be used to determine how to deserialize the response to the correct type. + */ +export interface PolymorphicDiscriminator { + /** + * Name of the discriminant property in the original JSON payload, e.g. `@odata.kind`. + */ + serializedName: string; + /** + * Name to use on the resulting object instead of the original property name. + * Useful since the JSON property could be difficult to work with. + * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`. + */ + clientName: string; + /** + * It may contain any other property. + */ + [key: string]: string; +} +/** + * A mapper composed of other mappers. 
+ */ +export interface CompositeMapper extends BaseMapper { + /** + * The type descriptor of the `CompositeMapper`. + */ + type: CompositeMapperType; +} +/** + * A mapper describing arrays. + */ +export interface SequenceMapper extends BaseMapper { + /** + * The type descriptor of the `SequenceMapper`. + */ + type: SequenceMapperType; +} +/** + * A mapper describing plain JavaScript objects used as key/value pairs. + */ +export interface DictionaryMapper extends BaseMapper { + /** + * The type descriptor of the `DictionaryMapper`. + */ + type: DictionaryMapperType; + /** + * Optionally, a prefix to add to the header collection. + */ + headerCollectionPrefix?: string; +} +/** + * A mapper describing an enum value. + */ +export interface EnumMapper extends BaseMapper { + /** + * The type descriptor of the `EnumMapper`. + */ + type: EnumMapperType; +} +/** + * An interface representing an URL parameter value. + */ +export interface UrlParameterValue { + /** + * The URL value. + */ + value: string; + /** + * Whether to keep or skip URL encoding. + */ + skipUrlEncoding: boolean; +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export declare function serializeObject(toSerialize: unknown): any; +/** + * String enum containing the string types of property mappers. + */ +export declare const MapperType: { + Date: "Date"; + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + DateTime: "DateTime"; + DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts new file mode 100644 index 0000000..f77862c --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts @@ -0,0 +1,168 @@ +import { Mapper, Serializer } from "./serializer"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { ServiceCallback } from "./util/utils"; +import { TokenCredential } from "@azure/core-auth"; +import { HttpClient } from "./httpClient"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { InternalPipelineOptions } from "./pipelineOptions"; +import { OperationArguments } from "./operationArguments"; +import { OperationResponse } from "./operationResponse"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +/** + * Options to configure a proxy for outgoing requests (Node.js only). + */ +export interface ProxySettings { + /** + * The proxy's host address. + */ + host: string; + /** + * The proxy host's port. + */ + port: number; + /** + * The user name to authenticate with the proxy, if required. + */ + username?: string; + /** + * The password to authenticate with the proxy, if required. + */ + password?: string; +} +/** + * An alias of {@link ProxySettings} for future use. 
+ */ +export declare type ProxyOptions = ProxySettings; +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-useragent" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * If specified, will be used to build the BearerTokenAuthenticationPolicy. + */ + credentialScopes?: string | string[]; +} +/** + * ServiceClient sends service requests and receives responses. + */ +export declare class ServiceClient { + /** + * If specified, this is the base URI that requests will be made against for this ServiceClient. + * If it is not specified, then all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. 
+ * @param options - The service client options that govern the behavior of the client. + */ + constructor(credentials?: TokenCredential | ServiceClientCredentials, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. + */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +export declare function createPipelineFromOptions(pipelineOptions: InternalPipelineOptions, authPolicyFactory?: RequestPolicyFactory): ServiceClientOptions; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/url.d.ts b/node_modules/@azure/core-http/types/3.1/src/url.d.ts new file mode 100644 index 0000000..986a955 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/url.d.ts @@ -0,0 +1,160 @@ +/** + * A class that handles the query portion of a URLBuilder. + */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Get the keys of the query string. + */ + keys(): string[]; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. 
+ */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. + */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. + */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + /** + * Parses a given string URL into a new {@link URLBuilder}. 
+ */ + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts new file mode 100644 index 0000000..94d313e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts new file mode 100644 index 0000000..1b7a4de --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts new file mode 100644 index 0000000..cfa1b24 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts @@ -0,0 +1,72 @@ +/** + * A set of constants used internally when processing requests. 
*/ +export declare const Constants: { + /** + * The core-http version + */ + coreHttpVersion: string; + /** + * Specifies HTTP. + */ + HTTP: string; + /** + * Specifies HTTPS. + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + ServiceUnavailable: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 429 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: string; + /** + * The UserAgent header. + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts new file mode 100644 index 0000000..a97959e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts @@ -0,0 +1,40 @@ +import { HttpOperationResponse } from ".."; +export declare const DEFAULT_CLIENT_RETRY_COUNT = 3; +export declare const DEFAULT_CLIENT_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; +export declare function isNumber(n: unknown): n is number; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial check on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. + */ +export declare function shouldRetry(retryLimit: number, predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean, retryData: RetryData, response?: HttpOperationResponse, error?: RetryError): boolean; +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation's error, if any.
+ */ +export declare function updateRetryData(retryOptions: { + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; +}, retryData?: RetryData, err?: RetryError): RetryData; +//# sourceMappingURL=exponentialBackoffStrategy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts new file mode 100644 index 0000000..9f3d211 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts @@ -0,0 +1,2 @@ +export declare const custom: {}; +//# sourceMappingURL=inspect.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts new file mode 100644 index 0000000..d203786 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts @@ -0,0 +1,2 @@ +export declare const custom: symbol; +//# sourceMappingURL=inspect.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts new file mode 100644 index 0000000..972d545 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts @@ -0,0 +1,25 @@ +export interface SanitizerOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; +} +export declare class Sanitizer { + allowedHeaderNames: Set; + allowedQueryParameters: Set; + constructor({ allowedHeaderNames, allowedQueryParameters }?: SanitizerOptions); + sanitize(obj: unknown): string; + private sanitizeHeaders; + private sanitizeQuery; + private sanitizeObject; + private sanitizeUrl; +} +//# sourceMappingURL=sanitizer.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts new file mode 100644 index 0000000..e3e4e2d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts @@ -0,0 +1,26 @@ +/** + * Default key used to access the XML attributes. + */ +export declare const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +export declare const XML_CHARKEY = "_"; +/** + * Options to govern behavior of xml parser and builder. + */ +export interface SerializerOptions { + /** + * indicates the name of the root element in the resulting XML when building XML. + */ + rootName?: string; + /** + * indicates whether the root element is to be included or not in the output when parsing XML. + */ + includeRoot?: boolean; + /** + * key used to access the XML value content when parsing XML. 
+ */ + xmlCharKey?: string; +} +//# sourceMappingURL=serializer.common.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts new file mode 100644 index 0000000..544c3a3 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts @@ -0,0 +1,5 @@ +/** + * Maximum number of retries for the throttling retry policy + */ +export declare const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts new file mode 100644 index 0000000..c016e54 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts @@ -0,0 +1,127 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array, kickstart: unknown): Promise; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. + * @param err - The error occurred if any, while executing the request; otherwise null. + * @param result - The deserialized response body if an error did not occur. + * @param request - The raw/actual request sent to the server if an error did not occur. + * @param response - The raw/actual response from the server if an error did not occur. 
+ */ + (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +export declare function promiseToCallback(promise: Promise): (cb: Function) => void; +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +export declare function promiseToServiceCallback(promise: Promise): (cb: ServiceCallback) => void; +export declare function prepareXMLRootList(obj: unknown, elementName: string, xmlNamespaceKey?: string, xmlNamespace?: string): { + [s: string]: any; +}; +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +export declare function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +export declare function isDuration(value: string): boolean; +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined; +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. + */ +export declare function isPrimitiveType(value: unknown): boolean; +export declare function getEnvironmentValue(name: string): string | undefined; +/** + * @internal + */ +export declare type UnknownObject = { + [s: string]: unknown; +}; +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. 
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=utils.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts new file mode 100644 index 0000000..dfba9cd --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts @@ -0,0 +1,4 @@ +import { SerializerOptions } from "./serializer.common"; +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +export declare function stringifyXML(content: unknown, opts?: SerializerOptions): string; +//# sourceMappingURL=xml.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts new file mode 100644 index 0000000..42c9056 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts @@ -0,0 +1,14 @@ +import { SerializerOptions } from "./serializer.common"; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export declare function stringifyXML(obj: unknown, opts?: SerializerOptions): string; +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +//# sourceMappingURL=xml.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts b/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts new file mode 100644 index 0000000..d5bb529 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts @@ -0,0 +1,445 @@ +/// +import { Context, SpanOptions } from "@azure/core-tracing"; +import { HttpHeadersLike } from "./httpHeaders"; +import { Mapper } from "./serializer"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { OperationSpec } from "./operationSpec"; +import { ProxySettings } from "./serviceClient"; +import { SerializerOptions } from "./util/serializer.common"; +/** + * List of supported HTTP methods. + */ +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +/** + * Possible HTTP request body types + */ +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * A description of a HTTP request to be made to a remote server. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. 
+ */ + streamResponseBody?: boolean; + /** + * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * A unique identifier for the request. Used for logging and tracing. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: unknown): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export declare class WebResource implements WebResourceLike { + /** + * URL of the outgoing request. + */ + url: string; + /** + * HTTP method to use. + */ + method: HttpMethods; + /** + * Request body. + */ + body?: any; + /** + * HTTP headers. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream. 
+ */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * Query added to the URL. + */ + query?: { + [key: string]: any; + }; + /** + * Specification of the HTTP request. + */ + operationSpec?: OperationSpec; + /** + * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination. + */ + withCredentials: boolean; + /** + * How long to wait in milliseconds before aborting the request. + */ + timeout: number; + /** + * What proxy to use, if necessary. + */ + proxySettings?: ProxySettings; + /** + * Whether to keep the HTTP connections alive throughout requests. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Unique identifier of the outgoing request. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating Spans. + */ + tracingContext?: Context; + constructor(url?: string, method?: HttpMethods, body?: unknown, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, decompressResponse?: boolean, streamResponseStatusCodes?: Set); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +/** + * Options to prepare an outgoing HTTP request. + */ +export interface RequestPrepareOptions { + /** + * The HTTP request method. 
Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: `{ "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } }` + * - query-parameter-value in "string" format: `{ "query-parameter-name": "query-parameter-value"}`. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}` + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: `{ "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } }` + * - path-parameter-value in "string" format: `{ "path-parameter-name": "path-parameter-value" }`. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * Form data, used to build the request body. + */ + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. 
To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: Record; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Allows keeping track of the progress of uploading the outgoing request. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Allows keeping track of the progress of downloading the incoming response. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + /** + * Value of the parameter. + */ + value: any; + /** + * Disables URL encoding if set to true. + */ + skipUrlEncoding: boolean; + /** + * Parameter values may contain any other property. + */ + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * May contain other properties. + */ + [key: string]: any; + /** + * Options to override XML parsing/building behavior. 
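// Minimal usage sketch based only on the RequestPrepareOptions and WebResource declarations
// above; the subscription, resource group, and account values are placeholders. It shows the
// pathTemplate/pathParameters flow and the object form that skips URL encoding.
import { WebResource } from "@azure/core-http";

const preparedRequest = new WebResource().prepare({
  method: "GET",
  // pathTemplate and url are mutually exclusive; path parameters fill the {placeholders}.
  pathTemplate:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}",
  baseUrl: "https://management.azure.com",
  pathParameters: {
    subscriptionId: "00000000-0000-0000-0000-000000000000",
    resourceGroupName: "my-rg",
    // Object format: provide { value, skipUrlEncoding } to keep the value unencoded.
    accountName: { value: "my-account", skipUrlEncoding: true },
  },
  queryParameters: { "api-version": "2023-01-01" },
  headers: { "Content-Type": "application/json; charset=utf-8" },
  // Do not generate a fresh x-ms-client-request-id GUID for this request.
  disableClientRequestId: true,
});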
+ */ + serializerOptions?: SerializerOptions; +} +//# sourceMappingURL=webResource.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts new file mode 100644 index 0000000..f08fd80 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike; +//# sourceMappingURL=xhrHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts new file mode 100644 index 0000000..9b87f7c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts @@ -0,0 +1,32 @@ +import { Span } from "@azure/core-tracing"; +import { OperationOptions } from "./operationOptions"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use core-tracing instead. + * @hidden + */ +export interface SpanConfig { + /** + * Package name prefix + */ + packagePrefix: string; + /** + * Service namespace + */ + namespace: string; +} +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. 
+ */ +export declare function createSpanFunction(args: SpanConfig): (operationName: string, operationOptions: T) => { + span: Span; + updatedOptions: T; +}; +//# sourceMappingURL=createSpanLegacy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map new file mode 100644 index 0000000..3e83ac3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpanLegacy.d.ts","sourceRoot":"","sources":["../../../src/createSpanLegacy.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,IAAI,EAAuD,MAAM,qBAAqB,CAAC;AAChG,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;;;;GAKG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;CACnB;AAED;;;;;;;;GAQG;AACH,wBAAgB,kBAAkB,CAChC,IAAI,EAAE,UAAU,GACf,CAAC,CAAC,SAAS,gBAAgB,EAC5B,aAAa,EAAE,MAAM,EACrB,gBAAgB,EAAE,CAAC,KAChB;IAAE,IAAI,EAAE,IAAI,CAAC;IAAC,cAAc,EAAE,CAAC,CAAA;CAAE,CAErC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts new file mode 100644 index 0000000..f57bdba --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts @@ -0,0 +1,48 @@ +import { AccessToken } from "@azure/core-auth"; +/** + * Defines the default token refresh buffer duration. + */ +export declare const TokenRefreshBufferMs: number; +/** + * Provides a cache for an AccessToken that was that + * was returned from a TokenCredential. + */ +export interface AccessTokenCache { + /** + * Sets the cached token. + * + * @param accessToken - The {@link AccessToken} to be cached or null to + * clear the cached token. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached {@link AccessToken} or undefined if nothing is cached. + */ + getCachedToken(): AccessToken | undefined; +} +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class ExpiringAccessTokenCache implements AccessTokenCache { + private tokenRefreshBufferMs; + private cachedToken?; + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs?: number); + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ + getCachedToken(): AccessToken | undefined; +} +//# sourceMappingURL=accessTokenCache.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map new file mode 100644 index 0000000..15d69fb --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenCache.d.ts","sourceRoot":"","sources":["../../../../src/credentials/accessTokenCache.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE/C;;GAEG;AACH,eAAO,MAAM,oBAAoB,QAAgB,CAAC;AAElD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;;;OAKG;IACH,cAAc,CAAC,WAAW,EAAE,WAAW,GAAG,SAAS,GAAG,IAAI,CAAC;IAE3D;;OAEG;IACH,cAAc,IAAI,WAAW,GAAG,SAAS,CAAC;CAC3C;AAED;;;;;;GAMG;AACH,qBAAa,wBAAyB,YAAW,gBAAgB;IAC/D,OAAO,CAAC,oBAAoB,CAAS;IACrC,OAAO,CAAC,WAAW,CAAC,CAA0B;IAE9C;;;OAGG;gBACS,oBAAoB,GAAE,MAA6B;IAI/D;;;OAGG;IACH,cAAc,CAAC,WAAW,EAAE,WAAW,GAAG,SAAS,GAAG,IAAI;IAI1D;;OAEG;IACH,cAAc,IAAI,WAAW,GAAG,SAAS;CAU1C"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts new file mode 100644 index 0000000..e3c8d38 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts @@ -0,0 +1,32 @@ +import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-auth"; +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class AccessTokenRefresher { + private credential; + private scopes; + private requiredMillisecondsBeforeNewRefresh; + private promise; + private lastCalled; + constructor(credential: TokenCredential, scopes: string | string[], requiredMillisecondsBeforeNewRefresh?: number); + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady(): boolean; + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + private getToken; + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. 
+ */ + refresh(options: GetTokenOptions): Promise; +} +//# sourceMappingURL=accessTokenRefresher.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map new file mode 100644 index 0000000..5cf3f0f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenRefresher.d.ts","sourceRoot":"","sources":["../../../../src/credentials/accessTokenRefresher.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEjF;;;;GAIG;AACH,qBAAa,oBAAoB;IAK7B,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,oCAAoC;IAN9C,OAAO,CAAC,OAAO,CAA+C;IAC9D,OAAO,CAAC,UAAU,CAAK;gBAGb,UAAU,EAAE,eAAe,EAC3B,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,EACzB,oCAAoC,GAAE,MAAc;IAG9D;;;OAGG;IACI,OAAO,IAAI,OAAO;IAOzB;;;;;OAKG;YACW,QAAQ;IAOtB;;;OAGG;IACI,OAAO,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC;CAO3E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts new file mode 100644 index 0000000..a1b9a64 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,44 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
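// Minimal sketch of ApiKeyCredentials from the declarations above; the header and query
// parameter names ("x-api-key", "subscription-key") and the key value are placeholders.
import { ApiKeyCredentials, WebResource } from "@azure/core-http";

const apiKeyCredentials = new ApiKeyCredentials({
  inHeader: { "x-api-key": "my-secret-key" },        // applied to the request headers
  inQuery: { "subscription-key": "my-secret-key" },  // and/or appended as query parameters
});

async function apiKeyExample(): Promise<void> {
  const request = new WebResource("https://example.invalid/items", "GET");
  const signed = await apiKeyCredentials.signRequest(request);
  console.log(signed.headers.get("x-api-key"));
}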
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=apiKeyCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map new file mode 100644 index 0000000..e32be59 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAChC;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;CAChC;AAED;;GAEG;AACH,qBAAa,iBAAkB,YAAW,wBAAwB;IAChE;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAuB;IACjD;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAuB;IAEhD;;OAEG;gBACS,OAAO,EAAE,uBAAuB;IAU5C;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAiCpE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts new file mode 100644 index 0000000..1210598 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts @@ -0,0 +1,36 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +export declare class BasicAuthenticationCredentials implements ServiceClientCredentials { + /** + * Username + */ + userName: string; + /** + * Password + */ + password: string; + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + authorizationScheme: string; + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName: string, password: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
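// A companion sketch for BasicAuthenticationCredentials, again based only on the
// declarations above; user name and password are placeholders.
import { BasicAuthenticationCredentials, WebResource } from "@azure/core-http";

const basicCredentials = new BasicAuthenticationCredentials("my-user", "my-password"); // scheme defaults to "Basic"

async function basicAuthExample(): Promise<void> {
  const signed = await basicCredentials.signRequest(
    new WebResource("https://example.invalid/secure", "GET")
  );
  // signRequest populates the Authorization header with the chosen scheme.
  console.log(signed.headers.get("authorization"));
}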
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map new file mode 100644 index 0000000..c2c5c3e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAKjD;;GAEG;AACH,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;;OAGG;IACH,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;OAMG;gBAED,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,EAChB,mBAAmB,GAAE,MAAqC;IAa5D;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAOpE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts new file mode 100644 index 0000000..d8d1c5c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts @@ -0,0 +1,6 @@ +/** + * A function that receives a challenge and resolves a promise with a string token. + * @deprecated The Authenticator type is not currently in use. + */ +export declare type Authenticator = (challenge: unknown) => Promise; +//# sourceMappingURL=credentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map new file mode 100644 index 0000000..8617d1b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/credentials.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,oBAAY,aAAa,GAAG,CAAC,SAAS,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts new file mode 100644 index 0000000..a65d047 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts @@ -0,0 +1,14 @@ +import { WebResourceLike } from "../webResource"; +/** + * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header). + */ +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike/request to be signed. 
+ * @returns The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map new file mode 100644 index 0000000..8a9416e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;CACrE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts new file mode 100644 index 0000000..5e939fc --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts @@ -0,0 +1,13 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map new file mode 100644 index 0000000..3adcd9d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/topicCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAA2B,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAEjF;;GAEG;AACH,qBAAa,gBAAiB,SAAQ,iBAAiB;IACrD;;;;OAIG;gBACS,QAAQ,EAAE,MAAM;CAW7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts new file mode 100644 index 0000000..883145a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map new file mode 100644 index 0000000..2613c90 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.d.ts","sourceRoot":"","sources":["../../../src/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts new file mode 
100644 index 0000000..a7381a2 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map new file mode 100644 index 0000000..c15b8dd --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.d.ts","sourceRoot":"","sources":["../../../src/defaultHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts new file mode 100644 index 0000000..5f5f00e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. + */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map new file mode 100644 index 0000000..060dbe0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.d.ts","sourceRoot":"","sources":["../../../src/httpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAEzD;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,aAAa;CAAG"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts new file mode 100644 index 0000000..67ac61b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts @@ -0,0 +1,3 @@ +import { HttpClient } from "./httpClient"; +export declare function getCachedDefaultHttpClient(): HttpClient; +//# sourceMappingURL=httpClientCache.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map new file mode 100644 index 0000000..09a6518 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClientCache.d.ts","sourceRoot":"","sources":["../../../src/httpClientCache.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C,wBAAgB,0BAA0B,IAAI,UAAU,CAMvD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts new file mode 100644 index 0000000..7e487d9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts @@ -0,0 +1,136 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. 
+ */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders implements HttpHeadersLike { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. 
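// Minimal sketch of the HttpHeaders collection declared above: a case-insensitive
// key/value store with plain-object and array views.
import { HttpHeaders } from "@azure/core-http";

const headers = new HttpHeaders({ "Content-Type": "application/json" });
headers.set("x-ms-version", "2023-01-01"); // set() accepts string or number values
headers.set("Retry-After", 5);

console.log(headers.get("content-type"));     // lookups are case-insensitive
console.log(headers.contains("retry-after")); // true
console.log(headers.headerNames());           // all header names in the collection
console.log(headers.rawHeaders());            // plain { name: value } object, same as toJson()
headers.remove("retry-after");                // returns whether the header existed
const copy = headers.clone();                 // deep copy of the collection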
+ */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map new file mode 100644 index 0000000..e063133 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.d.ts","sourceRoot":"","sources":["../../../src/httpHeaders.ts"],"names":[],"mappings":"AAUA;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,oBAAY,cAAc,GAAG;IAAE,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;OAKG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAC5D;;;;OAIG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;IAC5C;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACtC;;;;OAIG;IACH,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACpC;;OAEG;IACH,UAAU,IAAI,cAAc,CAAC;IAC7B;;OAEG;IACH,YAAY,IAAI,UAAU,EAAE,CAAC;IAC7B;;OAEG;IACH,WAAW,IAAI,MAAM,EAAE,CAAC;IACxB;;OAEG;IACH,YAAY,IAAI,MAAM,EAAE,CAAC;IACzB;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;IACzB;;;OAGG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,YAAY,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,cAAc,CAAC;CAC9D;AAED,wBAAgB,iBAAiB,CAAC,MAAM,CAAC,EAAE,OAAO,GAAG,MAAM,IAAI,eAAe,CA+B7E;AAED;;GAEG;AACH,qBAAa,WAAY,YAAW,eAAe;IACjD,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAsC;gBAEtD,UAAU,CAAC,EAAE,cAAc;IASvC;;;;;OAKG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAOlE;;;;OAIG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKlD;;OAEG;IACI,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAI5C;;;;OAIG;IACI,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAM1C;;OAEG;IACI,UAAU,IAAI,cAAc;IAInC;;OAEG;IACI,YAAY,IAAI,UAAU,EAAE;IAQnC;;OAEG;IACI,WAAW,IAAI,MAAM,EAAE;IAS9B;;OAEG;IACI,YAAY,IAAI,MAAM,EAAE;IAS/B;;OAEG;IACI,MAAM,CAAC,OAAO,GAAE;QAAE,YAAY,CAAC,EAAE,OAAO,CAAA;KAAO,GAAG,cAAc;IAgBvE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAIzB;;OAEG;IACI,KAAK,IAAI,WAAW;CAQ5B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts new file mode 100644 index 0000000..5e3613d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts @@ -0,0 +1,78 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. 
+ */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; +} +/** + * The flattened response to a REST call. + * Contains the underlying {@link HttpOperationResponse} as well as + * the merged properties of the `parsedBody`, `parsedHeaders`, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: HttpOperationResponse; + /** + * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body. + */ + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map new file mode 100644 index 0000000..96af4cc --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.d.ts","sourceRoot":"","sources":["../../../src/httpOperationResponse.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;CAC1B;AAED,OAAO,CAAC,MAAM,CAAC;IACb;;;OAGG;IACH,UAAU,IAAI;KAAG;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,qBAAsB,SAAQ,YAAY;IACzD;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAEvC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;OAEG;IACH,UAAU,CAAC,EAAE,GAAG,CAAC;IAEjB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;IAEzB;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;CAC5C;AAED;;;;GAIG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,SAAS,EAAE,qBAAqB,CAAC;IAEjC;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts new file mode 100644 index 0000000..7a13ab2 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. 
+ */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map new file mode 100644 index 0000000..879719d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.d.ts","sourceRoot":"","sources":["../../../src/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,oBAAoB;IAC9B;;OAEG;IACH,GAAG,IAAA;IAEH;;OAEG;IACH,KAAK,IAAA;IAEL;;OAEG;IACH,OAAO,IAAA;IAEP;;OAEG;IACH,IAAI,IAAA;CACL"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts new file mode 100644 index 0000000..cd26a55 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. 
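// Minimal sketch of the console-backed pipeline logger declared above; the message text
// is a placeholder. minimumLogLevel is the threshold request policies consult when
// deciding whether a message should be logged.
import { ConsoleHttpPipelineLogger, HttpPipelineLogLevel } from "@azure/core-http";

const pipelineLogger = new ConsoleHttpPipelineLogger(HttpPipelineLogLevel.INFO);
pipelineLogger.log(HttpPipelineLogLevel.WARNING, "Retrying request after throttling response");
pipelineLogger.log(HttpPipelineLogLevel.ERROR, "Request failed after final retry");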
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map new file mode 100644 index 0000000..22fa885 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.d.ts","sourceRoot":"","sources":["../../../src/httpPipelineLogger.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,eAAe,EAAE,oBAAoB,CAAC;IAEtC;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,yBAA0B,YAAW,kBAAkB;IAK/C,eAAe,EAAE,oBAAoB;IAJxD;;;OAGG;gBACgB,eAAe,EAAE,oBAAoB;IAExD;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAgB3D"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/index.d.ts b/node_modules/@azure/core-http/types/latest/src/index.d.ts new file mode 100644 index 0000000..eea1ac1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/index.d.ts @@ -0,0 +1,52 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, } from "./webResource"; +export { CommonResponse, CommonRequestInit, CommonRequestInfo } from "./nodeFetchHttpClient"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { OperationOptions, OperationRequestOptions, operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { ServiceClient, ServiceClientOptions, flattenResponse, createPipelineFromOptions, ProxySettings, ProxyOptions, } from "./serviceClient"; +export { PipelineOptions, InternalPipelineOptions } from "./pipelineOptions"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { LogPolicyOptions, logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryOptions, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy, RedirectOptions } 
from "./policies/redirectPolicy"; +export { keepAlivePolicy, KeepAliveOptions } from "./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, UserAgentOptions, TelemetryInfo, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, DeserializationOptions, deserializeResponseBody, DeserializationContentTypes, } from "./policies/deserializationPolicy"; +export { tracingPolicy, TracingPolicyOptions } from "./policies/tracingPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isDuration, } from "./util/utils"; +export { isNode } from "@azure/core-util"; +export { URLBuilder, URLQuery } from "./url"; +export { AbortSignalLike } from "@azure/abort-controller"; +export { delay } from "@azure/core-util"; +export { createSpanFunction, SpanConfig } from "./createSpanLegacy"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from "@azure/core-auth"; +export { AccessTokenCache, ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from "./util/serializer.common"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/index.d.ts.map b/node_modules/@azure/core-http/types/latest/src/index.d.ts.map new file mode 100644 index 0000000..adfa925 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":";AAMA,OAAO,EACL,WAAW,EACX,eAAe,EACf,eAAe,EACf,qBAAqB,EACrB,WAAW,EACX,cAAc,EACd,kBAAkB,EAClB,qBAAqB,GACtB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,cAAc,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,uBAAuB,CAAC;AAC7F,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AACzF,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC5F,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EACL,gBAAgB,EAChB,uBAAuB,EACvB,oCAAoC,GACrC,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACrB,aAAa,GACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EACL,aAAa,EACb,oBAAoB,EACpB,eAAe,EACf,yBAAyB,EACzB,aAAa,EACb,YAAY,GACb,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,MAAM,mBAAmB,CAAC;AAC7E,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAE,gBAAgB,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACpB,wBAAwB,GACzB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,YAAY,EAAE,SAAS,EAAE,MAAM,mCAAmC,CAAC;AACpG,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAC/E,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,eAAe,EACf,wBAAwB,EACxB,gBAAgB,EAChB,aAAa,GACd,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,qBAAqB,EACrB,sBAAsB,EACtB,uBAAuB,EACvB,2BAA2B,GAC5B,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAC/E,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,mBAAmB,EACnB,oBAAoB,EACpB,kBAAkB,EAClB,cAAc,EACd,MAAM,EACN,UAAU,EACV,eAAe,EACf,cAAc,EACd,gBAAgB,EAChB,UAAU,EACV,iBAAiB,EACjB,wBAAwB,EACxB,UAAU,EACV,iBAAiB,EACjB,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EACT,eAAe,EACf,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC,OAAO,EAAE,kBAAkB,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AAGpE,OAAO,EAAE,eAAe,EAAE,eAAe,EAAE,WAAW,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpG,OAAO,EAAE,gBAAgB,EAAE,wBAAwB,EAAE,MAAM,gCAAgC,CAAC;AAC5F,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC1E,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAAE,uBAAuB,EAAE,MAAM,iCAAiC,CAAC;AAC7F,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAClF,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAE1D,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,WAAW,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/log.d.ts b/node_modules/@azure/core-http/types/latest/src/log.d.ts new file mode 100644 index 0000000..d9ad771 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/log.d.ts @@ -0,0 +1,2 @@ +export declare const logger: import("@azure/logger").AzureLogger; +//# 
sourceMappingURL=log.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/log.d.ts.map b/node_modules/@azure/core-http/types/latest/src/log.d.ts.map new file mode 100644 index 0000000..0a002fa --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/log.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"log.d.ts","sourceRoot":"","sources":["../../../src/log.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,MAAM,qCAAkC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts new file mode 100644 index 0000000..1ba0e47 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts @@ -0,0 +1,64 @@ +/// +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { Transform } from "stream"; +import { TransferProgressEvent, WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * String URLs used when calling to `fetch()`. + */ +export declare type CommonRequestInfo = string; +/** + * An object containing information about the outgoing HTTP request. + */ +export declare type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; +/** + * An object containing information about the incoming HTTP response. + */ +export declare type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; +export declare class ReportTransform extends Transform { + private progressCallback; + private loadedBytes; + _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void; + constructor(progressCallback: (progress: TransferProgressEvent) => void); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +/** + * An HTTP client that uses `node-fetch`. + */ +export declare class NodeFetchHttpClient implements HttpClient { + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ + sendRequest(httpRequest: WebResourceLike): Promise; + private proxyAgentMap; + private keepAliveAgents; + private getOrCreateAgent; + /** + * Uses `node-fetch` to perform the request. + */ + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + /** + * Prepares a request based on the provided web resource. + */ + prepareRequest(httpRequest: WebResourceLike): Promise>; + /** + * Process an HTTP response. 
+ */ + processRequest(_operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map new file mode 100644 index 0000000..5462c08 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.d.ts","sourceRoot":"","sources":["../../../src/nodeFetchHttpClient.ts"],"names":[],"mappings":";;AAMA,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAE7D,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAC7C,OAAO,EAAE,qBAAqB,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEvE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAuBhE;;GAEG;AACH,oBAAY,iBAAiB,GAAG,MAAM,CAAC;AAEvC;;GAEG;AACH,oBAAY,iBAAiB,GAAG,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,GAAG,QAAQ,CAAC,GAAG;IACjF,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,GAAG,CAAC;CACd,CAAC;AAEF;;GAEG;AACH,oBAAY,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,CAAC,GAAG;IAC7E,IAAI,EAAE,GAAG,CAAC;IACV,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;CACf,CAAC;AAEF,qBAAa,eAAgB,SAAQ,SAAS;IAShC,OAAO,CAAC,gBAAgB;IARpC,OAAO,CAAC,WAAW,CAAa;IAChC,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;gBAOrE,gBAAgB,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI;CAGhF;AAiBD;;GAEG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,GAAG,eAAe,CAQ9D;AAED;;GAEG;AACH,qBAAa,mBAAoB,YAAW,UAAU;IACpD;;;;OAIG;IACG,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IA4L/E,OAAO,CAAC,aAAa,CAAsC;IAC3D,OAAO,CAAC,eAAe,CAAkB;IAEzC,OAAO,CAAC,gBAAgB;IAqDxB;;OAEG;IAEG,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;IAIxF;;OAEG;IACG,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAWjF;;OAEG;IACG,cAAc,CAAC,kBAAkB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;CAG/E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts new file mode 100644 index 0000000..7eadcda --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. 
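// Minimal sketch of sending a request with the default HTTP client and reading the
// HttpOperationResponse fields declared above. DefaultHttpClient resolves to
// NodeFetchHttpClient in Node.js and XhrHttpClient in the browser; the URL is a placeholder.
import { DefaultHttpClient, WebResource } from "@azure/core-http";

async function sendExample(): Promise<void> {
  const client = new DefaultHttpClient();
  const response = await client.sendRequest(
    new WebResource("https://example.invalid/status", "GET")
  );
  console.log(response.status);                       // HTTP status code, e.g. 200
  console.log(response.headers.get("content-type"));  // parsed response headers
  console.log(response.bodyAsText);                   // raw body text, when not streamed
  // In Node.js, bodies treated as streams arrive on readableStreamBody instead.
}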
+ */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map new file mode 100644 index 0000000..341dc04 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.d.ts","sourceRoot":"","sources":["../../../src/operationArguments.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,CAAC,aAAa,EAAE,MAAM,GAAG,GAAG,CAAC;IAE7B;;OAEG;IACH,OAAO,CAAC,EAAE,kBAAkB,CAAC;CAC9B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts new file mode 100644 index 0000000..6ea66e6 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts @@ -0,0 +1,56 @@ +import { RequestOptionsBase, TransferProgressEvent } from "./webResource"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationTracingOptions } from "@azure/core-tracing"; +/** + * The base options type for all operations. + */ +export interface OperationOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: OperationRequestOptions; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options that allow configuring the handling of HTTP requests made by an SDK operation. + */ +export interface OperationRequestOptions { + /** + * User defined custom request headers that will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. 
+ */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); +} +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +export declare function operationOptionsToRequestOptionsBase(opts: T): RequestOptionsBase; +//# sourceMappingURL=operationOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map new file mode 100644 index 0000000..31a30b9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationOptions.d.ts","sourceRoot":"","sources":["../../../src/operationOptions.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAC;AAC1E,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,uBAAuB,EAAE,MAAM,qBAAqB,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,cAAc,CAAC,EAAE,uBAAuB,CAAC;IACzC;;OAEG;IACH,cAAc,CAAC,EAAE,uBAAuB,CAAC;CAC1C;AAED;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;CAC9E;AAED;;;;GAIG;AACH,wBAAgB,oCAAoC,CAAC,CAAC,SAAS,gBAAgB,EAC7E,IAAI,EAAE,CAAC,GACN,kBAAkB,CAgBpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts new file mode 100644 index 0000000..4f0aa66 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts @@ -0,0 +1,54 @@ +import { Mapper } from "./serializer"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +/** + * A path which describes how to access a particular property in a given object data source. May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values. + */ +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. 
+ */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map new file mode 100644 index 0000000..70d5e72 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.d.ts","sourceRoot":"","sources":["../../../src/operationParameter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE;;GAEG;AACH,oBAAY,aAAa,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,aAAa,CAAA;CAAE,CAAC;AAE1F;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,EAAE,aAAa,CAAC;IAE7B;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,kBAAkB;IAC/D;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,kBAAkB;IACjE;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,qBAAqB,CAAC;CAC1C;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,SAAS,EAAE,kBAAkB,GAAG,MAAM,CAEhF;AAED,wBAAgB,8BAA8B,CAC5C,aAAa,EAAE,aAAa,EAC5B,MAAM,EAAE,MAAM,GACb,MAAM,CAUR"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts new file mode 100644 index 0000000..7e3d06b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts @@ -0,0 +1,19 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. 
+ */ + bodyMapper?: Mapper; + /** + * Indicates if this is an error response + */ + isError?: boolean; +} +//# sourceMappingURL=operationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map new file mode 100644 index 0000000..c6c21cf --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.d.ts","sourceRoot":"","sources":["../../../src/operationResponse.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts new file mode 100644 index 0000000..3bc5928 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts @@ -0,0 +1,77 @@ +import { Serializer } from "./serializer"; +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { HttpMethods } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +/** + * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The media type of the request body. + * This value can be used to aide in serialization if it is provided. + */ + readonly mediaType?: "json" | "xml" | "form" | "binary" | "multipart" | "text" | "unknown" | string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. 
+ */ + readonly headerParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + /** + * The different types of responses that this operation can return based on what status code is + * returned. + */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +export declare function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set; +//# sourceMappingURL=operationSpec.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map new file mode 100644 index 0000000..45e8678 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.d.ts","sourceRoot":"","sources":["../../../src/operationSpec.ts"],"names":[],"mappings":"AAGA,OAAO,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACtB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;IAEhC;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,WAAW,CAAC;IAEjC;;;;OAIG;IACH,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE1B;;;OAGG;IACH,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IAEvB;;;OAGG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAE9B;;;OAGG;IACH,QAAQ,CAAC,SAAS,CAAC,EACf,MAAM,GACN,KAAK,GACL,MAAM,GACN,QAAQ,GACR,WAAW,GACX,MAAM,GACN,SAAS,GACT,MAAM,CAAC;IACX;;OAEG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,kBAAkB,CAAC;IAE1C;;OAEG;IACH,QAAQ,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;IAE9D;;OAEG;IACH,QAAQ,CAAC,eAAe,CAAC,EAAE,aAAa,CAAC,uBAAuB,CAAC,CAAC;IAElE;;;OAGG;IACH,QAAQ,CAAC,gBAAgB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAE9D;;;OAGG;IACH,QAAQ,CAAC,kBAAkB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAEhE;;;OAGG;IACH,QAAQ,CAAC,SAAS,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,iBAAiB,CAAA;KAAE,CAAC;CACnE;AAED;;;GAGG;AACH,wBAAgB,4BAA4B,CAAC,aAAa,EAAE,aAAa,GAAG,GAAG,CAAC,MAAM,CAAC,CAYtF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts new file mode 100644 index 0000000..790b1b0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts @@ -0,0 +1,63 @@ +import { DeserializationOptions } from "./policies/deserializationPolicy"; +import { HttpClient } from "./httpClient"; +import { KeepAliveOptions } from "./policies/keepAlivePolicy"; +import { LogPolicyOptions } from "./policies/logPolicy"; +import { ProxyOptions } from "./serviceClient"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RetryOptions } from "./policies/exponentialRetryPolicy"; +import { UserAgentOptions } from "./policies/userAgentPolicy"; +/** + * Defines options that are used to configure the HTTP pipeline for + * an SDK client. + */ +export interface PipelineOptions { + /** + * The HttpClient implementation to use for outgoing HTTP requests. Defaults + * to DefaultHttpClient. + */ + httpClient?: HttpClient; + /** + * Options that control how to retry failed requests. + */ + retryOptions?: RetryOptions; + /** + * Options to configure a proxy for outgoing requests. 
+ */ + proxyOptions?: ProxyOptions; + /** + * Options for how HTTP connections should be maintained for future + * requests. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; +} +/** + * Defines options that are used to configure internal options of + * the HTTP pipeline for an SDK client. + */ +export interface InternalPipelineOptions extends PipelineOptions { + /** + * Options to configure API response deserialization. + */ + deserializationOptions?: DeserializationOptions; + /** + * Options to configure request/response logging. + */ + loggingOptions?: LogPolicyOptions; + /** + * Configure whether to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Send JSON Array payloads as NDJSON. + */ + sendStreamingJson?: boolean; +} +//# sourceMappingURL=pipelineOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map new file mode 100644 index 0000000..3a23655 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pipelineOptions.d.ts","sourceRoot":"","sources":["../../../src/pipelineOptions.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,sBAAsB,EAAE,MAAM,kCAAkC,CAAC;AAC1E,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAC9D,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACxD,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAC/C,OAAO,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAC5D,OAAO,EAAE,YAAY,EAAE,MAAM,mCAAmC,CAAC;AACjE,OAAO,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;;OAGG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IAExB;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAE5B;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAE5B;;;OAGG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;IAEpC;;OAEG;IACH,eAAe,CAAC,EAAE,eAAe,CAAC;IAElC;;OAEG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;CACrC;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,eAAe;IAC9D;;OAEG;IACH,sBAAsB,CAAC,EAAE,sBAAsB,CAAC;IAEhD;;OAEG;IACH,cAAc,CAAC,EAAE,gBAAgB,CAAC;IAElC;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAE7B;;OAEG;IACH,iBAAiB,CAAC,EAAE,OAAO,CAAC;CAC7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts new file mode 100644 index 0000000..d9c549a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts @@ -0,0 +1,33 @@ +import { TokenCredential } from "@azure/core-auth"; +import { RequestPolicyFactory } from "../policies/requestPolicy"; +interface TokenCyclerOptions { + /** + * The window of time before token expiration during which the token will be + * considered unusable due to risk of the token expiring before sending the + * request. + * + * This will only become meaningful if the refresh fails for over + * (refreshWindow - forcedRefreshWindow) milliseconds. + */ + forcedRefreshWindowInMs: number; + /** + * Interval in milliseconds to retry failed token refreshes. + */ + retryIntervalInMs: number; + /** + * The window of time before token expiration during which + * we will attempt to refresh the token. 
+ */ + refreshWindowInMs: number; +} +export declare const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions; +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export declare function bearerTokenAuthenticationPolicy(credential: TokenCredential, scopes: string | string[]): RequestPolicyFactory; +export {}; +//# sourceMappingURL=bearerTokenAuthenticationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map new file mode 100644 index 0000000..9bdd1cd --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bearerTokenAuthenticationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/bearerTokenAuthenticationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAgC,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACjF,OAAO,EAGL,oBAAoB,EAErB,MAAM,2BAA2B,CAAC;AAgBnC,UAAU,kBAAkB;IAC1B;;;;;;;OAOG;IACH,uBAAuB,EAAE,MAAM,CAAC;IAChC;;OAEG;IACH,iBAAiB,EAAE,MAAM,CAAC;IAC1B;;;OAGG;IACH,iBAAiB,EAAE,MAAM,CAAC;CAC3B;AAGD,eAAO,MAAM,sBAAsB,EAAE,kBAIpC,CAAC;AA2KF;;;;;;GAMG;AACH,wBAAgB,+BAA+B,CAC7C,UAAU,EAAE,eAAe,EAC3B,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,GACxB,oBAAoB,CAgCtB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts new file mode 100644 index 0000000..2290bed --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts @@ -0,0 +1,59 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { SerializerOptions } from "../util/serializer.common"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to configure API response deserialization. + */ +export interface DeserializationOptions { + /** + * Configures the expected content types for the deserialization of + * JSON and XML response bodies. + */ + expectedContentTypes: DeserializationContentTypes; +} +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. 
+ */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +export declare const DefaultDeserializationOptions: DeserializationOptions; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + readonly xmlCharKey: string; + constructor(nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions, deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions); + sendRequest(request: WebResourceLike): Promise; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse, options?: SerializerOptions): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map new file mode 100644 index 0000000..f026254 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"deserializationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,iBAAiB,EAAe,MAAM,2BAA2B,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAKjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC;;;OAGG;IACH,oBAAoB,EAAE,2BAA2B,CAAC;CACnD;AAED;;;GAGG;AACH,MAAM,WAAW,2BAA2B;IAC1C;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC;IAEhB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC;CAChB;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CACnC,2BAA2B,CAAC,EAAE,2BAA2B,EACzD,cAAc,CAAC,EAAE,iBAAiB,GACjC,oBAAoB,CAWtB;AAED,eAAO,MAAM,uBAAuB,UAAoC,CAAC;AACzE,eAAO,MAAM,sBAAsB,UAA8C,CAAC;AAElF,eAAO,MAAM,6BAA6B,EAAE,sBAK3C,CAAC;AAEF;;;GAGG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,SAAgB,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3C,SAAgB,eAAe,EAAE,MAAM,EAAE,CAAC;IAC1C,SAAgB,UAAU,EAAE,MAAM,CAAC;gBAGjC,UAAU,EAAE,aAAa,EACzB,oBAAoB,EAAE,oBAAoB,EAC1C,2BAA2B,CAAC,EAAE,2BAA2B,EACzD,cAAc,GAAE,iBAAsB;IAW3B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAOnF;AAsCD;;;;;;;GAOG;AACH,wBAAgB,uBAAuB,CACrC,gBAAgB,EAAE,MAAM,EAAE,EAC1B,eAAe,EAAE,MAAM,EAAE,EACzB,QAAQ,EAAE,qBAAqB,EAC/B,OAAO,GAAE,iBAAsB,GAC9B,OAAO,CAAC,qBAAqB,CAAC,CA4EhC"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts new file mode 100644 index 0000000..f071e6e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts @@ -0,0 +1,13 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map new file mode 100644 index 0000000..cc9f4ce --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/disableResponseDecompressionPolicy.browser.ts"],"names":[],"mappings":"AAMA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAI7C;;;GAGG;AACH,wBAAgB,kCAAkC,IAAI,oBAAoB,CAMzE;AAED,qBAAa,kCAAmC,SAAQ,iBAAiB;gBAC3D,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAKvD,WAAW,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAGhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts new file mode 100644 index 0000000..949f8e2 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts @@ -0,0 +1,29 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map new file mode 100644 index 0000000..364460e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/disableResponseDecompressionPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C;;;GAGG;AACH,wBAAgB,kCAAkC,IAAI,oBAAoB,CAMzE;AAED;;;GAGG;AACH,qBAAa,kCAAmC,SAAQ,iBAAiB;IACvE;;;;;OAKG;gBAGS,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAIpE;;;;;OAKG;IACU,WAAW,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAI/E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 0000000..10fcb37 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,73 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export declare enum RetryMode { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + Exponential = 0 +} +/** + * Options that control how to retry failed requests. + */ +export interface RetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; + /** + * The amount of delay in milliseconds between retry attempts. Defaults to 30000 + * (30 seconds). The delay increases exponentially with each retry up to a maximum + * specified by maxRetryDelayInMs. + */ + retryDelayInMs?: number; + /** + * The maximum delay in milliseconds allowed before retrying an operation. Defaults + * to 90000 (90 seconds). + */ + maxRetryDelayInMs?: number; + /** + * Currently supporting only Exponential mode. + */ + mode?: RetryMode; +} +export declare const DefaultRetryOptions: RetryOptions; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. 
+ */ + retryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map new file mode 100644 index 0000000..8d1cca6 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAYzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD;;;;;GAKG;AACH,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAYtB;AAED;;GAEG;AACH,oBAAY,SAAS;IACnB;;;OAGG;IACH,WAAW,IAAA;CACZ;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;;;OAIG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB;;;OAGG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAE3B;;OAEG;IACH,IAAI,CAAC,EAAE,SAAS,CAAC;CAClB;AAED,eAAO,MAAM,mBAAmB,EAAE,YAIjC,CAAC;AAEF;;GAEG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IAEzB;;;;;;;OAOG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM;IAUpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 0000000..827fd04 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,14 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. 
+ */ +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map new file mode 100644 index 0000000..6c50dfe --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;GAGG;AACH,wBAAgB,6BAA6B,CAC3C,mBAAmB,SAA2B,GAC7C,oBAAoB,CAMtB;AAED,qBAAa,6BAA8B,SAAQ,iBAAiB;IAIhE,OAAO,CAAC,oBAAoB;gBAF5B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACrB,oBAAoB,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts new file mode 100644 index 0000000..dc137ca --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts @@ -0,0 +1,46 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how HTTP connections should be maintained for future + * requests. + */ +export interface KeepAliveOptions { + /** + * When true, connections will be kept alive for multiple requests. + * Defaults to true. + */ + enable: boolean; +} +/** + * By default, HTTP connections are maintained for future requests. + */ +export declare const DefaultKeepAliveOptions: KeepAliveOptions; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export declare function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory; +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export declare class KeepAlivePolicy extends BaseRequestPolicy { + private readonly keepAliveOptions; + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, keepAliveOptions: KeepAliveOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=keepAlivePolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map new file mode 100644 index 0000000..0852850 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"keepAlivePolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/keepAlivePolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;OAGG;IACH,MAAM,EAAE,OAAO,CAAC;CACjB;AAED;;GAEG;AACH,eAAO,MAAM,uBAAuB,EAAE,gBAErC,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,gBAAgB,CAAC,EAAE,gBAAgB,GAAG,oBAAoB,CAMzF;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,iBAAiB;IAWlD,OAAO,CAAC,QAAQ,CAAC,gBAAgB;IAVnC;;;;;;OAMG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACZ,gBAAgB,EAAE,gBAAgB;IAKrD;;;;;OAKG;IACU,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAInF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts new file mode 100644 index 0000000..98130d5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts @@ -0,0 +1,79 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Debugger } from "@azure/logger"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Sanitizer } from "../util/sanitizer"; +import { WebResourceLike } from "../webResource"; +/** + * Options to pass to the {@link logPolicy}. + * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged. + */ +export interface LogPolicyOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to: + * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id, + * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials, + * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers, + * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding, + * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match, + * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent. + * + * Any headers specified in this field will be added to that list. + * Any other values will be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; + /** + * The Debugger (logger) instance to use for writing pipeline logs. + */ + logger?: Debugger; +} +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. 
+ * @returns An instance of the {@link LogPolicy} + */ +export declare function logPolicy(loggingOptions?: LogPolicyOptions): RequestPolicyFactory; +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export declare class LogPolicy extends BaseRequestPolicy { + logger: Debugger; + sanitizer: Sanitizer; + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames(): Set; + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames: Set); + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters(): Set; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters: Set); + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, { logger, allowedHeaderNames, allowedQueryParameters, }?: LogPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + private logRequest; + private logResponse; +} +//# sourceMappingURL=logPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map new file mode 100644 index 0000000..c79f751 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/logPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,QAAQ,EAAE,MAAM,eAAe,CAAC;AACzC,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;;;;;;;;;OAWG;IACH,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAE9B;;;OAGG;IACH,sBAAsB,CAAC,EAAE,MAAM,EAAE,CAAC;IAElC;;OAEG;IACH,MAAM,CAAC,EAAE,QAAQ,CAAC;CACnB;AAED;;;;GAIG;AACH,wBAAgB,SAAS,CAAC,cAAc,GAAE,gBAAqB,GAAG,oBAAoB,CAMrF;AAED;;GAEG;AACH,qBAAa,SAAU,SAAQ,iBAAiB;IAC9C,MAAM,EAAE,QAAQ,CAAC;IACjB,SAAS,EAAE,SAAS,CAAC;IAErB;;;;;;OAMG;IACH,IAAW,kBAAkB,IAAI,GAAG,CAAC,MAAM,CAAC,CAE3C;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB,CAAC,kBAAkB,EAAE,GAAG,CAAC,MAAM,CAAC,EAE5D;IAED;;;;OAIG;IACH,IAAW,sBAAsB,IAAI,GAAG,CAAC,MAAM,CAAC,CAE/C;IAED;;;;OAIG;IACH,IAAW,sBAAsB,CAAC,sBAAsB,EAAE,GAAG,CAAC,MAAM,CAAC,EAEpE;gBAGC,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,EACE,MAAwB,EACxB,kBAAuB,EACvB,sBAA2B,GAC5B,GAAE,gBAAqB;IAOnB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAO5E,OAAO,CAAC,UAAU;IAIlB,OAAO,CAAC,WAAW;CAKpB"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 0000000..3ff8522 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map new file mode 100644 index 0000000..8cdd338 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAOlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAQzD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 0000000..1fac888 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map new file mode 100644 index 0000000..3033090 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAElD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAYzD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts new file mode 100644 index 0000000..83cec20 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.native.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map new file mode 100644 index 0000000..f1b906f --- /dev/null +++ 
b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.native.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.native.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAGlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAazD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts new file mode 100644 index 0000000..4993bca --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts @@ -0,0 +1,3 @@ +import { RequestPolicyFactory } from "./requestPolicy"; +export declare function ndJsonPolicy(): RequestPolicyFactory; +//# sourceMappingURL=ndJsonPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map new file mode 100644 index 0000000..cad74b0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ndJsonPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/ndJsonPolicy.ts"],"names":[],"mappings":"AAMA,OAAO,EAGL,oBAAoB,EAErB,MAAM,iBAAiB,CAAC;AAIzB,wBAAgB,YAAY,IAAI,oBAAoB,CAMnD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts new file mode 100644 index 0000000..41dab70 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +export declare function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map new file mode 100644 index 0000000..54a4ec0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"proxyPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,uBAAuB,CAAC,SAAS,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAErF;AAED,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAK7D,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts new file mode 100644 index 0000000..77d535e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts @@ -0,0 +1,37 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export declare const globalNoProxyList: string[]; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export declare function proxyPolicy(proxySettings?: ProxySettings, options?: { + /** a list of patterns to override those loaded from NO_PROXY environment variable. 
*/ + customNoProxyList?: string[]; +}): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + private customNoProxyList?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, proxySettings: ProxySettings, customNoProxyList?: string[] | undefined); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map new file mode 100644 index 0000000..37f48a0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/proxyPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AAEjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;GAGG;AACH,eAAO,MAAM,iBAAiB,EAAE,MAAM,EAAO,CAAC;AAyD9C;;GAEG;AACH,wBAAgB,WAAW,IAAI,MAAM,EAAE,CAWtC;AAED;;;;GAIG;AACH,wBAAgB,uBAAuB,CAAC,QAAQ,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAiBpF;AAED;;;;;;GAMG;AACH,wBAAgB,WAAW,CACzB,aAAa,CAAC,EAAE,aAAa,EAC7B,OAAO,CAAC,EAAE;IACR,sFAAsF;IACtF,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;CAC9B,GACA,oBAAoB,CAiBtB;AA2BD,qBAAa,WAAY,SAAQ,iBAAiB;IAIvC,aAAa,EAAE,aAAa;IACnC,OAAO,CAAC,iBAAiB,CAAC;gBAH1B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACtB,aAAa,EAAE,aAAa,EAC3B,iBAAiB,CAAC,sBAAU;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAa7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts new file mode 100644 index 0000000..0ffbe37 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts @@ -0,0 +1,33 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /** + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + /** + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. + */ + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. 
+ */ +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map new file mode 100644 index 0000000..86218f9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"redirectPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/redirectPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAOjD;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IAEzB;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,eAAO,MAAM,sBAAsB,EAAE,eAGpC,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,cAAc,CAAC,cAAc,SAAK,GAAG,oBAAoB,CAMxE;AAED;;GAEG;AACH,qBAAa,cAAe,SAAQ,iBAAiB;IACmB,QAAQ,CAAC,UAAU;gBAA7E,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB,EAAW,UAAU,SAAK;IAIvF,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts new file mode 100644 index 0000000..71b4ac9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts @@ -0,0 +1,102 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. + */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +/** + * The underlying structure of a request policy. + */ +export interface RequestPolicy { + /** + * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made. + * @param httpRequest - {@link WebResourceLike} describing the request to be made. + */ + sendRequest(httpRequest: WebResourceLike): Promise; +} +/** + * The base class from which all request policies derive. + */ +export declare abstract class BaseRequestPolicy implements RequestPolicy { + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + readonly _nextPolicy: RequestPolicy; + /** + * The options that can be passed to a given request policy. + */ + readonly _options: RequestPolicyOptionsLike; + /** + * The main method to implement that manipulates a request/response. + */ + protected constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy: RequestPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options: RequestPolicyOptionsLike); + /** + * Sends a network request based on the given web resource. 
+ * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made. + */ + abstract sendRequest(webResource: WebResourceLike): Promise<HttpOperationResponse>; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log.
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map new file mode 100644 index 0000000..80d5c39 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/requestPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAC/D,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,oBAAoB,GAAG;IACjC,MAAM,CAAC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,GAAG,aAAa,CAAC;CACrF,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;;OAGG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC,CAAC;CAC3E;AAED;;GAEG;AACH,8BAAsB,iBAAkB,YAAW,aAAa;IAK5D;;OAEG;IACH,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC;;OAEG;IACH,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAX7C;;OAEG;IACH,SAAS;IACP;;OAEG;IACM,WAAW,EAAE,aAAa;IACnC;;OAEG;IACM,QAAQ,EAAE,wBAAwB;IAG7C;;;OAGG;aACa,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAEzF;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAIzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAGlE;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;OAIG;IACH,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO,CAAC;IAEnD;;;;;OAKG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,oBAAoB;IACnB,OAAO,CAAC,OAAO,CAAC;gBAAR,OAAO,CAAC,gCAAoB;IAEhD;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAQzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAKlE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 0000000..8c70c79 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map new file mode 100644 index 0000000..ddd83eb --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rpRegistrationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD,wBAAgB,oBAAoB,CAAC,YAAY,SAAK,GAAG,oBAAoB,CAM5E;AAED,qBAAa,oBAAqB,SAAQ,iBAAiB;IAIvD,QAAQ,CAAC,aAAa;gBAFtB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACpB,aAAa,SAAK;IAKtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts new file mode 100644 index 0000000..69b4531 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts @@ -0,0 +1,20 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map new file mode 100644 index 0000000..f6ba5f4 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/signingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,wBAAwB,EAAE,MAAM,yCAAyC,CAAC;AACnF,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;;GAIG;AACH,wBAAgB,aAAa,CAC3B,sBAAsB,EAAE,wBAAwB,GAC/C,oBAAoB,CAMtB;AAED;;GAEG;AACH,qBAAa,aAAc,SAAQ,iBAAiB;IAIzC,sBAAsB,EAAE,wBAAwB;gBAFvD,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACtB,sBAAsB,EAAE,wBAAwB;IAKzD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIxD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts new file mode 100644 index 0000000..11b68ec --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { BaseRequestPolicy, 
RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map new file mode 100644 index 0000000..1644efb --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAYzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;;;;;GAOG;AACH,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAED;;;;;;GAMG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;IACzB,gBAAgB,EAAE,MAAM,CAAC;gBAGvB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAapB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 0000000..2b43ae1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,35 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; 
+import { WebResourceLike } from "../webResource"; +declare type ResponseHandler = (httpRequest: WebResourceLike, response: HttpOperationResponse) => Promise; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export declare function throttlingRetryPolicy(): RequestPolicyFactory; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private _handleResponse; + private numberOfRetries; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _handleResponse?: ResponseHandler); + sendRequest(httpRequest: WebResourceLike): Promise; + private _defaultResponseHandler; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +export {}; +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map new file mode 100644 index 0000000..847b1ce --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAIzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD,aAAK,eAAe,GAAG,CACrB,WAAW,EAAE,eAAe,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,OAAO,CAAC,qBAAqB,CAAC,CAAC;AAGpC;;;;;;;;;GASG;AACH,wBAAgB,qBAAqB,IAAI,oBAAoB,CAM5D;AAID;;;;;;;;GAQG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,eAAe,CAAK;gBAG1B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,eAAe,CAAC,EAAE,eAAe;IAMtB,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;YAYxE,uBAAuB;WAkCvB,qBAAqB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;WAS9D,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;CAWjF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts new file mode 100644 index 0000000..717316e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts @@ -0,0 +1,31 @@ +import { 
BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Span } from "@azure/core-tracing"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to customize the tracing policy. + */ +export interface TracingPolicyOptions { + /** + * User agent used to better identify the outgoing requests traced by the tracing policy. + */ + userAgent?: string; +} +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export declare function tracingPolicy(tracingOptions?: TracingPolicyOptions): RequestPolicyFactory; +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export declare class TracingPolicy extends BaseRequestPolicy { + private userAgent?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, tracingOptions: TracingPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + tryCreateSpan(request: WebResourceLike): Span | undefined; + private tryProcessError; + private tryProcessResponse; +} +//# sourceMappingURL=tracingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map new file mode 100644 index 0000000..ee3c796 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tracingPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/tracingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,IAAI,EAML,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAQjD;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;;;GAIG;AACH,wBAAgB,aAAa,CAAC,cAAc,GAAE,oBAAyB,GAAG,oBAAoB,CAM7F;AAED;;GAEG;AACH,qBAAa,aAAc,SAAQ,iBAAiB;IAClD,OAAO,CAAC,SAAS,CAAC,CAAS;gBAGzB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,cAAc,EAAE,oBAAoB;IAMzB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAqBlF,aAAa,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI,GAAG,SAAS;IAsDzD,OAAO,CAAC,eAAe;IAgBvB,OAAO,CAAC,kBAAkB;CAe3B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts new file mode 100644 index 0000000..bb2c370 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts @@ -0,0 +1,49 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Telemetry information. Key/value pairs to include inside the User-Agent string. + */ +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +/** + * Options for adding user agent details to outgoing requests. + */ +export interface UserAgentOptions { + /** + * String prefix to add to the user agent for outgoing requests. + * Defaults to an empty string. 
+ */ + userAgentPrefix?: string; +} +export declare const getDefaultUserAgentHeaderName: typeof getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export declare function getDefaultUserAgentValue(): string; +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptions; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map new file mode 100644 index 0000000..f5466df --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,sBAAsB,EAA2B,MAAM,yBAAyB,CAAC;AAG1F,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,aAAa,GAAG;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAE7D;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;OAGG;IACH,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAwBD,eAAO,MAAM,6BAA6B,+BAAyB,CAAC;AAEpE;;;GAGG;AACH,wBAAgB,wBAAwB,IAAI,MAAM,CAKjD;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAenF;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,iBAAiB;IAElD,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,oBAAoB;IACvC,SAAS,CAAC,SAAS,EAAE,MAAM;IAC3B,SAAS,CAAC,WAAW,EAAE,MAAM;gBAHpB,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,oBAAoB,EAC7B,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAKrE;;OAEG;IACH,kBAAkB,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI;CASnD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts new file mode 100644 index 0000000..3a98bef --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts @@ -0,0 +1,15 @@ +/// +/// +import * as http from "http"; +import * as https from "https"; +import * as tunnel from "tunnel"; +import { HttpHeadersLike } from "./httpHeaders"; +import { ProxySettings } from "./serviceClient"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export declare function isUrlHttps(url: string): 
boolean; +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: tunnel.HttpsOverHttpsOptions): http.Agent | https.Agent; +//# sourceMappingURL=proxyAgent.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map new file mode 100644 index 0000000..0567f54 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.d.ts","sourceRoot":"","sources":["../../../src/proxyAgent.ts"],"names":[],"mappings":";;AAGA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD,oBAAY,UAAU,GAAG;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAA;CAAE,CAAC;AAC/E,wBAAgB,gBAAgB,CAC9B,UAAU,EAAE,MAAM,EAClB,aAAa,EAAE,aAAa,EAC5B,OAAO,CAAC,EAAE,eAAe,GACxB,UAAU,CA+BZ;AAED,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAG/C;AAED,wBAAgB,YAAY,CAC1B,cAAc,EAAE,OAAO,EACvB,YAAY,EAAE,OAAO,EACrB,aAAa,EAAE,MAAM,CAAC,qBAAqB,GAC1C,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAU1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts new file mode 100644 index 0000000..515f1f5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts @@ -0,0 +1,26 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + /** + * CSV: Each pair of segments joined by a single comma. + */ + Csv = ",", + /** + * SSV: Each pair of segments joined by a single space character. + */ + Ssv = " ", + /** + * TSV: Each pair of segments joined by a single tab character. + */ + Tsv = "\t", + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + Pipes = "|", + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map new file mode 100644 index 0000000..2ecbfd1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.d.ts","sourceRoot":"","sources":["../../../src/queryCollectionFormat.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,qBAAqB;IAC/B;;OAEG;IACH,GAAG,MAAM;IACT;;OAEG;IACH,GAAG,MAAM;IACT;;OAEG;IACH,GAAG,OAAO;IACV;;OAEG;IACH,KAAK,MAAM;IACX;;OAEG;IACH,KAAK,UAAU;CAChB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/restError.d.ts b/node_modules/@azure/core-http/types/latest/src/restError.d.ts new file mode 100644 index 0000000..fe37553 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/restError.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +/** + * An error resulting from an HTTP request to a service endpoint. 
+ */ +export declare class RestError extends Error { + /** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ + static readonly REQUEST_SEND_ERROR: string; + /** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ + static readonly PARSE_ERROR: string; + /** + * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT). + */ + code?: string; + /** + * The HTTP status code of the response, if one was returned. + */ + statusCode?: number; + /** + * Outgoing request. + */ + request?: WebResourceLike; + /** + * Incoming response. + */ + response?: HttpOperationResponse; + /** + * Any additional details. In the case of deserialization errors, can be the processed response. + */ + details?: unknown; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse); +} +//# sourceMappingURL=restError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map b/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map new file mode 100644 index 0000000..75a4cb7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.d.ts","sourceRoot":"","sources":["../../../src/restError.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAKhD;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;IAClC;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,kBAAkB,EAAE,MAAM,CAAwB;IAClE;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAiB;IAEpD;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,CAAC,EAAE,eAAe,CAAC;IAC1B;;OAEG;IACH,QAAQ,CAAC,EAAE,qBAAqB,CAAC;IACjC;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;gBAEhB,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB;CAkBnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serializer.d.ts b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts new file mode 100644 index 0000000..fb50b6a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts @@ -0,0 +1,361 @@ +import { SerializerOptions } from "./util/serializer.common"; +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export declare class Serializer { + /** + * The provided model mapper. + */ + readonly modelMappers: { + [key: string]: any; + }; + /** + * Whether the contents are XML or not. + */ + readonly isXML?: boolean | undefined; + constructor( + /** + * The provided model mapper. + */ + modelMappers?: { + [key: string]: any; + }, + /** + * Whether the contents are XML or not. + */ + isXML?: boolean | undefined); + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. 
+ * @param objectName - Name of the object. Used in the error messages. + * @deprecated Removing the constraints validation on client side. + */ + validateConstraints(mapper: Mapper, value: unknown, objectName: string): void; + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper: Mapper, object: unknown, objectName?: string, options?: SerializerOptions): any; + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper: Mapper, responseBody: unknown, objectName: string, options?: SerializerOptions): any; +} +/** + * Description of various value constraints such as integer ranges and string regex. + */ +export interface MapperConstraints { + /** + * The value should be less than or equal to the `InclusiveMaximum` value. + */ + InclusiveMaximum?: number; + /** + * The value should be less than the `ExclusiveMaximum` value. + */ + ExclusiveMaximum?: number; + /** + * The value should be greater than or equal to the `InclusiveMinimum` value. + */ + InclusiveMinimum?: number; + /** + * The value should be greater than the `InclusiveMinimum` value. + */ + ExclusiveMinimum?: number; + /** + * The length should be smaller than the `MaxLength`. + */ + MaxLength?: number; + /** + * The length should be bigger than the `MinLength`. + */ + MinLength?: number; + /** + * The value must match the pattern. + */ + Pattern?: RegExp; + /** + * The value must contain fewer items than the MaxItems value. + */ + MaxItems?: number; + /** + * The value must contain more items than the `MinItems` value. + */ + MinItems?: number; + /** + * The value must contain only unique items. + */ + UniqueItems?: true; + /** + * The value should be exactly divisible by the `MultipleOf` value. + */ + MultipleOf?: number; +} +/** + * Type of the mapper. Includes known mappers. + */ +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +/** + * The type of a simple mapper. + */ +export interface SimpleMapperType { + /** + * Name of the type of the property. + */ + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +/** + * Helps build a mapper that describes how to map a set of properties of an object based on other mappers. + * + * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`. + */ +export interface CompositeMapperType { + /** + * Name of the composite mapper type. + */ + name: "Composite"; + /** + * Use `className` to reference another type definition. + */ + className?: string; + /** + * Use `modelProperties` when the reference to the other type has been resolved. 
+ */ + modelProperties?: { + [propertyName: string]: Mapper; + }; + /** + * Used when a model has `additionalProperties: true`. Allows the generic processing of unnamed model properties on the response object. + */ + additionalProperties?: Mapper; + /** + * The name of the top-most parent scheme, the one that has no parents. + */ + uberParent?: string; + /** + * A polymorphic discriminator. + */ + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +/** + * Helps build a mapper that describes how to parse a sequence of mapped values. + */ +export interface SequenceMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Sequence"; + /** + * The mapper to use to map each one of the properties of the sequence. + */ + element: Mapper; +} +/** + * Helps build a mapper that describes how to parse a dictionary of mapped values. + */ +export interface DictionaryMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Dictionary"; + /** + * The mapper to use to map the value of each property in the dictionary. + */ + value: Mapper; +} +/** + * Helps build a mapper that describes how to parse an enum value. + */ +export interface EnumMapperType { + /** + * Name of the enum type mapper. + */ + name: "Enum"; + /** + * Values allowed by this mapper. + */ + allowedValues: any[]; +} +/** + * The base definition of a mapper. Can be used for XML and plain JavaScript objects. + */ +export interface BaseMapper { + /** + * Name for the xml element + */ + xmlName?: string; + /** + * Xml element namespace + */ + xmlNamespace?: string; + /** + * Xml element namespace prefix + */ + xmlNamespacePrefix?: string; + /** + * Determines if the current property should be serialized as an attribute of the parent xml element + */ + xmlIsAttribute?: boolean; + /** + * Determines if the current property should be serialized as the inner content of the xml element + */ + xmlIsMsText?: boolean; + /** + * Name for the xml elements when serializing an array + */ + xmlElementName?: string; + /** + * Whether or not the current property should have a wrapping XML element + */ + xmlIsWrapped?: boolean; + /** + * Whether or not the current property is readonly + */ + readOnly?: boolean; + /** + * Whether or not the current property is a constant + */ + isConstant?: boolean; + /** + * Whether or not the current property is required + */ + required?: boolean; + /** + * Whether or not the current property allows mull as a value + */ + nullable?: boolean; + /** + * The name to use when serializing + */ + serializedName?: string; + /** + * Type of the mapper + */ + type: MapperType; + /** + * Default value when one is not explicitly provided + */ + defaultValue?: any; + /** + * Constraints to test the current value against + */ + constraints?: MapperConstraints; +} +/** + * Mappers are definitions of the data models used in the library. + * These data models are part of the Operation or Client definitions in the responses or parameters. + */ +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +/** + * Used to disambiguate discriminated type unions. + * For example, if response can have many shapes but also includes a 'kind' field (or similar), + * that field can be used to determine how to deserialize the response to the correct type. + */ +export interface PolymorphicDiscriminator { + /** + * Name of the discriminant property in the original JSON payload, e.g. `@odata.kind`. 
+ */ + serializedName: string; + /** + * Name to use on the resulting object instead of the original property name. + * Useful since the JSON property could be difficult to work with. + * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`. + */ + clientName: string; + /** + * It may contain any other property. + */ + [key: string]: string; +} +/** + * A mapper composed of other mappers. + */ +export interface CompositeMapper extends BaseMapper { + /** + * The type descriptor of the `CompositeMapper`. + */ + type: CompositeMapperType; +} +/** + * A mapper describing arrays. + */ +export interface SequenceMapper extends BaseMapper { + /** + * The type descriptor of the `SequenceMapper`. + */ + type: SequenceMapperType; +} +/** + * A mapper describing plain JavaScript objects used as key/value pairs. + */ +export interface DictionaryMapper extends BaseMapper { + /** + * The type descriptor of the `DictionaryMapper`. + */ + type: DictionaryMapperType; + /** + * Optionally, a prefix to add to the header collection. + */ + headerCollectionPrefix?: string; +} +/** + * A mapper describing an enum value. + */ +export interface EnumMapper extends BaseMapper { + /** + * The type descriptor of the `EnumMapper`. + */ + type: EnumMapperType; +} +/** + * An interface representing an URL parameter value. + */ +export interface UrlParameterValue { + /** + * The URL value. + */ + value: string; + /** + * Whether to keep or skip URL encoding. + */ + skipUrlEncoding: boolean; +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export declare function serializeObject(toSerialize: unknown): any; +/** + * String enum containing the string types of property mappers. 
+ */ +export declare const MapperType: { + Date: "Date"; + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + DateTime: "DateTime"; + DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map new file mode 100644 index 0000000..d8ca0c7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.d.ts","sourceRoot":"","sources":["../../../src/serializer.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,0BAA0B,CAAC;AAIvF;;;;GAIG;AACH,qBAAa,UAAU;IAEnB;;OAEG;aACa,YAAY,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE;IACpD;;OAEG;aACa,KAAK,CAAC;;IAPtB;;OAEG;IACa,YAAY,GAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAO;IACzD;;OAEG;IACa,KAAK,CAAC,qBAAS;IAGjC;;;;;;OAMG;IACH,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,GAAG,IAAI;IAmE7E;;;;;;;;OAQG;IACH,SAAS,CACP,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,OAAO,EACf,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,GAAE,iBAAsB,GAC9B,GAAG;IA2FN;;;;;;;;OAQG;IACH,WAAW,CACT,MAAM,EAAE,MAAM,EACd,YAAY,EAAE,OAAO,EACrB,UAAU,EAAE,MAAM,EAClB,OAAO,GAAE,iBAAsB,GAC9B,GAAG;CAmGP;AA6yBD;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,WAAW,CAAC,EAAE,IAAI,CAAC;IACnB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED;;GAEG;AACH,oBAAY,UAAU,GAClB,gBAAgB,GAChB,mBAAmB,GACnB,kBAAkB,GAClB,oBAAoB,GACpB,cAAc,CAAC;AAEnB;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,IAAI,EACA,WAAW,GACX,SAAS,GACT,WAAW,GACX,MAAM,GACN,UAAU,GACV,iBAAiB,GACjB,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,UAAU,GACV,UAAU,GACV,MAAM,GACN,QAAQ,GACR,KAAK,CAAC;CACX;AAED;;;;GAIG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;IAElB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAErD;;OAEG;IACH,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAE9B;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,wBAAwB,CAAC,EAAE,wBAAwB,CAAC;CACrD;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CACjB;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,IAAI,EAAE,YAAY,CAAC;IACnB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,aAAa,EAAE,GAAG,EAAE,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,cAAc,CAAC,EAAE,MA
AM,CAAC;IACxB;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;IACjB;;OAEG;IACH,YAAY,CAAC,EAAE,GAAG,CAAC;IACnB;;OAEG;IACH,WAAW,CAAC,EAAE,iBAAiB,CAAC;CACjC;AAED;;;GAGG;AACH,oBAAY,MAAM,GAAG,UAAU,GAAG,eAAe,GAAG,cAAc,GAAG,gBAAgB,GAAG,UAAU,CAAC;AAEnG;;;;GAIG;AACH,MAAM,WAAW,wBAAwB;IACvC;;OAEG;IACH,cAAc,EAAE,MAAM,CAAC;IACvB;;;;OAIG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,eAAgB,SAAQ,UAAU;IACjD;;OAEG;IACH,IAAI,EAAE,mBAAmB,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,cAAe,SAAQ,UAAU;IAChD;;OAEG;IACH,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,gBAAiB,SAAQ,UAAU;IAClD;;OAEG;IACH,IAAI,EAAE,oBAAoB,CAAC;IAC3B;;OAEG;IACH,sBAAsB,CAAC,EAAE,MAAM,CAAC;CACjC;AAED;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,UAAU;IAC5C;;OAEG;IACH,IAAI,EAAE,cAAc,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;CAC1B;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,WAAW,EAAE,OAAO,GAAG,GAAG,CAsBzD;AAaD;;GAEG;AAEH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;CAiBrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts new file mode 100644 index 0000000..7fb2de3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts @@ -0,0 +1,168 @@ +import { Mapper, Serializer } from "./serializer"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { ServiceCallback } from "./util/utils"; +import { TokenCredential } from "@azure/core-auth"; +import { HttpClient } from "./httpClient"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { InternalPipelineOptions } from "./pipelineOptions"; +import { OperationArguments } from "./operationArguments"; +import { OperationResponse } from "./operationResponse"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +/** + * Options to configure a proxy for outgoing requests (Node.js only). + */ +export interface ProxySettings { + /** + * The proxy's host address. + */ + host: string; + /** + * The proxy host's port. + */ + port: number; + /** + * The user name to authenticate with the proxy, if required. + */ + username?: string; + /** + * The password to authenticate with the proxy, if required. + */ + password?: string; +} +/** + * An alias of {@link ProxySettings} for future use. + */ +export declare type ProxyOptions = ProxySettings; +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. 
+ */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-useragent" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * If specified, will be used to build the BearerTokenAuthenticationPolicy. + */ + credentialScopes?: string | string[]; +} +/** + * ServiceClient sends service requests and receives responses. + */ +export declare class ServiceClient { + /** + * If specified, this is the base URI that requests will be made against for this ServiceClient. + * If it is not specified, then all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(credentials?: TokenCredential | ServiceClientCredentials, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. 
+ */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +export declare function createPipelineFromOptions(pipelineOptions: InternalPipelineOptions, authPolicyFactory?: RequestPolicyFactory): ServiceClientOptions; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map new file mode 100644 index 0000000..0d7b95f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"serviceClient.d.ts","sourceRoot":"","sources":["../../../src/serviceClient.ts"],"names":[],"mappings":"AAIA,OAAO,EAAqC,MAAM,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACjG,OAAO,EAEL,2BAA2B,EAE5B,MAAM,kCAAkC,CAAC;AAI1C,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAE9E,OAAO,EAEL,aAAa,EAGd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,aAAa,EAAgC,MAAM,iBAAiB,CAAC;AAC9E,OAAO,EAEL,qBAAqB,EAErB,eAAe,EAEhB,MAAM,eAAe,CAAC;AACvB,OAAO,EAEL,oBAAoB,EAErB,MAAM,0BAA0B,CAAC;AAElC,OAAO,EAAE,eAAe,EAAE,MAAM,cAAc,CAAC;AAE/C,OAAO,EAAE,eAAe,EAAqB,MAAM,kBAAkB,CAAC;AAMtE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,uBAAuB,EAAE,MAAM,mBAAmB,CAAC;AAC5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAgBlF;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,oBAAY,YAAY,GAAG,aAAa,CAAC;AAEzC;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,sBAAsB,CAAC,EACnB,oBAAoB,EAAE,GACtB,CAAC,CAAC,6BAA6B,EAAE,oBAAoB,EAAE,KAAK,IAAI,GAAG,oBAAoB,EAAE,CAAC,CAAC;IAC/F;;OAEG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB;;OAEG;IACH,kBAAkB,CAAC,EAAE,kBAAkB,CAAC;IACxC;;OAEG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB;;OAEG;IACH,0BAA0B,CAAC,EAAE,MAAM,CAAC;IACpC;;OAEG;IACH,6BAA6B,CAAC,EAAE,OAAO,CAAC;IACxC;;;OAGG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;;OAGG;IACH,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC;;OAEG;IACH,2BAA2B,CAAC,EAAE,2BAA2B,CAAC;IAC1D;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,0BAA0B,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAChF;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,gBAAgB,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAC5D;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;CACtC;AAED;;GAEG;AACH,qBAAa,aAAa;IACxB;;;OAGG;IACH,SAAS,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE3B;;;OAGG;IACH,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAEtC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,qBAAqB,CAAuB;IAE7D,OAAO,CAAC,QAAQ,CAAC,uBAAuB,CAAyB;IACjE,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAU;IAE3C;;;;OAIG;gBAED,WAAW,CAAC,EAAE,eAAe,GAAG,wBAAwB,EAExD,OAAO,CAAC,EAAE,oBAAoB;IA+EhC;;OAEG;IACH,WAAW,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IA8B7F;;;;;OAKG;IACG,oBAAoB,CACxB,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,QAAQ,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,GAC9B,OAAO,CAAC,YAAY,CAAC;CA0OzB;AAED,wBAAgB,oBAAoB,CAClC,aAAa,EAAE,aAAa,EAC5B,WAAW,EAAE,eAAe,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,GAC3B,IAAI,CAyGN;AAqFD;;;;;GAKG;AACH,wBAAgB,yBAAyB,CACvC,eAAe,EAAE,uBAAuB,EACxC,iBAAiB,CAAC,EAAE,oBAAoB,GACvC,oBAAoB,CAmFtB;AAED,oBAAY,cAAc,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAAC;AAE7D;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,cAAc,CAYhG;AAiBD,wBAAgB,0CAA0C,CACxD,aAAa,EAAE,aAAa,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,eAAe,EAAE,MAAM,EACvB,UAAU,EAAE,UAAU,GACrB,GAAG,CAmEL;AA6BD;;;;;GAKG;AACH,wBAAgB,eAAe,CAC7B,SAAS,EAAE,qBAAqB,EAChC,YAAY,EAAE,iBAAiB,GAAG,SAAS,GAC1C,YAAY,CAyEd"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/url.d.ts b/node_modules/@azure/core-http/types/latest/src/url.d.ts new file mode 100644 index 0000000..9e3359a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/url.d.ts @@ -0,0 +1,160 @@ +/** + * A class that handles the query portion of a URLBuilder. 
+ */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Get the keys of the query string. + */ + keys(): string[]; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. + */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. + */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + /** + * Serializes the URL as a string. + * @returns the URL as a string. 
+ */ + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + /** + * Parses a given string URL into a new {@link URLBuilder}. + */ + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/url.d.ts.map b/node_modules/@azure/core-http/types/latest/src/url.d.ts.map new file mode 100644 index 0000000..4604c58 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/url.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"url.d.ts","sourceRoot":"","sources":["../../../src/url.ts"],"names":[],"mappings":"AAOA;;GAEG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,QAAQ,CAAC,SAAS,CAA2D;IAErF;;OAEG;IACI,GAAG,IAAI,OAAO;IAIrB;;OAEG;IACI,IAAI,IAAI,MAAM,EAAE;IAIvB;;;;OAIG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,EAAE,cAAc,EAAE,OAAO,GAAG,IAAI;IAgBhE;;;OAGG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIhE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAoBzB;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CA0D5C;AAED;;GAEG;AACH,qBAAa,UAAU;IACrB,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,MAAM,CAAuB;IAErC;;;OAGG;IACI,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQlD;;OAEG;IACI,SAAS,IAAI,MAAM,GAAG,SAAS;IAItC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQ9C;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,IAAI;IAQvD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAgB9C;;;OAGG;IACI,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAkBjD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;OAEG;IACI,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQhD;;;;OAIG;IACI,iBAAiB,CAAC,kBAAkB,EAAE,MAAM,EAAE,mBAAmB,EAAE,OAAO,GAAG,IAAI;IASxF;;;OAGG;IACI,sBAAsB,CAAC,kBAAkB,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIxF;;OAEG;IACI,QAAQ,IAAI,MAAM,GAAG,SAAS;IAIrC;;OAEG;IACH,OAAO,CAAC,GAAG;IAsCX;;;OAGG;IACI,QAAQ,IAAI,MAAM;IA6BzB;;;OAGG;IACI,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,IAAI;IAUlE;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,UAAU;CAK9C;AAED,aAAK,iBAAiB,GAAG,QAAQ,GAAG,gBAAgB,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,CAAC;AAEnG,aAAK,YAAY,GAAG,QAAQ,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;AAElE,qBAAa,QAAQ;aACgB,IAAI,EAAE,MAAM;aAAkB,IAAI,EAAE,YAAY;gBAAhD,IAAI,EAAE,MAAM,EAAkB,IAAI,EAAE,YAAY;WAErE,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI9B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CAG5C;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAOlE;AAED;;GAEG;AACH,qBAAa,YAAY;IAMJ,QAAQ,CAAC,KAAK,EAAE,MAAM;IALzC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,aAAa,EAAE,iBAAiB,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,QAAQ,GAAG,SAAS,CAAC;gBAER,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,iBAAiB;IAMpE;;;OAGG;IACI,OAAO,IAAI,QAAQ,GAAG,SAAS;IAItC;;OAEG;IACI,IAAI,IAAI,OAAO;CAmCvB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts new file mode 100644 index 0000000..e54d759 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. 
+ * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map new file mode 100644 index 0000000..1d1a90c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/base64.browser.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAMzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAOtD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts new file mode 100644 index 0000000..63e9630 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map new file mode 100644 index 0000000..2b42c1c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.d.ts","sourceRoot":"","sources":["../../../../src/util/base64.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAKzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAEtD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts new file mode 100644 index 0000000..b474b27 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts @@ -0,0 +1,72 @@ +/** + * A set of constants used internally when processing requests. + */ +export declare const Constants: { + /** + * The core-http version + */ + coreHttpVersion: string; + /** + * Specifies HTTP. + */ + HTTP: string; + /** + * Specifies HTTPS. + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + ServiceUnavailable: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. 
+ */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: string; + /** + * The UserAgent header. + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map new file mode 100644 index 0000000..04b9b88 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../../src/util/constants.ts"],"names":[],"mappings":"AAEA;;GAEG;AACH,eAAO,MAAM,SAAS;IACpB;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;;QAID;;WAEG;;;;;;;;;;;;;;;IAiBL;;OAEG;;QAED;;WAEG;;;QAKH;;;;WAIG;;QAGH;;WAEG;;;CAGN,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts new file mode 100644 index 0000000..25fd2c8 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts @@ -0,0 +1,40 @@ +import { HttpOperationResponse } from ".."; +export declare const DEFAULT_CLIENT_RETRY_COUNT = 3; +export declare const DEFAULT_CLIENT_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; +export declare function isNumber(n: unknown): n is number; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial chekck on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. + */ +export declare function shouldRetry(retryLimit: number, predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean, retryData: RetryData, response?: HttpOperationResponse, error?: RetryError): boolean; +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation"s error, if any. 
+ */ +export declare function updateRetryData(retryOptions: { + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; +}, retryData?: RetryData, err?: RetryError): RetryData; +//# sourceMappingURL=exponentialBackoffStrategy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map new file mode 100644 index 0000000..a218aab --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialBackoffStrategy.d.ts","sourceRoot":"","sources":["../../../../src/util/exponentialBackoffStrategy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,IAAI,CAAC;AAE3C,eAAO,MAAM,0BAA0B,IAAI,CAAC;AAE5C,eAAO,MAAM,6BAA6B,QAAY,CAAC;AACvD,eAAO,MAAM,iCAAiC,QAAY,CAAC;AAC3D,eAAO,MAAM,iCAAiC,QAAW,CAAC;AAE1D,wBAAgB,QAAQ,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,IAAI,MAAM,CAEhD;AACD,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CACzB,UAAU,EAAE,MAAM,EAClB,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,qBAAqB,EAAE,KAAK,CAAC,EAAE,UAAU,KAAK,OAAO,EAC5E,SAAS,EAAE,SAAS,EACpB,QAAQ,CAAC,EAAE,qBAAqB,EAChC,KAAK,CAAC,EAAE,UAAU,GACjB,OAAO,CAMT;AAED;;;;;;;GAOG;AACH,wBAAgB,eAAe,CAC7B,YAAY,EAAE;IAAE,aAAa,EAAE,MAAM,CAAC;IAAC,gBAAgB,EAAE,MAAM,CAAC;IAAC,gBAAgB,EAAE,MAAM,CAAA;CAAE,EAC3F,SAAS,GAAE,SAA+C,EAC1D,GAAG,CAAC,EAAE,UAAU,GACf,SAAS,CAyBX"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts new file mode 100644 index 0000000..e136c53 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts @@ -0,0 +1,2 @@ +export declare const custom: {}; +//# sourceMappingURL=inspect.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map new file mode 100644 index 0000000..d35d335 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/inspect.browser.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,MAAM,IAAK,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts new file mode 100644 index 0000000..cd664b8 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts @@ -0,0 +1,2 @@ +export declare const custom: symbol; +//# sourceMappingURL=inspect.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map new file mode 100644 index 0000000..37ed38b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.d.ts","sourceRoot":"","sources":["../../../../src/util/inspect.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,MAAM,QAAiB,CAAC"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts new file mode 100644 index 0000000..c00416a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts @@ -0,0 +1,25 @@ +export interface SanitizerOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; +} +export declare class Sanitizer { + allowedHeaderNames: Set; + allowedQueryParameters: Set; + constructor({ allowedHeaderNames, allowedQueryParameters }?: SanitizerOptions); + sanitize(obj: unknown): string; + private sanitizeHeaders; + private sanitizeQuery; + private sanitizeObject; + private sanitizeUrl; +} +//# sourceMappingURL=sanitizer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map new file mode 100644 index 0000000..e5cf8f5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"sanitizer.d.ts","sourceRoot":"","sources":["../../../../src/util/sanitizer.ts"],"names":[],"mappings":"AAMA,MAAM,WAAW,gBAAgB;IAC/B;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAE9B;;;OAGG;IACH,sBAAsB,CAAC,EAAE,MAAM,EAAE,CAAC;CACnC;AAkDD,qBAAa,SAAS;IACb,kBAAkB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IAChC,sBAAsB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;gBAE/B,EAAE,kBAAuB,EAAE,sBAA2B,EAAE,GAAE,gBAAqB;IAapF,QAAQ,CAAC,GAAG,EAAE,OAAO,GAAG,MAAM;IA2CrC,OAAO,CAAC,eAAe;IAIvB,OAAO,CAAC,aAAa;IAIrB,OAAO,CAAC,cAAc;IAsBtB,OAAO,CAAC,WAAW;CAsBpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts new file mode 100644 index 0000000..cd9881b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts @@ -0,0 +1,26 @@ +/** + * Default key used to access the XML attributes. + */ +export declare const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +export declare const XML_CHARKEY = "_"; +/** + * Options to govern behavior of xml parser and builder. + */ +export interface SerializerOptions { + /** + * indicates the name of the root element in the resulting XML when building XML. + */ + rootName?: string; + /** + * indicates whether the root element is to be included or not in the output when parsing XML. + */ + includeRoot?: boolean; + /** + * key used to access the XML value content when parsing XML. 
+ */ + xmlCharKey?: string; +} +//# sourceMappingURL=serializer.common.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map new file mode 100644 index 0000000..c6abf32 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.common.d.ts","sourceRoot":"","sources":["../../../../src/util/serializer.common.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,WAAW,MAAM,CAAC;AAC/B;;GAEG;AACH,eAAO,MAAM,WAAW,MAAM,CAAC;AAE/B;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts new file mode 100644 index 0000000..5ebc348 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts @@ -0,0 +1,5 @@ +/** + * Maximum number of retries for the throttling retry policy + */ +export declare const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map new file mode 100644 index 0000000..5403e4d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryStrategy.d.ts","sourceRoot":"","sources":["../../../../src/util/throttlingRetryStrategy.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,8BAA8B,IAAI,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts new file mode 100644 index 0000000..aea2a86 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts @@ -0,0 +1,127 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. 
+ */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array, kickstart: unknown): Promise; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. + * @param err - The error occurred if any, while executing the request; otherwise null. + * @param result - The deserialized response body if an error did not occur. + * @param request - The raw/actual request sent to the server if an error did not occur. + * @param response - The raw/actual response from the server if an error did not occur. + */ + (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +export declare function promiseToCallback(promise: Promise): (cb: Function) => void; +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +export declare function promiseToServiceCallback(promise: Promise): (cb: ServiceCallback) => void; +export declare function prepareXMLRootList(obj: unknown, elementName: string, xmlNamespaceKey?: string, xmlNamespace?: string): { + [s: string]: any; +}; +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +export declare function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +export declare function isDuration(value: string): boolean; +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. 
+ * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined; +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. + */ +export declare function isPrimitiveType(value: unknown): boolean; +export declare function getEnvironmentValue(name: string): string | undefined; +/** + * @internal + */ +export declare type UnknownObject = { + [s: string]: unknown; +}; +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map new file mode 100644 index 0000000..e76201b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../src/util/utils.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAOjD;;;;;GAKG;AACH,wBAAgB,UAAU,CAAC,UAAU,EAAE;IAAE,QAAQ,EAAE,MAAM,CAAA;CAAE,GAAG,OAAO,CAEpE;AAED;;;;;GAKG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAO7C;AAED;;;;;;GAMG;AACH,wBAAgB,aAAa,CAAC,QAAQ,EAAE,qBAAqB,GAAG,GAAG,CAMlE;AAED;;;;;;GAMG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,eAAe,GAAG,eAAe,CAMtE;AAED;;;;;GAKG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAErC;AAED;;;;;;;;GAQG;AACH,wBAAgB,2BAA2B,CACzC,gBAAgB,EAAE,KAAK,CAAC,GAAG,CAAC,EAC5B,SAAS,EAAE,OAAO,GACjB,OAAO,CAAC,GAAG,CAAC,CAMd;AAED;;GAEG;AACH,MAAM,WAAW,eAAe,CAAC,OAAO;IACtC;;;;;;OAMG;IACH,CACE,GAAG,EAAE,KAAK,GAAG,SAAS,GAAG,IAAI,EAC7B,MAAM,CAAC,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,GAC/B,IAAI,CAAC;CACT;AAED;;;;;GAKG;AAEH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,KAAK,IAAI,CAgB/E;AAED;;;;GAIG;AACH,wBAAgB,wBAAwB,CAAC,CAAC,EACxC,OAAO,EAAE,OAAO,CAAC,qBAAqB,CAAC,GACtC,CAAC,EAAE,EAAE,eAAe,CAAC,CAAC,CAAC,KAAK,IAAI,CAalC;AAED,wBAAgB,kBAAkB,CAChC,GAAG,EAAE,OAAO,EACZ,WAAW,EAAE,MAAM,EACnB,eAAe,CAAC,EAAE,MAAM,EACxB,YAAY,CAAC,EAAE,MAAM,GACpB;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAYtB;AAED;;;;GAIG;AACH,wBAAgB,WAAW,CAAC,eAAe,EAAE,OAAO,EAAE,WAAW,EAAE,GAAG,EAAE,GAAG,IAAI,CAS9E;AAKD;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;;;GAMG;AACH,wBAAgB,UAAU,CACxB,KAAK,EAAE,MAAM,GAAG,SAAS,EACzB,WAAW,EAAE,MAAM,EACnB,YAAY,EAAE,MAAM,GACnB,MAAM,GAAG,SAAS,CAEpB;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAEvD;AAED,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAOpE;AAED;;GAEG;AACH,oBAAY,aAAa,GAAG;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,OAAO,CAAA;CAAE,CAAC;AAErD;;;GAGG;AACH,wBAAgB,QAAQ,CAAC,KAAK,EAAE,OAAO,GAAG,KAAK,IAAI,aAAa,CAQ/D"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts new file mode 100644 index 0000000..c92f70d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts @@ -0,0 +1,4 @@ +import { 
SerializerOptions } from "./serializer.common"; +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +export declare function stringifyXML(content: unknown, opts?: SerializerOptions): string; +//# sourceMappingURL=xml.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map new file mode 100644 index 0000000..767a1e7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/xml.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,qBAAqB,CAAC;AA6ClF,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,GAAE,iBAAsB,GAAG,OAAO,CAAC,GAAG,CAAC,CAwBhF;AA6FD,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,EAAE,IAAI,GAAE,iBAAsB,GAAG,MAAM,CAWnF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts new file mode 100644 index 0000000..088841b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts @@ -0,0 +1,14 @@ +import { SerializerOptions } from "./serializer.common"; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export declare function stringifyXML(obj: unknown, opts?: SerializerOptions): string; +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +//# sourceMappingURL=xml.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map new file mode 100644 index 0000000..e06390c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.d.ts","sourceRoot":"","sources":["../../../../src/util/xml.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,qBAAqB,CAAC;AA0DlF;;;;GAIG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,OAAO,EAAE,IAAI,GAAE,iBAAsB,GAAG,MAAM,CAK/E;AAED;;;;GAIG;AACH,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,GAAE,iBAAsB,GAAG,OAAO,CAAC,GAAG,CAAC,CAiBhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/webResource.d.ts b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts new file mode 100644 index 0000000..a036e53 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts @@ -0,0 +1,445 @@ +/// +import { Context, SpanOptions } from "@azure/core-tracing"; +import { HttpHeadersLike } from "./httpHeaders"; +import { Mapper } from "./serializer"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { OperationSpec } from "./operationSpec"; +import { ProxySettings } from "./serviceClient"; +import { SerializerOptions } from "./util/serializer.common"; +/** + * List of supported HTTP methods. 
+ */ +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +/** + * Possible HTTP request body types + */ +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * A description of a HTTP request to be made to a remote server. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * A unique identifier for the request. Used for logging and tracing. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. 
+ */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: unknown): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export declare class WebResource implements WebResourceLike { + /** + * URL of the outgoing request. + */ + url: string; + /** + * HTTP method to use. + */ + method: HttpMethods; + /** + * Request body. + */ + body?: any; + /** + * HTTP headers. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * Query added to the URL. + */ + query?: { + [key: string]: any; + }; + /** + * Specification of the HTTP request. + */ + operationSpec?: OperationSpec; + /** + * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination. + */ + withCredentials: boolean; + /** + * How long to wait in milliseconds before aborting the request. + */ + timeout: number; + /** + * What proxy to use, if necessary. + */ + proxySettings?: ProxySettings; + /** + * Whether to keep the HTTP connections alive throughout requests. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Unique identifier of the outgoing request. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating Spans. 
+ */ + tracingContext?: Context; + constructor(url?: string, method?: HttpMethods, body?: unknown, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, decompressResponse?: boolean, streamResponseStatusCodes?: Set); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +/** + * Options to prepare an outgoing HTTP request. + */ +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: `{ "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } }` + * - query-parameter-value in "string" format: `{ "query-parameter-name": "query-parameter-value"}`. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}` + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". 
+ * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: `{ "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } }` + * - path-parameter-value in "string" format: `{ "path-parameter-name": "path-parameter-value" }`. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * Form data, used to build the request body. + */ + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: Record; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Allows keeping track of the progress of uploading the outgoing request. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Allows keeping track of the progress of downloading the incoming response. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + /** + * Value of the parameter. + */ + value: any; + /** + * Disables URL encoding if set to true. + */ + skipUrlEncoding: boolean; + /** + * Parameter values may contain any other property. + */ + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * will be applied before the request is sent. 
+ */ + customHeaders?: { + [key: string]: string; + }; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * May contain other properties. + */ + [key: string]: any; + /** + * Options to override XML parsing/building behavior. + */ + serializerOptions?: SerializerOptions; +} +//# sourceMappingURL=webResource.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map new file mode 100644 index 0000000..369a753 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.d.ts","sourceRoot":"","sources":["../../../src/webResource.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAC3D,OAAO,EAAe,eAAe,EAAqB,MAAM,eAAe,CAAC;AAChF,OAAO,EAAE,MAAM,EAAc,MAAM,cAAc,CAAC;AAClD,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAG7D;;GAEG;AACH,oBAAY,WAAW,GACnB,KAAK,GACL,KAAK,GACL,MAAM,GACN,QAAQ,GACR,OAAO,GACP,MAAM,GACN,SAAS,GACT,OAAO,CAAC;AAEZ;;GAEG;AACH,oBAAY,eAAe,GACvB,IAAI,GACJ,MAAM,GACN,WAAW,GACX,eAAe,GACf,CAAC,MAAM,MAAM,CAAC,cAAc,CAAC,CAAC;AAElC;;GAEG;AACH,oBAAY,qBAAqB,GAAG;IAClC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACxC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;;;OAIG;IACH,yBAAyB,IAAI,IAAI,CAAC;IAElC;;OAEG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,
CAAC;IACzD;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,OAAO,GAAG,MAAM,IAAI,eAAe,CAuB5E;AAED;;;;;GAKG;AACH,qBAAa,WAAY,YAAW,eAAe;IACjD;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACxC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;OAEG;IACH,WAAW,CAAC,EAAE,WAAW,CAAC;IAE1B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;gBAGvB,GAAG,CAAC,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,WAAW,EACpB,IAAI,CAAC,EAAE,OAAO,EACd,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,EAC9B,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,GAAG,eAAe,EAClD,kBAAkB,CAAC,EAAE,OAAO,EAC5B,eAAe,CAAC,EAAE,OAAO,EACzB,WAAW,CAAC,EAAE,eAAe,EAC7B,OAAO,CAAC,EAAE,MAAM,EAChB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC5D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC9D,aAAa,CAAC,EAAE,aAAa,EAC7B,SAAS,CAAC,EAAE,OAAO,EACnB,kBAAkB,CAAC,EAAE,OAAO,EAC5B,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC;IAqBzC;;;;OAIG;IACH,yBAAyB,IAAI,IAAI;IASjC;;;;OAIG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,WAAW;IAwNpD;;;OAGG;IACH,KAAK,IAAI,WAAW;CAqCrB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;IACb;;;;;;;;;;OAUG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IAC1D;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;;OAIG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;;;;;;;;OASG;IACH,cAAc,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IACzD;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAClC;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC;;OAEG;IACH,sBAAsB,CAAC,EAAE,OAAO,CAAC;IACjC;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,qBAAqB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAChD;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAC;IACrC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D;;OAEG;IACH,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,KAAK,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;;OAGG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAE
G;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAE7E;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;IAEnB;;OAEG;IACH,iBAAiB,CAAC,EAAE,iBAAiB,CAAC;CACvC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts new file mode 100644 index 0000000..eea3572 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike; +//# sourceMappingURL=xhrHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map new file mode 100644 index 0000000..3c18856 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xhrHttpClient.d.ts","sourceRoot":"","sources":["../../../src/xhrHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAyB,eAAe,EAAE,MAAM,eAAe,CAAC;AAEvE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAGhE;;GAEG;AACH,qBAAa,aAAc,YAAW,UAAU;IACvC,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CA6F7E;AAwED,wBAAgB,YAAY,CAAC,GAAG,EAAE,cAAc,GAAG,eAAe,CAajE"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/LICENSE b/node_modules/@azure/core-lro/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-lro/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/@azure/core-lro/README.md b/node_modules/@azure/core-lro/README.md new file mode 100644 index 0000000..17847b9 --- /dev/null +++ b/node_modules/@azure/core-lro/README.md @@ -0,0 +1,94 @@ +# Azure Core LRO client library for JavaScript + +This is the default implementation of long running operations in Azure SDK JavaScript client libraries, which works in both the browser and NodeJS. This library is primarily intended to be used in code generated by [AutoRest](https://github.com/Azure/Autorest) and [`autorest.typescript`](https://github.com/Azure/autorest.typescript). + +`@azure/core-lro` follows [The Azure SDK Design Guidelines for Long Running Operations](https://azure.github.io/azure-sdk/typescript_design.html#ts-lro). + +Key links: + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-lro) +- [Package (npm)](https://www.npmjs.com/package/@azure/core-lro) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/@azure/core-lro) +- [Samples](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/samples) + +## Getting started + +### Currently supported environments + +- [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule) +- Latest versions of Safari, Chrome, Edge, and Firefox. + +### Installation + +This package is primarily used in generated code and not meant to be consumed directly by end users. + +## Key concepts + +### `SimplePollerLike` + +A poller is an object that can poll the long running operation on the server for its state until it reaches a terminal state. It provides the following methods: + +- `getOperationState`: returns the state of the operation, typed as a type that extends `OperationState` +- `getResult`: returns the result of the operation when it completes and `undefined` otherwise +- `isDone`: returns whether the operation is in a terminal state +- `isStopped`: returns whether the polling stopped +- `onProgress`: registers callback functions to be called every time a polling response is received +- `poll`: sends a single polling request +- `pollUntilDone`: returns a promise that will resolve with the result of the operation +- `stopPolling`: stops polling +- `toString`: serializes the state of the poller + +### `OperationState` + +A type for the operation state. It contains a `status` field with the following possible values: `notStarted`, `running`, `succeeded`, `failed`, and `canceled`. It can be accessed as follows: + +```typescript +switch(poller.getOperationState().status) { + case "succeeded": // return poller.getResult(); + case "failed": // throw poller.getOperationState().error; + case "canceled": // throw new Error("Operation was canceled"); + case "running": // ... + case "notStarted": // ... +} +``` + +### `createHttpPoller` + +A function that returns an object of type `SimplePollerLike`. This poller behaves as follows in the presence of errors: + +- calls to `poll` and `pollUntilDone` will throw an error in case the operation has failed or was canceled, unless the `resolveOnUnsuccessful` option was set to true. +- `poller.getOperationState().status` will be set to `failed` or `canceled` when the operation fails or is canceled. + + +## Examples + +Examples can be found in the `samples` folder; a short usage sketch also appears below. + +## Troubleshooting + +### Logging + +Logs can be added at the discretion of the library implementing the Long Running Operation poller. 
+Packages inside of [azure-sdk-for-js](https://github.com/Azure/azure-sdk-for-js) use +[@azure/logger](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger). + +## Next steps + +Please take a look at the [samples](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/samples) directory for detailed examples on how to use this library. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +### Testing + +To run our tests, first install the dependencies (with `npm install` or `rush install`), +then run the unit tests with: `npm run unit-test`. + +### Code of Conduct + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-lro%2FREADME.png) diff --git a/node_modules/@azure/core-lro/dist/browser/http/models.d.ts b/node_modules/@azure/core-lro/dist/browser/http/models.d.ts new file mode 100644 index 0000000..c6ebb32 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/models.d.ts @@ -0,0 +1,106 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { LroError } from "../poller/models.js"; +/** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ +export type LroResourceLocationConfig = "azure-async-operation" | "location" | "original-uri"; +/** + * The type of a LRO response body. This is just a convenience type for checking the status of the operation. + */ +export interface ResponseBody extends Record { + /** The status of the operation. */ + status?: unknown; + /** The state of the provisioning process */ + provisioningState?: unknown; + /** The properties of the provisioning process */ + properties?: { + provisioningState?: unknown; + } & Record; + /** The error if the operation failed */ + error?: Partial; + /** The location of the created resource */ + resourceLocation?: string; +} +/** + * Simple type of the raw response. + */ +export interface RawResponse { + /** The HTTP status code */ + statusCode: number; + /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */ + headers: { + [headerName: string]: string; + }; + /** The parsed response body */ + body?: unknown; +} +/** + * The type of the response of a LRO. + */ +export interface LroResponse { + /** The flattened response */ + flatResponse: T; + /** The raw response */ + rawResponse: RawResponse; +} +/** + * Description of a long running operation. + */ +export interface LongRunningOperation { + /** + * The request path. This should be set if the operation is a PUT and needs + * to poll from the same request path. + */ + requestPath?: string; + /** + * The HTTP request method. This should be set if the operation is a PUT or a + * DELETE. + */ + requestMethod?: string; + /** + * A function that can be used to send initial request to the service. 
+ */ + sendInitialRequest: () => Promise>; + /** + * A function that can be used to poll for the current status of a long running operation. + */ + sendPollRequest: (path: string, options?: { + abortSignal?: AbortSignalLike; + }) => Promise>; +} +export type HttpOperationMode = "OperationLocation" | "ResourceLocation" | "Body"; +/** + * Options for `createPoller`. + */ +export interface CreateHttpPollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + restoreFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + resourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, response: LroResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. + */ + withOperationLocation?: (operationLocation: string) => void; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful?: boolean; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/models.d.ts.map b/node_modules/@azure/core-lro/dist/browser/http/models.d.ts.map new file mode 100644 index 0000000..8c69ed8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/http/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAG/C;;GAEG;AACH,MAAM,MAAM,yBAAyB,GAAG,uBAAuB,GAAG,UAAU,GAAG,cAAc,CAAC;AAE9F;;GAEG;AAEH,MAAM,WAAW,YAAa,SAAQ,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAC3D,mCAAmC;IACnC,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,4CAA4C;IAC5C,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,iDAAiD;IACjD,UAAU,CAAC,EAAE;QAAE,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvE,wCAAwC;IACxC,KAAK,CAAC,EAAE,OAAO,CAAC,QAAQ,CAAC,CAAC;IAC1B,2CAA2C;IAC3C,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B,2BAA2B;IAC3B,UAAU,EAAE,MAAM,CAAC;IACnB,iJAAiJ;IACjJ,OAAO,EAAE;QACP,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAC;KAC9B,CAAC;IACF,+BAA+B;IAC/B,IAAI,CAAC,EAAE,OAAO,CAAC;CAChB;AAGD;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,CAAC,GAAG,OAAO;IACtC,6BAA6B;IAC7B,YAAY,EAAE,CAAC,CAAC;IAChB,uBAAuB;IACvB,WAAW,EAAE,WAAW,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB,CAAC,CAAC,GAAG,OAAO;IAC/C;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;OAEG;IACH,kBAAkB,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;IACxD;;OAEG;IACH,eAAe,EAAE,CACf,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,KACxC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;CAC9B;AAED,MAAM,MAAM,iBAAiB,GAAG,mBAAmB,GAAG,kBAAkB,GAAG,MAAM,CAAC;AAElF;;GAEG;AACH,MAAM,WAAW,uBAAuB,CAAC,OAAO,EAAE,MAAM;IACtD;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;IACnD;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,WAAW,KAAK,IAAI,CAAC;IAC7D;;;OAGG;IACH,qBAAqB,CAAC,EAAE,CAAC,iBAAi
B,EAAE,MAAM,KAAK,IAAI,CAAC;IAC5D;;OAEG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/models.js b/node_modules/@azure/core-lro/dist/browser/http/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/models.js.map b/node_modules/@azure/core-lro/dist/browser/http/models.js.map new file mode 100644 index 0000000..ccc0d92 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/http/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { LroError } from \"../poller/models.js\";\n\n// TODO: rename to ResourceLocationConfig\n/**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\nexport type LroResourceLocationConfig = \"azure-async-operation\" | \"location\" | \"original-uri\";\n\n/**\n * The type of a LRO response body. This is just a convenience type for checking the status of the operation.\n */\n\nexport interface ResponseBody extends Record {\n /** The status of the operation. */\n status?: unknown;\n /** The state of the provisioning process */\n provisioningState?: unknown;\n /** The properties of the provisioning process */\n properties?: { provisioningState?: unknown } & Record;\n /** The error if the operation failed */\n error?: Partial;\n /** The location of the created resource */\n resourceLocation?: string;\n}\n\n/**\n * Simple type of the raw response.\n */\nexport interface RawResponse {\n /** The HTTP status code */\n statusCode: number;\n /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */\n headers: {\n [headerName: string]: string;\n };\n /** The parsed response body */\n body?: unknown;\n}\n\n// TODO: rename to OperationResponse\n/**\n * The type of the response of a LRO.\n */\nexport interface LroResponse {\n /** The flattened response */\n flatResponse: T;\n /** The raw response */\n rawResponse: RawResponse;\n}\n\n/**\n * Description of a long running operation.\n */\nexport interface LongRunningOperation {\n /**\n * The request path. This should be set if the operation is a PUT and needs\n * to poll from the same request path.\n */\n requestPath?: string;\n /**\n * The HTTP request method. 
This should be set if the operation is a PUT or a\n * DELETE.\n */\n requestMethod?: string;\n /**\n * A function that can be used to send initial request to the service.\n */\n sendInitialRequest: () => Promise>;\n /**\n * A function that can be used to poll for the current status of a long running operation.\n */\n sendPollRequest: (\n path: string,\n options?: { abortSignal?: AbortSignalLike },\n ) => Promise>;\n}\n\nexport type HttpOperationMode = \"OperationLocation\" | \"ResourceLocation\" | \"Body\";\n\n/**\n * Options for `createPoller`.\n */\nexport interface CreateHttpPollerOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n restoreFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n resourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, response: LroResponse) => void;\n /**\n * A function to be called each time the operation location is updated by the\n * service.\n */\n withOperationLocation?: (operationLocation: string) => void;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/operation.d.ts b/node_modules/@azure/core-lro/dist/browser/http/operation.d.ts new file mode 100644 index 0000000..c06358e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/operation.d.ts @@ -0,0 +1,47 @@ +import { HttpOperationMode, LongRunningOperation, LroResourceLocationConfig, LroResponse, RawResponse } from "./models.js"; +import { LroError, OperationConfig, OperationStatus, RestorableOperationState, StateProxy } from "../poller/models.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +export declare function inferLroMode(inputs: { + rawResponse: RawResponse; + requestPath?: string; + requestMethod?: string; + resourceLocationConfig?: LroResourceLocationConfig; +}): (OperationConfig & { + mode: HttpOperationMode; +}) | undefined; +export declare function parseRetryAfter({ rawResponse }: LroResponse): number | undefined; +export declare function getErrorFromResponse(response: LroResponse): LroError | undefined; +export declare function getStatusFromInitialResponse(inputs: { + response: LroResponse; + state: RestorableOperationState; + operationLocation?: string; +}): OperationStatus; +/** + * Initiates the long-running operation. 
+ */ +export declare function initHttpOperation(inputs: { + stateProxy: StateProxy; + resourceLocationConfig?: LroResourceLocationConfig; + processResult?: (result: unknown, state: TState) => TResult; + setErrorAsResult: boolean; + lro: LongRunningOperation; +}): Promise>; +export declare function getOperationLocation({ rawResponse }: LroResponse, state: RestorableOperationState): string | undefined; +export declare function getOperationStatus({ rawResponse }: LroResponse, state: RestorableOperationState): OperationStatus; +export declare function getResourceLocation(res: LroResponse, state: RestorableOperationState): string | undefined; +export declare function isOperationError(e: Error): boolean; +/** Polls the long-running operation. */ +export declare function pollHttpOperation(inputs: { + lro: LongRunningOperation; + stateProxy: StateProxy; + processResult?: (result: unknown, state: TState) => TResult; + updateState?: (state: TState, lastResponse: LroResponse) => void; + isDone?: (lastResponse: LroResponse, state: TState) => boolean; + setDelay: (intervalInMs: number) => void; + options?: { + abortSignal?: AbortSignalLike; + }; + state: RestorableOperationState; + setErrorAsResult: boolean; +}): Promise; +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/operation.d.ts.map b/node_modules/@azure/core-lro/dist/browser/http/operation.d.ts.map new file mode 100644 index 0000000..39d679a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../src/http/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,yBAAyB,EACzB,WAAW,EACX,WAAW,EAEZ,MAAM,aAAa,CAAC;AACrB,OAAO,EACL,QAAQ,EACR,eAAe,EACf,eAAe,EACf,wBAAwB,EACxB,UAAU,EACX,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AA6D1D,wBAAgB,YAAY,CAAC,MAAM,EAAE;IACnC,WAAW,EAAE,WAAW,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;CACpD,GAAG,CAAC,eAAe,GAAG;IAAE,IAAI,EAAE,iBAAiB,CAAA;CAAE,CAAC,GAAG,SAAS,CA+B9D;AAqDD,wBAAgB,eAAe,CAAC,CAAC,EAAE,EAAE,WAAW,EAAE,EAAE,WAAW,CAAC,CAAC,CAAC,GAAG,MAAM,GAAG,SAAS,CAUtF;AAED,wBAAgB,oBAAoB,CAAC,CAAC,EAAE,QAAQ,EAAE,WAAW,CAAC,CAAC,CAAC,GAAG,QAAQ,GAAG,SAAS,CAetF;AAWD,wBAAgB,4BAA4B,CAAC,MAAM,EAAE,MAAM,EAAE;IAC3D,QAAQ,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B,GAAG,eAAe,CAelB;AAED;;GAEG;AACH,wBAAsB,iBAAiB,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;IAC/D,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;IACnD,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D,gBAAgB,EAAE,OAAO,CAAC;IAC1B,GAAG,EAAE,oBAAoB,CAAC;CAC3B,GAAG,OAAO,CAAC,wBAAwB,CAAC,MAAM,CAAC,CAAC,CAyB5C;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EACzC,EAAE,WAAW,EAAE,EAAE,WAAW,EAC5B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,MAAM,GAAG,SAAS,CAiBpB;AAED,wBAAgB,kBAAkB,CAAC,MAAM,EACvC,EAAE,WAAW,EAAE,EAAE,WAAW,EAC5B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,eAAe,CAejB;AASD,wBAAgB,mBAAmB,CAAC,MAAM,EACxC,GAAG,EAAE,WAAW,EAChB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,MAAM,GAAG,SAAS,CAMpB;AAED,wBAAgB,gBAAgB,CAAC,CAAC,EAAE,KAAK,GAAG,OAAO,CAElD;AAED,wCAAwC;AACxC,wBAAsB,iBAAiB,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE;IAC/D,GAAG,EAAE,oBAAoB,CAAC;IAC1B,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,
YAAY,EAAE,WAAW,KAAK,IAAI,CAAC;IACjE,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC/D,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM,KAAK,IAAI,CAAC;IACzC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,CAAC;IAC5C,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,gBAAgB,EAAE,OAAO,CAAC;CAC3B,GAAG,OAAO,CAAC,IAAI,CAAC,CAkChB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/operation.js b/node_modules/@azure/core-lro/dist/browser/http/operation.js new file mode 100644 index 0000000..0383844 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/operation.js @@ -0,0 +1,280 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { initOperation, pollOperation } from "../poller/operation.js"; +import { logger } from "../logger.js"; +function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation ?? azureAsyncOperation; +} +function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(inputs) { + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return undefined; + } + case "PATCH": { + return getDefault() ?? requestPath; + } + default: { + return getDefault(); + } + } + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return undefined; + } + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; + } + } + } +} +export function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod?.toLocaleUpperCase(); + if (pollingUrl !== undefined) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig, + }), + }; + } + else if (location !== undefined) { + return { + mode: "ResourceLocation", + operationLocation: location, + }; + } + else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath, + }; + } + else { + return undefined; + } +} +function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== undefined) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. 
Check your Azure subscription or resource status for more information.`); + } + switch (status?.toLocaleLowerCase()) { + case undefined: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; + } + } +} +function getStatus(rawResponse) { + const { status } = rawResponse.body ?? {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function getProvisioningState(rawResponse) { + const { properties, provisioningState } = rawResponse.body ?? {}; + const status = properties?.provisioningState ?? provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } + else if (statusCode < 300) { + return "succeeded"; + } + else { + return "failed"; + } +} +export function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter)) + : retryAfterInSeconds * 1000; + } + return undefined; +} +export function getErrorFromResponse(response) { + const error = accessBodyProperty(response, "error"); + if (!error) { + logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; + } + if (!error.code || !error.message) { + logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; + } + return error; +} +function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return undefined; +} +export function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case undefined: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } + } + const status = helper(); + return status === "running" && operationLocation === undefined ? "succeeded" : status; +} +/** + * Initiates the long-running operation. + */ +export async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return initOperation({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return { + response, + operationLocation: config?.operationLocation, + resourceLocation: config?.resourceLocation, + ...(config?.mode ? { metadata: { mode: config.mode } } : {}), + }; + }, + stateProxy, + processResult: processResult + ? 
({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult, + }); +} +export function getOperationLocation({ rawResponse }, state) { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse), + }); + } + case "ResourceLocation": { + return getLocationHeader(rawResponse); + } + case "Body": + default: { + return undefined; + } + } +} +export function getOperationStatus({ rawResponse }, state) { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + } +} +function accessBodyProperty({ flatResponse, rawResponse }, prop) { + return flatResponse?.[prop] ?? rawResponse.body?.[prop]; +} +export function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; + } + return state.config.resourceLocation; +} +export function isOperationError(e) { + return e.name === "RestError"; +} +/** Polls the long-running operation. */ +export async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; + return pollOperation({ + state, + stateProxy, + setDelay, + processResult: processResult + ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) + : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. 
+ */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult, + }); +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/operation.js.map b/node_modules/@azure/core-lro/dist/browser/http/operation.js.map new file mode 100644 index 0000000..fa0dde7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/http/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAiBlC,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,wBAAwB,CAAC;AAEtE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,SAAS,8BAA8B,CAAC,MAGvC;IACC,MAAM,EAAE,mBAAmB,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IAC1D,OAAO,iBAAiB,IAAI,mBAAmB,CAAC;AAClD,CAAC;AAED,SAAS,iBAAiB,CAAC,WAAwB;IACjD,OAAO,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AACzC,CAAC;AAED,SAAS,0BAA0B,CAAC,WAAwB;IAC1D,OAAO,WAAW,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,4BAA4B,CAAC,WAAwB;IAC5D,OAAO,WAAW,CAAC,OAAO,CAAC,sBAAsB,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,oBAAoB,CAAC,MAK7B;IACC,MAAM,EAAE,QAAQ,EAAE,aAAa,EAAE,WAAW,EAAE,sBAAsB,EAAE,GAAG,MAAM,CAAC;IAChF,QAAQ,aAAa,EAAE,CAAC;QACtB,KAAK,KAAK,CAAC,CAAC,CAAC;YACX,OAAO,WAAW,CAAC;QACrB,CAAC;QACD,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,OAAO,SAAS,CAAC;QACnB,CAAC;QACD,KAAK,OAAO,CAAC,CAAC,CAAC;YACb,OAAO,UAAU,EAAE,IAAI,WAAW,CAAC;QACrC,CAAC;QACD,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,UAAU,EAAE,CAAC;QACtB,CAAC;IACH,CAAC;IAED,SAAS,UAAU;QACjB,QAAQ,sBAAsB,EAAE,CAAC;YAC/B,KAAK,uBAAuB,CAAC,CAAC,CAAC;gBAC7B,OAAO,SAAS,CAAC;YACnB,CAAC;YACD,KAAK,cAAc,CAAC,CAAC,CAAC;gBACpB,OAAO,WAAW,CAAC;YACrB,CAAC;YACD,KAAK,UAAU,CAAC;YAChB,OAAO,CAAC,CAAC,CAAC;gBACR,OAAO,QAAQ,CAAC;YAClB,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,MAK5B;IACC,MAAM,EAAE,WAAW,EAAE,aAAa,EAAE,WAAW,EAAE,sBAAsB,EAAE,GAAG,MAAM,CAAC;IACnF,MAAM,iBAAiB,GAAG,0BAA0B,CAAC,WAAW,CAAC,CAAC;IAClE,MAAM,mBAAmB,GAAG,4BAA4B,CAAC,WAAW,CAAC,CAAC;IACtE,MAAM,UAAU,GAAG,8BAA8B,CAAC,EAAE,iBAAiB,EAAE,mBAAmB,EAAE,CAAC,CAAC;IAC9F,MAAM,QAAQ,GAAG,iBAAiB,CAAC,WAAW,CAAC,CAAC;IAChD,MAAM,uBAAuB,GAAG,aAAa,EAAE,iBAAiB,EAAE,CAAC;IACnE,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;QAC7B,OAAO;YACL,IAAI,EAAE,mBAAmB;YACzB,iBAAiB,EAAE,UAAU;YAC7B,gBAAgB,EAAE,oBAAoB,CAAC;gBACrC,aAAa,EAAE,uBAAuB;gBACtC,QAAQ;gBACR,WAAW;gBACX,sBAAsB;aACvB,CAAC;SACH,CAAC;IACJ,CAAC;SAAM,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,kBAAkB;YACxB,iBAAiB,EAAE,QAAQ;SAC5B,CAAC;IACJ,CAAC;SAAM,IAAI,uBAAuB,KAAK,KAAK,IAAI,WAAW,EAAE,CAAC;QAC5D,OAAO;YACL,IAAI,EAAE,MAAM;YACZ,iBAAiB,EAAE,WAAW;SAC/B,CAAC;IACJ,CAAC;SAAM,CAAC;QACN,OAAO,SAAS,CAAC;IACnB,CAAC;AACH,CAAC;AAED,SAAS,eAAe,CAAC,MAA+C;IACtE,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,GAAG,MAAM,CAAC;IACtC,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACvD,MAAM,IAAI,KAAK,CACb,oGAAoG,MAAM,sIAAsI,CACjP,CAAC;IACJ,CAAC;IACD,QAAQ,MAAM,EAAE,iBAAiB,EAAE,EAAE,CAAC;QACpC,KAAK,SAAS;YACZ,OAAO,iBAAiB,CAAC,UAAU,CAAC,CAAC;QACvC,KAAK,WAAW;YACd,OAAO,WAAW,CAAC;QACrB,KAAK,QAAQ;YACX,OAAO,QAAQ,CAAC;QAClB,KAAK,SAAS,CAAC;QACf,KAAK,UAAU,CAAC;QAChB,KAAK,SAAS,CAAC;QACf,KAAK,WAAW,CAAC;QACjB,KAAK,YAAY;YACf,OAAO,SAAS,CAAC;QACnB,KAAK,UAAU,CAAC;QAChB,KAAK,WAAW;YACd,OAAO,UAAU,CAAC;QACpB,OAAO,CAAC,CAAC,CAAC;YACR,MAAM,CAAC,OAAO,CAAC,uCAAuC,MAAM,EAAE,CAAC,CAAC;YAChE,OAAO,MAAyB,CAAC;QACnC,CAAC;IACH,CAAC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,WAAwB;IACzC,MAAM,EAAE,MAAM,EAAE,GAAI,WAAW,CAAC,IAAqB,IAAI,EAAE,CAAC;IAC5D,OAAO,eAAe,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,SAAS,oBAAoB,CAAC,WAAwB;IACpD,MAAM,EAAE,UAAU,EAAE,iB
AAiB,EAAE,GAAI,WAAW,CAAC,IAAqB,IAAI,EAAE,CAAC;IACnF,MAAM,MAAM,GAAG,UAAU,EAAE,iBAAiB,IAAI,iBAAiB,CAAC;IAClE,OAAO,eAAe,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB;IAC3C,IAAI,UAAU,KAAK,GAAG,EAAE,CAAC;QACvB,OAAO,SAAS,CAAC;IACnB,CAAC;SAAM,IAAI,UAAU,GAAG,GAAG,EAAE,CAAC;QAC5B,OAAO,WAAW,CAAC;IACrB,CAAC;SAAM,CAAC;QACN,OAAO,QAAQ,CAAC;IAClB,CAAC;AACH,CAAC;AAED,MAAM,UAAU,eAAe,CAAI,EAAE,WAAW,EAAkB;IAChE,MAAM,UAAU,GAAuB,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;IAC1E,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;QAC7B,wEAAwE;QACxE,MAAM,mBAAmB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;QACjD,OAAO,KAAK,CAAC,mBAAmB,CAAC;YAC/B,CAAC,CAAC,gCAAgC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,CAAC;YACxD,CAAC,CAAC,mBAAmB,GAAG,IAAI,CAAC;IACjC,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAI,QAAwB;IAC9D,MAAM,KAAK,GAAG,kBAAkB,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACpD,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,MAAM,CAAC,OAAO,CACZ,yFAAyF,CAC1F,CAAC;QACF,OAAO;IACT,CAAC;IACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC;QAClC,MAAM,CAAC,OAAO,CACZ,iHAAiH,CAClH,CAAC;QACF,OAAO;IACT,CAAC;IACD,OAAO,KAAiB,CAAC;AAC3B,CAAC;AAED,SAAS,gCAAgC,CAAC,cAAoB;IAC5D,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,MAAM,cAAc,GAAG,cAAc,CAAC,OAAO,EAAE,CAAC;IAChD,IAAI,OAAO,GAAG,cAAc,EAAE,CAAC;QAC7B,OAAO,cAAc,GAAG,OAAO,CAAC;IAClC,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,4BAA4B,CAAS,MAIpD;IACC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IACtD,SAAS,MAAM;QACb,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;QAC7C,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,SAAS;gBACZ,OAAO,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC5D,KAAK,MAAM;gBACT,OAAO,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YAC7C;gBACE,OAAO,SAAS,CAAC;QACrB,CAAC;IACH,CAAC;IACD,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC;IACxB,OAAO,MAAM,KAAK,SAAS,IAAI,iBAAiB,KAAK,SAAS,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC;AACxF,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAkB,MAMxD;IACC,MAAM,EAAE,UAAU,EAAE,sBAAsB,EAAE,aAAa,EAAE,GAAG,EAAE,gBAAgB,EAAE,GAAG,MAAM,CAAC;IAC5F,OAAO,aAAa,CAAC;QACnB,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,kBAAkB,EAAE,CAAC;YAChD,MAAM,MAAM,GAAG,YAAY,CAAC;gBAC1B,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,aAAa,EAAE,GAAG,CAAC,aAAa;gBAChC,sBAAsB;aACvB,CAAC,CAAC;YACH,OAAO;gBACL,QAAQ;gBACR,iBAAiB,EAAE,MAAM,EAAE,iBAAiB;gBAC5C,gBAAgB,EAAE,MAAM,EAAE,gBAAgB;gBAC1C,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aAC7D,CAAC;QACJ,CAAC;QACD,UAAU;QACV,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC;YACjE,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;QACjD,kBAAkB,EAAE,4BAA4B;QAChD,gBAAgB;KACjB,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,oBAAoB,CAClC,EAAE,WAAW,EAAe,EAC5B,KAAuC;IAEvC,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7C,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,OAAO,8BAA8B,CAAC;gBACpC,iBAAiB,EAAE,0BAA0B,CAAC,WAAW,CAAC;gBAC1D,mBAAmB,EAAE,4BAA4B,CAAC,WAAW,CAAC;aAC/D,CAAC,CAAC;QACL,CAAC;QACD,KAAK,kBAAkB,CAAC,CAAC,CAAC;YACxB,OAAO,iBAAiB,CAAC,WAAW,CAAC,CAAC;QACxC,CAAC;QACD,KAAK,MAAM,CAAC;QACZ,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,SAAS,CAAC;QACnB,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,kBAAkB,CAChC,EAAE,WAAW,EAAe,EAC5B,KAAuC;IAEvC,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7C,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,OAAO,SAAS,CAAC,WAAW,CAAC,CAAC;QAChC,CAAC;QACD,KAAK,kBAAkB,CAAC,CAAC,CAAC;YACxB,OAA
O,iBAAiB,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;QACnD,CAAC;QACD,KAAK,MAAM,CAAC,CAAC,CAAC;YACZ,OAAO,oBAAoB,CAAC,WAAW,CAAC,CAAC;QAC3C,CAAC;QACD;YACE,MAAM,IAAI,KAAK,CAAC,8CAA8C,IAAI,EAAE,CAAC,CAAC;IAC1E,CAAC;AACH,CAAC;AAED,SAAS,kBAAkB,CACzB,EAAE,YAAY,EAAE,WAAW,EAAe,EAC1C,IAAO;IAEP,OAAQ,YAA6B,EAAE,CAAC,IAAI,CAAC,IAAK,WAAW,CAAC,IAAqB,EAAE,CAAC,IAAI,CAAC,CAAC;AAC9F,CAAC;AAED,MAAM,UAAU,mBAAmB,CACjC,GAAgB,EAChB,KAAuC;IAEvC,MAAM,GAAG,GAAG,kBAAkB,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC;IACxD,IAAI,GAAG,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE,CAAC;QACnC,KAAK,CAAC,MAAM,CAAC,gBAAgB,GAAG,GAAG,CAAC;IACtC,CAAC;IACD,OAAO,KAAK,CAAC,MAAM,CAAC,gBAAgB,CAAC;AACvC,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,CAAQ;IACvC,OAAO,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC;AAChC,CAAC;AAED,wCAAwC;AACxC,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAkB,MAUxD;IACC,MAAM,EACJ,GAAG,EACH,UAAU,EACV,OAAO,EACP,aAAa,EACb,WAAW,EACX,QAAQ,EACR,KAAK,EACL,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,OAAO,aAAa,CAAC;QACnB,KAAK;QACL,UAAU;QACV,QAAQ;QACR,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,UAAU,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,UAAU,CAAC;YAC3E,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;QACjD,QAAQ,EAAE,oBAAoB;QAC9B,WAAW;QACX,kBAAkB,EAAE,eAAe;QACnC,oBAAoB;QACpB,kBAAkB;QAClB,gBAAgB;QAChB,mBAAmB;QACnB,OAAO;QACP;;;WAGG;QACH,IAAI,EAAE,KAAK,EAAE,QAAgB,EAAE,YAAgD,EAAE,EAAE,CACjF,GAAG,CAAC,eAAe,CAAC,QAAQ,EAAE,YAAY,CAAC;QAC7C,gBAAgB;KACjB,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n HttpOperationMode,\n LongRunningOperation,\n LroResourceLocationConfig,\n LroResponse,\n RawResponse,\n ResponseBody,\n} from \"./models.js\";\nimport {\n LroError,\n OperationConfig,\n OperationStatus,\n RestorableOperationState,\n StateProxy,\n} from \"../poller/models.js\";\nimport { initOperation, pollOperation } from \"../poller/operation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { logger } from \"../logger.js\";\n\nfunction getOperationLocationPollingUrl(inputs: {\n operationLocation?: string;\n azureAsyncOperation?: string;\n}): string | undefined {\n const { azureAsyncOperation, operationLocation } = inputs;\n return operationLocation ?? azureAsyncOperation;\n}\n\nfunction getLocationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"location\"];\n}\n\nfunction getOperationLocationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"operation-location\"];\n}\n\nfunction getAzureAsyncOperationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"azure-asyncoperation\"];\n}\n\nfunction findResourceLocation(inputs: {\n requestMethod?: string;\n location?: string;\n requestPath?: string;\n resourceLocationConfig?: LroResourceLocationConfig;\n}): string | undefined {\n const { location, requestMethod, requestPath, resourceLocationConfig } = inputs;\n switch (requestMethod) {\n case \"PUT\": {\n return requestPath;\n }\n case \"DELETE\": {\n return undefined;\n }\n case \"PATCH\": {\n return getDefault() ?? 
requestPath;\n }\n default: {\n return getDefault();\n }\n }\n\n function getDefault() {\n switch (resourceLocationConfig) {\n case \"azure-async-operation\": {\n return undefined;\n }\n case \"original-uri\": {\n return requestPath;\n }\n case \"location\":\n default: {\n return location;\n }\n }\n }\n}\n\nexport function inferLroMode(inputs: {\n rawResponse: RawResponse;\n requestPath?: string;\n requestMethod?: string;\n resourceLocationConfig?: LroResourceLocationConfig;\n}): (OperationConfig & { mode: HttpOperationMode }) | undefined {\n const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs;\n const operationLocation = getOperationLocationHeader(rawResponse);\n const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse);\n const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation });\n const location = getLocationHeader(rawResponse);\n const normalizedRequestMethod = requestMethod?.toLocaleUpperCase();\n if (pollingUrl !== undefined) {\n return {\n mode: \"OperationLocation\",\n operationLocation: pollingUrl,\n resourceLocation: findResourceLocation({\n requestMethod: normalizedRequestMethod,\n location,\n requestPath,\n resourceLocationConfig,\n }),\n };\n } else if (location !== undefined) {\n return {\n mode: \"ResourceLocation\",\n operationLocation: location,\n };\n } else if (normalizedRequestMethod === \"PUT\" && requestPath) {\n return {\n mode: \"Body\",\n operationLocation: requestPath,\n };\n } else {\n return undefined;\n }\n}\n\nfunction transformStatus(inputs: { status: unknown; statusCode: number }): OperationStatus {\n const { status, statusCode } = inputs;\n if (typeof status !== \"string\" && status !== undefined) {\n throw new Error(\n `Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`,\n );\n }\n switch (status?.toLocaleLowerCase()) {\n case undefined:\n return toOperationStatus(statusCode);\n case \"succeeded\":\n return \"succeeded\";\n case \"failed\":\n return \"failed\";\n case \"running\":\n case \"accepted\":\n case \"started\":\n case \"canceling\":\n case \"cancelling\":\n return \"running\";\n case \"canceled\":\n case \"cancelled\":\n return \"canceled\";\n default: {\n logger.verbose(`LRO: unrecognized operation status: ${status}`);\n return status as OperationStatus;\n }\n }\n}\n\nfunction getStatus(rawResponse: RawResponse): OperationStatus {\n const { status } = (rawResponse.body as ResponseBody) ?? {};\n return transformStatus({ status, statusCode: rawResponse.statusCode });\n}\n\nfunction getProvisioningState(rawResponse: RawResponse): OperationStatus {\n const { properties, provisioningState } = (rawResponse.body as ResponseBody) ?? {};\n const status = properties?.provisioningState ?? 
provisioningState;\n return transformStatus({ status, statusCode: rawResponse.statusCode });\n}\n\nfunction toOperationStatus(statusCode: number): OperationStatus {\n if (statusCode === 202) {\n return \"running\";\n } else if (statusCode < 300) {\n return \"succeeded\";\n } else {\n return \"failed\";\n }\n}\n\nexport function parseRetryAfter({ rawResponse }: LroResponse): number | undefined {\n const retryAfter: string | undefined = rawResponse.headers[\"retry-after\"];\n if (retryAfter !== undefined) {\n // Retry-After header value is either in HTTP date format, or in seconds\n const retryAfterInSeconds = parseInt(retryAfter);\n return isNaN(retryAfterInSeconds)\n ? calculatePollingIntervalFromDate(new Date(retryAfter))\n : retryAfterInSeconds * 1000;\n }\n return undefined;\n}\n\nexport function getErrorFromResponse(response: LroResponse): LroError | undefined {\n const error = accessBodyProperty(response, \"error\");\n if (!error) {\n logger.warning(\n `The long-running operation failed but there is no error property in the response's body`,\n );\n return;\n }\n if (!error.code || !error.message) {\n logger.warning(\n `The long-running operation failed but the error property in the response's body doesn't contain code or message`,\n );\n return;\n }\n return error as LroError;\n}\n\nfunction calculatePollingIntervalFromDate(retryAfterDate: Date): number | undefined {\n const timeNow = Math.floor(new Date().getTime());\n const retryAfterTime = retryAfterDate.getTime();\n if (timeNow < retryAfterTime) {\n return retryAfterTime - timeNow;\n }\n return undefined;\n}\n\nexport function getStatusFromInitialResponse(inputs: {\n response: LroResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n}): OperationStatus {\n const { response, state, operationLocation } = inputs;\n function helper(): OperationStatus {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case undefined:\n return toOperationStatus(response.rawResponse.statusCode);\n case \"Body\":\n return getOperationStatus(response, state);\n default:\n return \"running\";\n }\n }\n const status = helper();\n return status === \"running\" && operationLocation === undefined ? \"succeeded\" : status;\n}\n\n/**\n * Initiates the long-running operation.\n */\nexport async function initHttpOperation(inputs: {\n stateProxy: StateProxy;\n resourceLocationConfig?: LroResourceLocationConfig;\n processResult?: (result: unknown, state: TState) => TResult;\n setErrorAsResult: boolean;\n lro: LongRunningOperation;\n}): Promise> {\n const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs;\n return initOperation({\n init: async () => {\n const response = await lro.sendInitialRequest();\n const config = inferLroMode({\n rawResponse: response.rawResponse,\n requestPath: lro.requestPath,\n requestMethod: lro.requestMethod,\n resourceLocationConfig,\n });\n return {\n response,\n operationLocation: config?.operationLocation,\n resourceLocation: config?.resourceLocation,\n ...(config?.mode ? { metadata: { mode: config.mode } } : {}),\n };\n },\n stateProxy,\n processResult: processResult\n ? 
({ flatResponse }, state) => processResult(flatResponse, state)\n : ({ flatResponse }) => flatResponse as TResult,\n getOperationStatus: getStatusFromInitialResponse,\n setErrorAsResult,\n });\n}\n\nexport function getOperationLocation(\n { rawResponse }: LroResponse,\n state: RestorableOperationState,\n): string | undefined {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case \"OperationLocation\": {\n return getOperationLocationPollingUrl({\n operationLocation: getOperationLocationHeader(rawResponse),\n azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse),\n });\n }\n case \"ResourceLocation\": {\n return getLocationHeader(rawResponse);\n }\n case \"Body\":\n default: {\n return undefined;\n }\n }\n}\n\nexport function getOperationStatus(\n { rawResponse }: LroResponse,\n state: RestorableOperationState,\n): OperationStatus {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case \"OperationLocation\": {\n return getStatus(rawResponse);\n }\n case \"ResourceLocation\": {\n return toOperationStatus(rawResponse.statusCode);\n }\n case \"Body\": {\n return getProvisioningState(rawResponse);\n }\n default:\n throw new Error(`Internal error: Unexpected operation mode: ${mode}`);\n }\n}\n\nfunction accessBodyProperty
<P extends keyof ResponseBody>
(\n { flatResponse, rawResponse }: LroResponse,\n prop: P,\n): ResponseBody[P] {\n return (flatResponse as ResponseBody)?.[prop] ?? (rawResponse.body as ResponseBody)?.[prop];\n}\n\nexport function getResourceLocation(\n res: LroResponse,\n state: RestorableOperationState,\n): string | undefined {\n const loc = accessBodyProperty(res, \"resourceLocation\");\n if (loc && typeof loc === \"string\") {\n state.config.resourceLocation = loc;\n }\n return state.config.resourceLocation;\n}\n\nexport function isOperationError(e: Error): boolean {\n return e.name === \"RestError\";\n}\n\n/** Polls the long-running operation. */\nexport async function pollHttpOperation(inputs: {\n lro: LongRunningOperation;\n stateProxy: StateProxy;\n processResult?: (result: unknown, state: TState) => TResult;\n updateState?: (state: TState, lastResponse: LroResponse) => void;\n isDone?: (lastResponse: LroResponse, state: TState) => boolean;\n setDelay: (intervalInMs: number) => void;\n options?: { abortSignal?: AbortSignalLike };\n state: RestorableOperationState;\n setErrorAsResult: boolean;\n}): Promise {\n const {\n lro,\n stateProxy,\n options,\n processResult,\n updateState,\n setDelay,\n state,\n setErrorAsResult,\n } = inputs;\n return pollOperation({\n state,\n stateProxy,\n setDelay,\n processResult: processResult\n ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState)\n : ({ flatResponse }) => flatResponse as TResult,\n getError: getErrorFromResponse,\n updateState,\n getPollingInterval: parseRetryAfter,\n getOperationLocation,\n getOperationStatus,\n isOperationError,\n getResourceLocation,\n options,\n /**\n * The expansion here is intentional because `lro` could be an object that\n * references an inner this, so we need to preserve a reference to it.\n */\n poll: async (location: string, inputOptions?: { abortSignal?: AbortSignalLike }) =>\n lro.sendPollRequest(location, inputOptions),\n setErrorAsResult,\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/poller.d.ts b/node_modules/@azure/core-lro/dist/browser/http/poller.d.ts new file mode 100644 index 0000000..5199c5c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/poller.d.ts @@ -0,0 +1,11 @@ +import { LongRunningOperation } from "./models.js"; +import { OperationState, SimplePollerLike } from "../poller/models.js"; +import { CreateHttpPollerOptions } from "./models.js"; +/** + * Creates a poller that can be used to poll a long-running operation. 
+ * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +export declare function createHttpPoller>(lro: LongRunningOperation, options?: CreateHttpPollerOptions): Promise>; +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/poller.d.ts.map b/node_modules/@azure/core-lro/dist/browser/http/poller.d.ts.map new file mode 100644 index 0000000..5f6cfe9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/poller.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/http/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAe,MAAM,aAAa,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAWvE,OAAO,EAAE,uBAAuB,EAAE,MAAM,aAAa,CAAC;AAGtD;;;;;GAKG;AACH,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EACpF,GAAG,EAAE,oBAAoB,EACzB,OAAO,CAAC,EAAE,uBAAuB,CAAC,OAAO,EAAE,MAAM,CAAC,GACjD,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAgD5C"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/poller.js b/node_modules/@azure/core-lro/dist/browser/http/poller.js new file mode 100644 index 0000000..84379fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/poller.js @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getErrorFromResponse, getOperationLocation, getOperationStatus, getResourceLocation, getStatusFromInitialResponse, inferLroMode, isOperationError, parseRetryAfter, } from "./operation.js"; +import { buildCreatePoller } from "../poller/poller.js"; +/** + * Creates a poller that can be used to poll a long-running operation. + * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +export async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; + return buildCreatePoller({ + getStatusFromInitialResponse, + getStatusFromPollResponse: getOperationStatus, + isOperationError, + getOperationLocation, + getResourceLocation, + getPollingInterval: parseRetryAfter, + getError: getErrorFromResponse, + resolveOnUnsuccessful, + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return { + response, + operationLocation: config?.operationLocation, + resourceLocation: config?.resourceLocation, + ...(config?.mode ? { metadata: { mode: config.mode } } : {}), + }; + }, + poll: lro.sendPollRequest, + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult + ? 
({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + }); +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/http/poller.js.map b/node_modules/@azure/core-lro/dist/browser/http/poller.js.map new file mode 100644 index 0000000..6a95868 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/http/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/http/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EACL,oBAAoB,EACpB,oBAAoB,EACpB,kBAAkB,EAClB,mBAAmB,EACnB,4BAA4B,EAC5B,YAAY,EACZ,gBAAgB,EAChB,eAAe,GAChB,MAAM,gBAAgB,CAAC;AAExB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACpC,GAAyB,EACzB,OAAkD;IAElD,MAAM,EACJ,sBAAsB,EACtB,YAAY,EACZ,aAAa,EACb,WAAW,EACX,WAAW,EACX,qBAAqB,EACrB,qBAAqB,GAAG,KAAK,GAC9B,GAAG,OAAO,IAAI,EAAE,CAAC;IAClB,OAAO,iBAAiB,CAA+B;QACrD,4BAA4B;QAC5B,yBAAyB,EAAE,kBAAkB;QAC7C,gBAAgB;QAChB,oBAAoB;QACpB,mBAAmB;QACnB,kBAAkB,EAAE,eAAe;QACnC,QAAQ,EAAE,oBAAoB;QAC9B,qBAAqB;KACtB,CAAC,CACA;QACE,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,kBAAkB,EAAE,CAAC;YAChD,MAAM,MAAM,GAAG,YAAY,CAAC;gBAC1B,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,aAAa,EAAE,GAAG,CAAC,aAAa;gBAChC,sBAAsB;aACvB,CAAC,CAAC;YACH,OAAO;gBACL,QAAQ;gBACR,iBAAiB,EAAE,MAAM,EAAE,iBAAiB;gBAC5C,gBAAgB,EAAE,MAAM,EAAE,gBAAgB;gBAC1C,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aAC7D,CAAC;QACJ,CAAC;QACD,IAAI,EAAE,GAAG,CAAC,eAAe;KAC1B,EACD;QACE,YAAY;QACZ,qBAAqB;QACrB,WAAW;QACX,WAAW;QACX,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC;YACjE,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;KAClD,CACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LongRunningOperation, LroResponse } from \"./models.js\";\nimport { OperationState, SimplePollerLike } from \"../poller/models.js\";\nimport {\n getErrorFromResponse,\n getOperationLocation,\n getOperationStatus,\n getResourceLocation,\n getStatusFromInitialResponse,\n inferLroMode,\n isOperationError,\n parseRetryAfter,\n} from \"./operation.js\";\nimport { CreateHttpPollerOptions } from \"./models.js\";\nimport { buildCreatePoller } from \"../poller/poller.js\";\n\n/**\n * Creates a poller that can be used to poll a long-running operation.\n * @param lro - Description of the long-running operation\n * @param options - options to configure the poller\n * @returns an initialized poller\n */\nexport async function createHttpPoller>(\n lro: LongRunningOperation,\n options?: CreateHttpPollerOptions,\n): Promise> {\n const {\n resourceLocationConfig,\n intervalInMs,\n processResult,\n restoreFrom,\n updateState,\n withOperationLocation,\n resolveOnUnsuccessful = false,\n } = options || {};\n return buildCreatePoller({\n getStatusFromInitialResponse,\n getStatusFromPollResponse: getOperationStatus,\n isOperationError,\n getOperationLocation,\n getResourceLocation,\n getPollingInterval: parseRetryAfter,\n getError: getErrorFromResponse,\n resolveOnUnsuccessful,\n })(\n {\n init: async () => {\n const response = await lro.sendInitialRequest();\n const config = inferLroMode({\n rawResponse: response.rawResponse,\n requestPath: lro.requestPath,\n requestMethod: lro.requestMethod,\n resourceLocationConfig,\n });\n return {\n 
response,\n operationLocation: config?.operationLocation,\n resourceLocation: config?.resourceLocation,\n ...(config?.mode ? { metadata: { mode: config.mode } } : {}),\n };\n },\n poll: lro.sendPollRequest,\n },\n {\n intervalInMs,\n withOperationLocation,\n restoreFrom,\n updateState,\n processResult: processResult\n ? ({ flatResponse }, state) => processResult(flatResponse, state)\n : ({ flatResponse }) => flatResponse as TResult,\n },\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/index.d.ts b/node_modules/@azure/core-lro/dist/browser/index.d.ts new file mode 100644 index 0000000..9783948 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/index.d.ts @@ -0,0 +1,13 @@ +export { createHttpPoller } from "./http/poller.js"; +export { CancelOnProgress, OperationState, OperationStatus, SimplePollerLike, } from "./poller/models.js"; +export { CreateHttpPollerOptions } from "./http/models.js"; +export { LroResourceLocationConfig, LongRunningOperation, LroResponse, RawResponse, } from "./http/models.js"; +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +/** legacy */ +export * from "./legacy/lroEngine/index.js"; +export * from "./legacy/poller.js"; +export * from "./legacy/pollOperation.js"; +export { PollerLike } from "./legacy/models.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/index.d.ts.map b/node_modules/@azure/core-lro/dist/browser/index.d.ts.map new file mode 100644 index 0000000..c096dc1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AACpD,OAAO,EACL,gBAAgB,EAChB,cAAc,EACd,eAAe,EACf,gBAAgB,GACjB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,uBAAuB,EAAE,MAAM,kBAAkB,CAAC;AAC3D,OAAO,EACL,yBAAyB,EACzB,oBAAoB,EACpB,WAAW,EACX,WAAW,GACZ,MAAM,kBAAkB,CAAC;AAE1B;;GAEG;AAUH,aAAa;AACb,cAAc,6BAA6B,CAAC;AAC5C,cAAc,oBAAoB,CAAC;AACnC,cAAc,2BAA2B,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/index.js b/node_modules/@azure/core-lro/dist/browser/index.js new file mode 100644 index 0000000..3952c2a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/index.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { createHttpPoller } from "./http/poller.js"; +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +// export { +// BuildCreatePollerOptions, +// Operation, +// CreatePollerOptions, +// OperationConfig, +// RestorableOperationState, +// } from "./poller/models"; +// export { buildCreatePoller } from "./poller/poller"; +/** legacy */ +export * from "./legacy/lroEngine/index.js"; +export * from "./legacy/poller.js"; +export * from "./legacy/pollOperation.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/index.js.map b/node_modules/@azure/core-lro/dist/browser/index.js.map new file mode 100644 index 0000000..689d59e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AAepD;;GAEG;AACH,WAAW;AACX,8BAA8B;AAC9B,eAAe;AACf,yBAAyB;AACzB,qBAAqB;AACrB,8BAA8B;AAC9B,4BAA4B;AAC5B,uDAAuD;AAEvD,aAAa;AACb,cAAc,6BAA6B,CAAC;AAC5C,cAAc,oBAAoB,CAAC;AACnC,cAAc,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { createHttpPoller } from \"./http/poller.js\";\nexport {\n CancelOnProgress,\n OperationState,\n OperationStatus,\n SimplePollerLike,\n} from \"./poller/models.js\";\nexport { CreateHttpPollerOptions } from \"./http/models.js\";\nexport {\n LroResourceLocationConfig,\n LongRunningOperation,\n LroResponse,\n RawResponse,\n} from \"./http/models.js\";\n\n/**\n * This can be uncommented to expose the protocol-agnostic poller\n */\n// export {\n// BuildCreatePollerOptions,\n// Operation,\n// CreatePollerOptions,\n// OperationConfig,\n// RestorableOperationState,\n// } from \"./poller/models\";\n// export { buildCreatePoller } from \"./poller/poller\";\n\n/** legacy */\nexport * from \"./legacy/lroEngine/index.js\";\nexport * from \"./legacy/poller.js\";\nexport * from \"./legacy/pollOperation.js\";\nexport { PollerLike } from \"./legacy/models.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.d.ts b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.d.ts new file mode 100644 index 0000000..b0d84c8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.d.ts @@ -0,0 +1,3 @@ +export { LroEngine } from "./lroEngine.js"; +export { LroEngineOptions } from "./models.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.d.ts.map b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.d.ts.map new file mode 100644 index 0000000..ebf1159 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAC3C,OAAO,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.js b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.js new file mode 100644 index 0000000..ec17805 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +export { LroEngine } from "./lroEngine.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.js.map b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.js.map new file mode 100644 index 0000000..0c4dc7a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { LroEngine } from \"./lroEngine.js\";\nexport { LroEngineOptions } from \"./models.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.d.ts b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.d.ts new file mode 100644 index 0000000..937101c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.d.ts @@ -0,0 +1,16 @@ +import { LroEngineOptions } from "./models.js"; +import { LongRunningOperation } from "../../http/models.js"; +import { PollOperationState } from "../pollOperation.js"; +import { Poller } from "../poller.js"; +/** + * The LRO Engine, a class that performs polling. + */ +export declare class LroEngine> extends Poller { + private config; + constructor(lro: LongRunningOperation, options?: LroEngineOptions); + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay(): Promise; +} +//# sourceMappingURL=lroEngine.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.d.ts.map b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.d.ts.map new file mode 100644 index 0000000..640c27e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAgB,MAAM,aAAa,CAAC;AAE7D,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAE5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAItC;;GAEG;AACH,qBAAa,SAAS,CAAC,OAAO,EAAE,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,CAAE,SAAQ,MAAM,CACxF,MAAM,EACN,OAAO,CACR;IACC,OAAO,CAAC,MAAM,CAAe;gBAEjB,GAAG,EAAE,oBAAoB,CAAC,OAAO,CAAC,EAAE,OAAO,CAAC,EAAE,gBAAgB,CAAC,OAAO,EAAE,MAAM,CAAC;IA6B3F;;OAEG;IACH,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAGvB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js new file mode 100644 index 0000000..351691a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { GenericPollOperation } from "./operation.js"; +import { POLL_INTERVAL_IN_MS } from "../../poller/constants.js"; +import { Poller } from "../poller.js"; +import { deserializeState } from "../../poller/operation.js"; +/** + * The LRO Engine, a class that performs polling. 
+ */ +export class LroEngine extends Poller { + config; + constructor(lro, options) { + const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; + const state = resumeFrom + ? deserializeState(resumeFrom) + : {}; + const operation = new GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} +//# sourceMappingURL=lroEngine.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js.map b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js.map new file mode 100644 index 0000000..d5a5ef2 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAEtD,OAAO,EAAE,mBAAmB,EAAE,MAAM,2BAA2B,CAAC;AAEhE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,OAAO,EAAE,gBAAgB,EAAE,MAAM,2BAA2B,CAAC;AAE7D;;GAEG;AACH,MAAM,OAAO,SAA+D,SAAQ,MAGnF;IACS,MAAM,CAAe;IAE7B,YAAY,GAAkC,EAAE,OAA2C;QACzF,MAAM,EACJ,YAAY,GAAG,mBAAmB,EAClC,UAAU,EACV,qBAAqB,GAAG,KAAK,EAC7B,MAAM,EACN,yBAAyB,EACzB,aAAa,EACb,WAAW,GACZ,GAAG,OAAO,IAAI,EAAE,CAAC;QAClB,MAAM,KAAK,GAAqC,UAAU;YACxD,CAAC,CAAC,gBAAgB,CAAC,UAAU,CAAC;YAC9B,CAAC,CAAE,EAAuC,CAAC;QAC7C,MAAM,SAAS,GAAG,IAAI,oBAAoB,CACxC,KAAK,EACL,GAAG,EACH,CAAC,qBAAqB,EACtB,yBAAyB,EACzB,aAAa,EACb,WAAW,EACX,MAAM,CACP,CAAC;QACF,KAAK,CAAC,SAAS,CAAC,CAAC;QACjB,IAAI,CAAC,qBAAqB,GAAG,qBAAqB,CAAC;QAEnD,IAAI,CAAC,MAAM,GAAG,EAAE,YAAY,EAAE,YAAY,EAAE,CAAC;QAC7C,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACzC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;IACzF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroEngineOptions, PollerConfig } from \"./models.js\";\nimport { GenericPollOperation } from \"./operation.js\";\nimport { LongRunningOperation } from \"../../http/models.js\";\nimport { POLL_INTERVAL_IN_MS } from \"../../poller/constants.js\";\nimport { PollOperationState } from \"../pollOperation.js\";\nimport { Poller } from \"../poller.js\";\nimport { RestorableOperationState } from \"../../poller/models.js\";\nimport { deserializeState } from \"../../poller/operation.js\";\n\n/**\n * The LRO Engine, a class that performs polling.\n */\nexport class LroEngine> extends Poller<\n TState,\n TResult\n> {\n private config: PollerConfig;\n\n constructor(lro: LongRunningOperation, options?: LroEngineOptions) {\n const {\n intervalInMs = POLL_INTERVAL_IN_MS,\n resumeFrom,\n resolveOnUnsuccessful = false,\n isDone,\n lroResourceLocationConfig,\n processResult,\n updateState,\n } = options || {};\n const state: RestorableOperationState = resumeFrom\n ? 
deserializeState(resumeFrom)\n : ({} as RestorableOperationState);\n const operation = new GenericPollOperation(\n state,\n lro,\n !resolveOnUnsuccessful,\n lroResourceLocationConfig,\n processResult,\n updateState,\n isDone,\n );\n super(operation);\n this.resolveOnUnsuccessful = resolveOnUnsuccessful;\n\n this.config = { intervalInMs: intervalInMs };\n operation.setPollerConfig(this.config);\n }\n\n /**\n * The method used by the poller to wait before attempting to update its operation.\n */\n delay(): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.d.ts b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.d.ts new file mode 100644 index 0000000..bf26d04 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.d.ts @@ -0,0 +1,38 @@ +import { LroResourceLocationConfig, RawResponse } from "../../http/models.js"; +/** + * Options for the LRO poller. + */ +export interface LroEngineOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + resumeFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + lroResourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: RawResponse) => void; + /** + * A predicate to determine whether the LRO finished processing. + */ + isDone?: (lastResponse: unknown, state: TState) => boolean; + /** + * Control whether to throw an exception if the operation failed or was canceled. 
+ */ + resolveOnUnsuccessful?: boolean; +} +export interface PollerConfig { + intervalInMs: number; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.d.ts.map b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.d.ts.map new file mode 100644 index 0000000..c880365 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,yBAAyB,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAE9E;;GAEG;AACH,MAAM,WAAW,gBAAgB,CAAC,OAAO,EAAE,MAAM;IAC/C;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,yBAAyB,CAAC,EAAE,yBAAyB,CAAC;IACtD;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,WAAW,KAAK,IAAI,CAAC;IACjE;;OAEG;IACH,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC3D;;OAEG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,WAAW,YAAY;IAC3B,YAAY,EAAE,MAAM,CAAC;CACtB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.js b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.js.map b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.js.map new file mode 100644 index 0000000..bfc01b5 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroResourceLocationConfig, RawResponse } from \"../../http/models.js\";\n\n/**\n * Options for the LRO poller.\n */\nexport interface LroEngineOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n resumeFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n lroResourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: RawResponse) => void;\n /**\n * A predicate to determine whether the LRO finished processing.\n */\n isDone?: (lastResponse: unknown, state: TState) => boolean;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful?: boolean;\n}\n\nexport interface PollerConfig {\n intervalInMs: number;\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.d.ts b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.d.ts new file mode 100644 index 0000000..d1257d1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.d.ts @@ -0,0 +1,27 @@ +import { LongRunningOperation, LroResourceLocationConfig, RawResponse } from "../../http/models.js"; +import { PollOperation, PollOperationState } from "../pollOperation.js"; +import { RestorableOperationState } from "../../poller/models.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { PollerConfig } from "./models.js"; +export declare class GenericPollOperation> implements PollOperation { + state: RestorableOperationState; + private lro; + private setErrorAsResult; + private lroResourceLocationConfig?; + private processResult?; + private updateState?; + private isDone?; + private pollerConfig?; + constructor(state: RestorableOperationState, lro: LongRunningOperation, setErrorAsResult: boolean, lroResourceLocationConfig?: LroResourceLocationConfig | undefined, processResult?: ((result: unknown, state: TState) => TResult) | undefined, updateState?: ((state: TState, lastResponse: RawResponse) => void) | undefined, isDone?: ((lastResponse: TResult, state: TState) => boolean) | undefined); + setPollerConfig(pollerConfig: PollerConfig): void; + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + cancel(): Promise>; + /** + * Serializes the Poller operation. + */ + toString(): string; +} +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.d.ts.map b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.d.ts.map new file mode 100644 index 0000000..9b66455 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,yBAAyB,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACpG,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACxE,OAAO,EAAE,wBAAwB,EAAc,MAAM,wBAAwB,CAAC;AAE9E,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAyB3C,qBAAa,oBAAoB,CAAC,OAAO,EAAE,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,CACnF,YAAW,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;IAKhC,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC;IAC9C,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,gBAAgB;IACxB,OAAO,CAAC,yBAAyB,CAAC;IAClC,OAAO,CAAC,aAAa,CAAC;IACtB,OAAO,CAAC,WAAW,CAAC;IACpB,OAAO,CAAC,MAAM,CAAC;IATjB,OAAO,CAAC,YAAY,CAAC,CAAe;gBAG3B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,EACtC,GAAG,EAAE,oBAAoB,EACzB,gBAAgB,EAAE,OAAO,EACzB,yBAAyB,CAAC,uCAA2B,EACrD,aAAa,CAAC,YAAW,OAAO,SAAS,MAAM,KAAK,OAAO,aAAA,EAC3D,WAAW,CAAC,WAAU,MAAM,gBAAgB,WAAW,KAAK,IAAI,aAAA,EAChE,MAAM,CAAC,kBAAiB,OAAO,SAAS,MAAM,KAAK,OAAO,aAAA;IAG7D,eAAe,CAAC,YAAY,EAAE,YAAY,GAAG,IAAI;IAIlD,MAAM,CAAC,OAAO,CAAC,EAAE;QACrB,WAAW,CAAC,EAAE,eAAe,CAAC;QAC9B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;KACxC,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAwCrC,MAAM,IAAI,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAKvD;;OAEG;IACI,QAAQ,IAAI,MAAM;CAK1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js new file mode 100644 index 0000000..4051ad8 --- /dev/null +++ 
b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { initHttpOperation, pollHttpOperation } from "../../http/operation.js"; +import { logger } from "../../logger.js"; +const createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => (state.isCancelled = true), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.isStarted = true), + setSucceeded: (state) => (state.isCompleted = true), + setFailed: () => { + /** empty body */ + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), +}); +export class GenericPollOperation { + state; + lro; + setErrorAsResult; + lroResourceLocationConfig; + processResult; + updateState; + isDone; + pollerConfig; + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + async update(options) { + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = { + ...this.state, + ...(await initHttpOperation({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + })), + }; + } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === undefined) { + await pollHttpOperation({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState + ? (state, { rawResponse }) => updateState(state, rawResponse) + : undefined, + isDone: isDone + ? ({ flatResponse }, state) => isDone(flatResponse, state) + : undefined, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult, + }); + } + options?.fireProgress?.(this.state); + return this; + } + async cancel() { + logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; + } + /** + * Serializes the Poller operation. 
+ */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js.map b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js.map new file mode 100644 index 0000000..de88f0a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAG/E,OAAO,EAAE,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAEzC,MAAM,gBAAgB,GAGlB,GAAG,EAAE,CAAC,CAAC;IACT,SAAS,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,CAAQ;IAC3D,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;IACjD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;IACrD,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;IAC/C,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IACnD,SAAS,EAAE,GAAG,EAAE;QACd,iBAAiB;IACnB,CAAC;IAED,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK;IAChC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM;IAClC,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW;IAC1C,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;IAClC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS;IACvC,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC;CACzF,CAAC,CAAC;AAEH,MAAM,OAAO,oBAAoB;IAMtB;IACC;IACA;IACA;IACA;IACA;IACA;IATF,YAAY,CAAgB;IAEpC,YACS,KAAuC,EACtC,GAAyB,EACzB,gBAAyB,EACzB,yBAAqD,EACrD,aAA2D,EAC3D,WAAgE,EAChE,MAA0D;QAN3D,UAAK,GAAL,KAAK,CAAkC;QACtC,QAAG,GAAH,GAAG,CAAsB;QACzB,qBAAgB,GAAhB,gBAAgB,CAAS;QACzB,8BAAyB,GAAzB,yBAAyB,CAA4B;QACrD,kBAAa,GAAb,aAAa,CAA8C;QAC3D,gBAAW,GAAX,WAAW,CAAqD;QAChE,WAAM,GAAN,MAAM,CAAoD;IACjE,CAAC;IAEG,eAAe,CAAC,YAA0B;QAC/C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,OAGZ;QACC,MAAM,UAAU,GAAG,gBAAgB,EAAmB,CAAC;QACvD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,GAAG;gBACX,GAAG,IAAI,CAAC,KAAK;gBACb,GAAG,CAAC,MAAM,iBAAiB,CAAC;oBAC1B,GAAG,EAAE,IAAI,CAAC,GAAG;oBACb,UAAU;oBACV,sBAAsB,EAAE,IAAI,CAAC,yBAAyB;oBACtD,aAAa,EAAE,IAAI,CAAC,aAAa;oBACjC,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;iBACxC,CAAC,CAAC;aACJ,CAAC;QACJ,CAAC;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC;QACrC,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QAE3B,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,KAAK,SAAS,EAAE,CAAC;YAC9D,MAAM,iBAAiB,CAAC;gBACtB,GAAG,EAAE,IAAI,CAAC,GAAG;gBACb,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,UAAU;gBACV,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,WAAW,EAAE,WAAW;oBACtB,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,WAAW,EAAE,EAAE,EAAE,CAAC,WAAW,CAAC,KAAK,EAAE,WAAW,CAAC;oBAC7D,CAAC,CAAC,SAAS;gBACb,MAAM,EAAE,MAAM;oBACZ,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,MAAM,CAAC,YAAuB,EAAE,KAAK,CAAC;oBACrE,CAAC,CAAC,SAAS;gBACb,OAAO;gBACP,QAAQ,EAAE,CAAC,YAAY,EAAE,EAAE;oBACzB,IAAI,CAAC,YAAa,CAAC,YAAY,GAAG,YAAY,CAAC;gBACjD,CAAC;gBACD,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;aACxC,CAAC,CAAC;QACL,CAAC;QACD,OAAO,EAAE,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,MAAM;QACV,MAAM,CAAC,KAAK,CAAC,+DAA+D,CAAC,CAAC;QAC9E,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC;YAC
pB,KAAK,EAAE,IAAI,CAAC,KAAK;SAClB,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LongRunningOperation, LroResourceLocationConfig, RawResponse } from \"../../http/models.js\";\nimport { PollOperation, PollOperationState } from \"../pollOperation.js\";\nimport { RestorableOperationState, StateProxy } from \"../../poller/models.js\";\nimport { initHttpOperation, pollHttpOperation } from \"../../http/operation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { PollerConfig } from \"./models.js\";\nimport { logger } from \"../../logger.js\";\n\nconst createStateProxy: >() => StateProxy<\n TState,\n TResult\n> = () => ({\n initState: (config) => ({ config, isStarted: true }) as any,\n setCanceled: (state) => (state.isCancelled = true),\n setError: (state, error) => (state.error = error),\n setResult: (state, result) => (state.result = result),\n setRunning: (state) => (state.isStarted = true),\n setSucceeded: (state) => (state.isCompleted = true),\n setFailed: () => {\n /** empty body */\n },\n\n getError: (state) => state.error,\n getResult: (state) => state.result,\n isCanceled: (state) => !!state.isCancelled,\n isFailed: (state) => !!state.error,\n isRunning: (state) => !!state.isStarted,\n isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error),\n});\n\nexport class GenericPollOperation>\n implements PollOperation\n{\n private pollerConfig?: PollerConfig;\n\n constructor(\n public state: RestorableOperationState,\n private lro: LongRunningOperation,\n private setErrorAsResult: boolean,\n private lroResourceLocationConfig?: LroResourceLocationConfig,\n private processResult?: (result: unknown, state: TState) => TResult,\n private updateState?: (state: TState, lastResponse: RawResponse) => void,\n private isDone?: (lastResponse: TResult, state: TState) => boolean,\n ) {}\n\n public setPollerConfig(pollerConfig: PollerConfig): void {\n this.pollerConfig = pollerConfig;\n }\n\n async update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise> {\n const stateProxy = createStateProxy();\n if (!this.state.isStarted) {\n this.state = {\n ...this.state,\n ...(await initHttpOperation({\n lro: this.lro,\n stateProxy,\n resourceLocationConfig: this.lroResourceLocationConfig,\n processResult: this.processResult,\n setErrorAsResult: this.setErrorAsResult,\n })),\n };\n }\n const updateState = this.updateState;\n const isDone = this.isDone;\n\n if (!this.state.isCompleted && this.state.error === undefined) {\n await pollHttpOperation({\n lro: this.lro,\n state: this.state,\n stateProxy,\n processResult: this.processResult,\n updateState: updateState\n ? (state, { rawResponse }) => updateState(state, rawResponse)\n : undefined,\n isDone: isDone\n ? 
({ flatResponse }, state) => isDone(flatResponse as TResult, state)\n : undefined,\n options,\n setDelay: (intervalInMs) => {\n this.pollerConfig!.intervalInMs = intervalInMs;\n },\n setErrorAsResult: this.setErrorAsResult,\n });\n }\n options?.fireProgress?.(this.state);\n return this;\n }\n\n async cancel(): Promise> {\n logger.error(\"`cancelOperation` is deprecated because it wasn't implemented\");\n return this;\n }\n\n /**\n * Serializes the Poller operation.\n */\n public toString(): string {\n return JSON.stringify({\n state: this.state,\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/models.d.ts b/node_modules/@azure/core-lro/dist/browser/legacy/models.d.ts new file mode 100644 index 0000000..6ecd744 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/models.d.ts @@ -0,0 +1,66 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress } from "../poller/models.js"; +import { PollOperationState } from "./pollOperation.js"; +/** + * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with. + */ +export interface PollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * @deprecated `cancelOperation` has been deprecated because it was not implemented. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * The TState defined in PollerLike can be a subset of the TState defined in + * the Poller implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString(): string; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/models.d.ts.map b/node_modules/@azure/core-lro/dist/browser/legacy/models.d.ts.map new file mode 100644 index 0000000..f8e2c78 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/legacy/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAExD;;GAEG;AAEH,MAAM,WAAW,UAAU,CAAC,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,EAAE,OAAO;IAC7E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE;;OAEG;IACH,aAAa,CAAC,WAAW,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACjF;;;;;OAKG;IACH,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB,CAAC;IAChE;;OAEG;IACH,MAAM,IAAI,OAAO,CAAC;IAClB;;OAEG;IACH,WAAW,IAAI,IAAI,CAAC;IACpB;;OAEG;IACH,SAAS,IAAI,OAAO,CAAC;IACrB;;;OAGG;IACH,eAAe,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5E;;;;OAIG;IACH,iBAAiB,IAAI,MAAM,CAAC;IAC5B;;;;;OAKG;IACH,SAAS,IAAI,OAAO,GAAG,SAAS,CAAC;IACjC;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/models.js b/node_modules/@azure/core-lro/dist/browser/legacy/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/models.js.map b/node_modules/@azure/core-lro/dist/browser/legacy/models.js.map new file mode 100644 index 0000000..4f2a390 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/legacy/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress } from \"../poller/models.js\";\nimport { PollOperationState } from \"./pollOperation.js\";\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\n// eslint-disable-next-line no-use-before-define\nexport interface PollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the 
poller. After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n * @deprecated `cancelOperation` has been deprecated because it was not implemented.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.d.ts b/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.d.ts new file mode 100644 index 0000000..9d1b341 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.d.ts @@ -0,0 +1,81 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * PollOperationState contains an opinionated list of the smallest set of properties needed + * to define any long running operation poller. + * + * While the Poller class works as the local control mechanism to start triggering, wait for, + * and potentially cancel a long running operation, the PollOperationState documents the status + * of the remote long running operation. + * + * It should be updated at least when the operation starts, when it's finished, and when it's cancelled. + * Though, implementations can have any other number of properties that can be updated by other reasons. + */ +export interface PollOperationState { + /** + * True if the operation has started. + */ + isStarted?: boolean; + /** + * True if the operation has been completed. + */ + isCompleted?: boolean; + /** + * True if the operation has been cancelled. + */ + isCancelled?: boolean; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation concluded in a result of an expected type. + */ + result?: TResult; +} +/** + * PollOperation is an interface that defines how to update the local reference of the state of the remote + * long running operation, just as well as how to request the cancellation of the same operation. + * + * It also has a method to serialize the operation so that it can be stored and resumed at any time. + */ +export interface PollOperation { + /** + * The state of the operation. + * It will be used to store the basic properties of PollOperationState, + * plus any custom property that the implementation may require. + */ + state: TState; + /** + * Defines how to request the remote service for updates on the status of the long running operation. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Also optionally receives a "fireProgress" function, which, if called, is responsible for triggering the + * poller's onProgress callbacks. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * It returns a promise that should be resolved with an updated version of the poller's operation. + * + * @param options - Optional properties passed to the operation's update method. + * + * @deprecated `cancel` has been deprecated because it was not implemented. + */ + cancel(options?: { + abortSignal?: AbortSignalLike; + }): Promise>; + /** + * Serializes the operation. + * Useful when wanting to create a poller that monitors an existing operation. + */ + toString(): string; +} +//# sourceMappingURL=pollOperation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.d.ts.map b/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.d.ts.map new file mode 100644 index 0000000..257ba63 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.d.ts","sourceRoot":"","sources":["../../../src/legacy/pollOperation.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE1D;;;;;;;;;;GAUG;AACH,MAAM,WAAW,kBAAkB,CAAC,OAAO;IACzC;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd;;OAEG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED;;;;;GAKG;AACH,MAAM,WAAW,aAAa,CAAC,MAAM,EAAE,OAAO;IAC5C;;;;OAIG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;;;;;;OAQG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QACf,WAAW,CAAC,EAAE,eAAe,CAAC;QAC9B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;KACxC,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAE5C;;;;;;;;;;OAUG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAE7F;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.js b/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.js new file mode 100644 index 0000000..f8c1767 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=pollOperation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.js.map b/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.js.map new file mode 100644 index 0000000..a1437f2 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/pollOperation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.js","sourceRoot":"","sources":["../../../src/legacy/pollOperation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * PollOperationState contains an opinionated list of the smallest set of properties needed\n * to define any long running operation poller.\n *\n * While the Poller class works as the local control mechanism to start triggering, wait for,\n * and potentially cancel a long running operation, the PollOperationState documents the status\n * of the remote long running operation.\n *\n * It should be updated at least when the operation starts, when it's finished, and when it's cancelled.\n * Though, implementations can have any other number of properties that can be updated by other reasons.\n */\nexport interface PollOperationState {\n /**\n * True if the operation has started.\n */\n isStarted?: boolean;\n /**\n * True if the operation has been completed.\n */\n isCompleted?: boolean;\n /**\n * True if the operation has been cancelled.\n */\n isCancelled?: boolean;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation concluded in a result of an expected type.\n */\n result?: TResult;\n}\n\n/**\n * PollOperation is an interface that defines how to update the local reference of the state of the remote\n * long running operation, just as well as how to request the cancellation of the same operation.\n *\n * It also has a method to serialize the operation so that it can be stored and resumed at any time.\n */\nexport interface PollOperation {\n /**\n * The state of the operation.\n * It will be used to store the basic properties of PollOperationState,\n * plus any custom property that the implementation may require.\n */\n state: TState;\n\n /**\n * Defines how to request the remote service for updates on the status of the long running operation.\n *\n * It optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n * Also optionally receives a \"fireProgress\" function, which, if called, is responsible for triggering the\n * poller's onProgress callbacks.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise>;\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * It returns a promise that should be resolved with an updated version of the poller's operation.\n *\n * @param options - Optional properties passed to the operation's update method.\n *\n * @deprecated `cancel` has been deprecated because it was not implemented.\n */\n cancel(options?: { abortSignal?: AbortSignalLike }): Promise>;\n\n /**\n * Serializes the operation.\n * Useful when wanting to create a poller that monitors an existing 
operation.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/poller.d.ts b/node_modules/@azure/core-lro/dist/browser/legacy/poller.d.ts new file mode 100644 index 0000000..e748266 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/poller.d.ts @@ -0,0 +1,327 @@ +import { PollOperation, PollOperationState } from "./pollOperation.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress } from "../poller/models.js"; +import { PollerLike } from "./models.js"; +/** + * PollProgressCallback is the type of the callback functions sent to onProgress. + * These functions will receive a TState that is defined by your implementation of + * the Poller class. + */ +export type PollProgressCallback = (state: TState) => void; +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export declare class PollerStoppedError extends Error { + constructor(message: string); +} +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +export declare class PollerCancelledError extends Error { + constructor(message: string); +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. 
+ * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +export declare abstract class Poller, TResult> implements PollerLike { + /** controls whether to throw an error if the operation failed or was canceled. */ + protected resolveOnUnsuccessful: boolean; + private stopped; + private resolve?; + private reject?; + private pollOncePromise?; + private cancelPromise?; + private promise; + private pollProgressCallbacks; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + protected operation: PollOperation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation: PollOperation); + /** + * Defines how much to wait between each poll request. + * This has to be implemented by your custom poller. + * + * \@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified. + * This can be used as follows: + * + * ```ts + * import { delay } from "@azure/core-util"; + * + * export class MyPoller extends Poller { + * // The other necessary definitions. 
+ * + * async delay(): Promise { + * const milliseconds = 1000; + * return delay(milliseconds); + * } + * } + * ``` + * + */ + protected abstract delay(): Promise; + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + private startPolling; + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + private pollOnce; + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + private fireProgress; + /** + * Invokes the underlying operation's cancel method. + */ + private cancelOnce; + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + private processUpdatedState; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller from continuing to poll. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. 
+ * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/poller.d.ts.map b/node_modules/@azure/core-lro/dist/browser/legacy/poller.d.ts.map new file mode 100644 index 0000000..8ae1b13 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/poller.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/legacy/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAEzC;;;;GAIG;AACH,MAAM,MAAM,oBAAoB,CAAC,MAAM,IAAI,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;AAEnE;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,KAAK;gBAC/B,OAAO,EAAE,MAAM;CAK5B;AAED;;;GAGG;AACH,qBAAa,oBAAqB,SAAQ,KAAK;gBACjC,OAAO,EAAE,MAAM;CAK5B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AAEH,8BAAsB,MAAM,CAAC,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,EAAE,OAAO,CAC9E,YAAW,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC;IAEtC,kFAAkF;IAClF,SAAS,CAAC,qBAAqB,EAAE,OAAO,CAAS;IACjD,OAAO,CAAC,OAAO,CAAiB;IAChC,OAAO,CAAC,OAAO,CAAC,CAA2B;IAC3C,OAAO,CAAC,MAAM,CAAC,CAAqE;IACpF,OAAO,CAAC,eAAe,CAAC,CAAgB;IACxC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,OAAO,CAAmB;IAClC,OAAO,CAAC,qBAAqB,CAAsC;IAEnE;;;OAGG;IACH,SAAS,CAAC,SAAS,EAAE,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAEpD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;gBACS,SAAS,EAAE,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;IAmBrD;;;;;;;;;;;;;;;;;;;;OAoBG;IACH,SAAS,CAAC,QAAQ,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAEzC;;;OAGG;YACW,YAAY;IAU1B;;;;;;;OAOG;YACW,QAAQ;IAUtB;;;;;;;OAOG;IACH,OAAO,CAAC,YAAY;IAMpB;;OAEG;YACW,UAAU;IAIxB;;;;;;;OAOG;IACI,IAAI,CAAC,OAAO,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAW3E,OAAO,CAAC,mBAAmB;IA0B3B;;OAEG;IACU,aAAa,CACxB,WAAW,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAClD,OAAO,CAAC,OAAO,CAAC;IAUnB;;;;;OAKG;IACI,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB;IAOtE;;OAEG;IACI,MAAM,IAAI,OAAO;IAKxB;;OAEG;IACI,WAAW,IAAI,IAAI;IAS1B;;OAEG;IACI,SAAS,IAAI,OAAO;IAI3B;;;;;;;;OAQG;IACI,eAAe,CAAC,OAAO,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAStF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB,IAAI,MAAM;IAIlC;;;;;OAKG;IACI,SAAS,IAAI,OAAO,GAAG,SAAS;IAKvC;;;OAGG;IACI,QAAQ,IAAI,MAAM;CAG1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/poller.js b/node_modules/@azure/core-lro/dist/browser/legacy/poller.js new file mode 100644 index 0000000..de6f912 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/poller.js @@ -0,0 +1,407 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +export class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. 
+ * Pollers can also request the cancellation of the ongoing process to whoever is providing the underlying long-running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike<MyOperationState, MyResult> { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike<MyOperationState, MyResult> = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +export class Poller { + /** controls whether to throw an error if the operation failed or was canceled. */ + resolveOnUnsuccessful = false; + stopped = true; + resolve; + reject; + pollOncePromise; + cancelPromise; + promise; + pollProgressCallbacks = []; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + operation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here.
+ * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pollUntilDone did not have a .catch or await try/catch on its return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } + } + /** + * pollOnce does a single poll by calling the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + } + this.processUpdatedState(); + } + /** + * fireProgress calls the functions passed in via the onProgress method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method.
+ */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error = new PollerCancelledError("Operation was canceled"); + this.reject(error); + throw error; + } + } + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.getResult()); + } + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + // This is needed because the state could have been updated by + // `cancelOperation`, e.g. the operation is canceled or an error occurred. + this.processUpdatedState(); + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. 
+ * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString() { + return this.operation.toString(); + } +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/poller.js.map b/node_modules/@azure/core-lro/dist/browser/legacy/poller.js.map new file mode 100644 index 0000000..d3bec33 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/legacy/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/legacy/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAclC;;;GAGG;AACH,MAAM,OAAO,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAC;QACjC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC;IAC5D,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;QACnC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,SAAS,CAAC,CAAC;IAC9D,CAAC;CACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AACH,gDAAgD;AAChD,MAAM,OAAgB,MAAM;IAG1B,kFAAkF;IACxE,qBAAqB,GAAY,KAAK,CAAC;IACzC,OAAO,GAAY,IAAI,CAAC;IACxB,OAAO,CAA4B;IACnC,MAAM,CAAsE;IAC5E,eAAe,CAAiB;IAChC,aAAa,CAAiB;IAC9B,OAAO,CAAmB;IAC1B,qBAAqB,GAAmC,EAAE,CAAC;IAEnE;;;OAGG;IACO,SAAS,CAAiC;IAEpD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;IACH,YAAY,SAAyC;QACnD,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CACxB,CACE,OAAkC,EAClC,MAA0E,EAC1E,EAAE;YACF,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACvB,CAAC,CACF,CAAC;QACF,mFAAmF;QACnF,sFAAsF;QACtF,mFAAmF;QACnF,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,EAAE;YACtB,yBAAyB;QAC3B,CAAC,CAAC,CAAC;IACL,CAAC;IAyBD;;;OAGG;IACK,KAAK,CAAC,YAAY,CAAC,cAAiD,EAAE;QAC5E,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;QACvB,CAAC;QACD,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YAC3C,MAAM,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;YAC7B,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;QACrB,CAAC;IACH,CAAC;IAED;;;;;;;OAOG;IACK,KAAK,CAAC,QAAQ,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YACnB,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;gBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,YAAY,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;aAC3C,CAAC,CAAC;QACL,CAAC;QACD,IAAI,CAAC,mBAAmB,EAAE,CAAC;IAC7B,CAAC;IAED;;;;;;;OAOG;IACK,YAAY,CAAC,KAAa;QAChC,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,qBAAqB,EAAE,CAAC;YAClD,QAAQ,CAAC,KAAK,CAAC,CAAC;QAClB,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,UAAU,CAAC,UAA6C,EAAE;QACtE,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IACxD,CAAC;IAED;;;;;;;OAOG;IACI,IAAI,CAAC,UAA6C,EAAE;QACzD,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,CAAC;YAC1B,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,oBAAoB,GAAG,GAAS,EAAE;gBACtC,IAAI,CAAC,eAAe,GAAG,SAAS,CAAC;YACnC,CAAC,CAAC;YACF,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,oBAAoB,EAAE,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3F,CAAC;QACD,OAAO,IAAI,CAAC,eAAe,CAAC;IAC9B,CAAC;IAEO,mBAAmB;QACzB,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;YAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC;gBAChC,IAAI,CAAC,MAAO,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;gBACzC,MAAM,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC;YACnC,CAAC;QACH,CAAC;QACD,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC;YACrC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC;gBAChC,MAAM,KAAK,GAAG,IAAI,oBAAoB,CAAC,wBAAwB,CAAC,CAAC;gBACjE,IAAI,CAAC,MAAO,CAAC,KAAK,CAAC,CAAC;gBACpB,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;QACD
,IAAI,IAAI,CAAC,MAAM,EAAE,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YAClC,uEAAuE;YACvE,uEAAuE;YACvE,oEAAoE;YACpE,uEAAuE;YACvE,cAAc;YACd,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,EAAa,CAAC,CAAC;QAC5C,CAAC;IACH,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,aAAa,CACxB,cAAiD,EAAE;QAEnD,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,IAAI,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACpD,CAAC;QACD,8DAA8D;QAC9D,0EAA0E;QAC1E,IAAI,CAAC,mBAAmB,EAAE,CAAC;QAC3B,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;OAKG;IACI,UAAU,CAAC,QAAiC;QACjD,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC1C,OAAO,GAAS,EAAE;YAChB,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC;QACxF,CAAC,CAAC;IACJ,CAAC;IAED;;OAEG;IACI,MAAM;QACX,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;IACxE,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;YAClB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;gBAChB,IAAI,CAAC,MAAM,CAAC,IAAI,kBAAkB,CAAC,gCAAgC,CAAC,CAAC,CAAC;YACxE,CAAC;QACH,CAAC;IACH,CAAC;IAED;;OAEG;IACI,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC;YACxB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QAChD,CAAC;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;QAC3D,CAAC;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB;QACtB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC9B,CAAC;IAED;;;;;OAKG;IACI,SAAS;QACd,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,KAAK,CAAC,MAAM,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperation, PollOperationState } from \"./pollOperation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress } from \"../poller/models.js\";\nimport { PollerLike } from \"./models.js\";\n\n/**\n * PollProgressCallback is the type of the callback functions sent to onProgress.\n * These functions will receive a TState that is defined by your implementation of\n * the Poller class.\n */\nexport type PollProgressCallback = (state: TState) => void;\n\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nexport class PollerStoppedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(this, PollerStoppedError.prototype);\n }\n}\n\n/**\n * When the operation is cancelled, the poller will be rejected with an instance\n * of the PollerCancelledError.\n */\nexport class PollerCancelledError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(this, PollerCancelledError.prototype);\n }\n}\n\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can also request 
the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nexport abstract class Poller, TResult>\n implements PollerLike\n{\n /** controls whether to throw an error if the operation failed or was canceled. */\n protected resolveOnUnsuccessful: boolean = false;\n private stopped: boolean = true;\n private resolve?: (value: TResult) => void;\n private reject?: (error: PollerStoppedError | PollerCancelledError | Error) => void;\n private pollOncePromise?: Promise;\n private cancelPromise?: Promise;\n private promise: Promise;\n private pollProgressCallbacks: PollProgressCallback[] = [];\n\n /**\n * The poller's operation is available in full to any of the methods of the Poller class\n * and any class extending the Poller class.\n */\n protected operation: PollOperation;\n\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. 
The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n constructor(operation: PollOperation) {\n this.operation = operation;\n this.promise = new Promise(\n (\n resolve: (result: TResult) => void,\n reject: (error: PollerStoppedError | PollerCancelledError | Error) => void,\n ) => {\n this.resolve = resolve;\n this.reject = reject;\n },\n );\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(() => {\n /* intentionally blank */\n });\n }\n\n /**\n * Defines how much to wait between each poll request.\n * This has to be implemented by your custom poller.\n *\n * \\@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified.\n * This can be used as follows:\n *\n * ```ts\n * import { delay } from \"@azure/core-util\";\n *\n * export class MyPoller extends Poller {\n * // The other necessary definitions.\n *\n * async delay(): Promise {\n * const milliseconds = 1000;\n * return delay(milliseconds);\n * }\n * }\n * ```\n *\n */\n protected abstract delay(): Promise;\n\n /**\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n private async startPolling(pollOptions: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (this.stopped) {\n this.stopped = false;\n }\n while (!this.isStopped() && !this.isDone()) {\n await this.poll(pollOptions);\n await this.delay();\n }\n }\n\n /**\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n private async pollOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise 
{\n if (!this.isDone()) {\n this.operation = await this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this),\n });\n }\n this.processUpdatedState();\n }\n\n /**\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n private fireProgress(state: TState): void {\n for (const callback of this.pollProgressCallbacks) {\n callback(state);\n }\n }\n\n /**\n * Invokes the underlying operation's cancel method.\n */\n private async cancelOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n this.operation = await this.operation.cancel(options);\n }\n\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public poll(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n const clearPollOncePromise = (): void => {\n this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n }\n\n private processUpdatedState(): void {\n if (this.operation.state.error) {\n this.stopped = true;\n if (!this.resolveOnUnsuccessful) {\n this.reject!(this.operation.state.error);\n throw this.operation.state.error;\n }\n }\n if (this.operation.state.isCancelled) {\n this.stopped = true;\n if (!this.resolveOnUnsuccessful) {\n const error = new PollerCancelledError(\"Operation was canceled\");\n this.reject!(error);\n throw error;\n }\n }\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.getResult() as TResult);\n }\n }\n\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n public async pollUntilDone(\n pollOptions: { abortSignal?: AbortSignalLike } = {},\n ): Promise {\n if (this.stopped) {\n this.startPolling(pollOptions).catch(this.reject);\n }\n // This is needed because the state could have been updated by\n // `cancelOperation`, e.g. 
the operation is canceled or an error occurred.\n this.processUpdatedState();\n return this.promise;\n }\n\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n public onProgress(callback: (state: TState) => void): CancelOnProgress {\n this.pollProgressCallbacks.push(callback);\n return (): void => {\n this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);\n };\n }\n\n /**\n * Returns true if the poller has finished polling.\n */\n public isDone(): boolean {\n const state: PollOperationState = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n }\n\n /**\n * Stops the poller from continuing to poll.\n */\n public stopPolling(): void {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n }\n\n /**\n * Returns true if the poller is stopped.\n */\n public isStopped(): boolean {\n return this.stopped;\n }\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public cancelOperation(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n } else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n }\n\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... 
More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n public getOperationState(): TState {\n return this.operation.state;\n }\n\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n public getResult(): TResult | undefined {\n const state: PollOperationState = this.operation.state;\n return state.result;\n }\n\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n public toString(): string {\n return this.operation.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/logger.d.ts b/node_modules/@azure/core-lro/dist/browser/logger.d.ts new file mode 100644 index 0000000..52138fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/logger.d.ts @@ -0,0 +1,6 @@ +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=logger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/logger.d.ts.map b/node_modules/@azure/core-lro/dist/browser/logger.d.ts.map new file mode 100644 index 0000000..3763b49 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/logger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,eAAO,MAAM,MAAM,qCAAiC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/logger.js b/node_modules/@azure/core-lro/dist/browser/logger.js new file mode 100644 index 0000000..094bfe4 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/logger.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +/** + * The `@azure/logger` configuration for this package. 
+ * @internal + */ +export const logger = createClientLogger("core-lro"); +//# sourceMappingURL=logger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/logger.js.map b/node_modules/@azure/core-lro/dist/browser/logger.js.map new file mode 100644 index 0000000..8f21af4 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/logger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.js","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;;GAGG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n * @internal\n */\nexport const logger = createClientLogger(\"core-lro\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/package.json b/node_modules/@azure/core-lro/dist/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-lro/dist/browser/poller/constants.d.ts b/node_modules/@azure/core-lro/dist/browser/poller/constants.d.ts new file mode 100644 index 0000000..a4e88ee --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/constants.d.ts @@ -0,0 +1,9 @@ +/** + * The default time interval to wait before sending the next polling request. + */ +export declare const POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. + */ +export declare const terminalStates: string[]; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/constants.d.ts.map b/node_modules/@azure/core-lro/dist/browser/poller/constants.d.ts.map new file mode 100644 index 0000000..206a7fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../src/poller/constants.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,mBAAmB,OAAO,CAAC;AACxC;;GAEG;AACH,eAAO,MAAM,cAAc,UAAsC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/constants.js b/node_modules/@azure/core-lro/dist/browser/poller/constants.js new file mode 100644 index 0000000..e35aad7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/constants.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The default time interval to wait before sending the next polling request. + */ +export const POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. 
+ */ +export const terminalStates = ["succeeded", "canceled", "failed"]; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/constants.js.map b/node_modules/@azure/core-lro/dist/browser/poller/constants.js.map new file mode 100644 index 0000000..539a956 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../src/poller/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,mBAAmB,GAAG,IAAI,CAAC;AACxC;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,WAAW,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The default time interval to wait before sending the next polling request.\n */\nexport const POLL_INTERVAL_IN_MS = 2000;\n/**\n * The closed set of terminal states.\n */\nexport const terminalStates = [\"succeeded\", \"canceled\", \"failed\"];\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/models.d.ts b/node_modules/@azure/core-lro/dist/browser/poller/models.d.ts new file mode 100644 index 0000000..80845ac --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/models.d.ts @@ -0,0 +1,221 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * Configurations for how to poll the operation and to check whether it has + * terminated. + */ +export interface OperationConfig { + /** The operation location */ + operationLocation?: string; + /** The resource location */ + resourceLocation?: string; + /** metadata about the operation */ + metadata?: Record; +} +/** + * The description of an operation. + */ +export interface Operation { + /** + * Sends the initiation request and returns, in addition to the response, the + * operation location, the potential resource location, and a set of metadata. + */ + init: () => Promise; + /** + * Sends the polling request. + */ + poll: (location: string, options?: TOptions) => Promise; +} +/** + * Type of a restorable long-running operation. + */ +export type RestorableOperationState = T & { + /** The operation configuration */ + config: OperationConfig; +}; +/** + * Options for `createPoller`. + */ +export interface CreatePollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + restoreFrom?: string; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: TResponse, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: TResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. + */ + withOperationLocation?: (operationLocation: string) => void; +} +export interface LroError { + code: string; + innererror?: InnerError; + message: string; +} +export interface InnerError { + code: string; + message: string; + innererror?: InnerError; +} +/** + * Options for `buildCreatePoller`. + */ +export interface BuildCreatePollerOptions { + /** + * Gets the status of the operation from the response received when the + * operation was initialized. Note that the operation could be already in + * a terminal state at this time. 
+ */ + getStatusFromInitialResponse: (inputs: { + response: TResponse; + state: RestorableOperationState; + operationLocation?: string; + }) => OperationStatus; + /** + * Gets the status of the operation from a response received when the + * operation was polled. + */ + getStatusFromPollResponse: (response: TResponse, state: RestorableOperationState) => OperationStatus; + /** + * Determines if the input error is an operation error. + */ + isOperationError: (error: Error) => boolean; + /** + * Gets the updated operation location from polling responses. + */ + getOperationLocation?: (response: TResponse, state: RestorableOperationState) => string | undefined; + /** + * Gets the resource location from a response. + */ + getResourceLocation: (response: TResponse, state: RestorableOperationState) => string | undefined; + /** + * Gets from the response the time interval the service suggests the client to + * wait before sending the next polling request. + */ + getPollingInterval?: (response: TResponse) => number | undefined; + /** + * Extracts an error model from a response. + */ + getError?: (response: TResponse) => LroError | undefined; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful: boolean; +} +/** + * The set of possible states an operation can be in at any given time. + */ +export type OperationStatus = "notStarted" | "running" | "succeeded" | "canceled" | "failed"; +/** + * While the poller works as the local control mechanism to start triggering and + * wait for a long-running operation, OperationState documents the status of + * the remote long-running operation. It gets updated after each poll. + */ +export interface OperationState { + /** + * The current status of the operation. + */ + status: OperationStatus; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation produced a result of the expected type. + */ + result?: TResult; +} +/** + * CancelOnProgress is used as the return value of a Poller's onProgress method. + * When a user invokes onProgress, they're required to pass in a function that will be + * called as a callback with the new data received each time the poll operation is updated. + * onProgress returns a function that will prevent any further update to reach the original callback. + */ +export type CancelOnProgress = () => void; +/** + * A simple poller interface. + */ +export interface SimplePollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Returns the state of the operation. 
+ */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +/** + * A state proxy that allows poller implementation to abstract away the operation + * state. This is useful to implement `lroEngine` and `createPoller` in a modular + * way. + */ +export interface StateProxy { + initState: (config: OperationConfig) => RestorableOperationState; + setRunning: (state: TState) => void; + setCanceled: (state: TState) => void; + setResult: (state: TState, result: TResult) => void; + setError: (state: TState, error: Error) => void; + setFailed: (state: TState) => void; + setSucceeded: (state: TState) => void; + isRunning: (state: TState) => boolean; + isCanceled: (state: TState) => boolean; + getResult: (state: TState) => TResult | undefined; + getError: (state: TState) => Error | undefined; + isFailed: (state: TState) => boolean; + isSucceeded: (state: TState) => boolean; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/models.d.ts.map b/node_modules/@azure/core-lro/dist/browser/poller/models.d.ts.map new file mode 100644 index 0000000..6e85a3f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/poller/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE1D;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,6BAA6B;IAC7B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,4BAA4B;IAC5B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,mCAAmC;IACnC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACnC;AAED;;GAEG;AACH,MAAM,WAAW,SAAS,CAAC,SAAS,EAAE,QAAQ;IAC5C;;;OAGG;IACH,IAAI,EAAE,MAAM,OAAO,CACjB,eAAe,GAAG;QAChB,QAAQ,EAAE,SAAS,CAAC;KACrB,CACF,CAAC;IACF;;OAEG;IACH,IAAI,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,QAAQ,KAAK,OAAO,CAAC,SAAS,CAAC,CAAC;CACpE;AAED;;GAEG;AACH,MAAM,MAAM,wBAAwB,CAAC,CAAC,IAAI,CAAC,GAAG;IAC5C,kCAAkC;IAClC,MAAM,EAAE,eAAe,CAAC;CACzB,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,mBAAmB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM;IAC7D;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,KAAK,IAAI,CAAC;IAC/D;;;OAGG;IACH,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,KAAK,IAAI,CAAC;CAC7D;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB,CAAC,SAAS,EAAE,MAAM;IACzD;;;;OAIG;IACH,4BAA4B,EAAE,CAAC,MAAM,EAAE;QACrC,QAAQ,EAAE,SAAS,CAAC;QACpB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;QACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,KAAK,eAAe,CAAC;IACtB;;;OAGG;IACH,yBAAyB,EAAE,CACzB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,eAAe,CAAC;IACrB;;OAEG;IACH,gBAAgB,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IAC5C;;OAEG;IACH,oBAAoB,CAAC,EAAE,CACrB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB;;OAEG;IACH,mBAAmB,EAAE,CACnB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG
,SAAS,CAAC;IACxB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,MAAM,GAAG,SAAS,CAAC;IACjE;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,QAAQ,GAAG,SAAS,CAAC;IACzD;;OAEG;IACH,qBAAqB,EAAE,OAAO,CAAC;CAChC;AAED;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG,YAAY,GAAG,SAAS,GAAG,WAAW,GAAG,UAAU,GAAG,QAAQ,CAAC;AAE7F;;;;GAIG;AACH,MAAM,WAAW,cAAc,CAAC,OAAO;IACrC;;OAEG;IACH,MAAM,EAAE,eAAe,CAAC;IACxB;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd;;OAEG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED;;;;;GAKG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC;AAE1C;;GAEG;AACH,MAAM,WAAW,gBAAgB,CAAC,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EAAE,OAAO;IAC/E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE;;OAEG;IACH,aAAa,CAAC,WAAW,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACjF;;;;;OAKG;IACH,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB,CAAC;IAChE;;OAEG;IACH,MAAM,IAAI,OAAO,CAAC;IAClB;;OAEG;IACH,WAAW,IAAI,IAAI,CAAC;IACpB;;OAEG;IACH,SAAS,IAAI,OAAO,CAAC;IACrB;;OAEG;IACH,iBAAiB,IAAI,MAAM,CAAC;IAC5B;;;;;OAKG;IACH,SAAS,IAAI,OAAO,GAAG,SAAS,CAAC;IACjC;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB;AAED;;;;GAIG;AACH,MAAM,WAAW,UAAU,CAAC,MAAM,EAAE,OAAO;IACzC,SAAS,EAAE,CAAC,MAAM,EAAE,eAAe,KAAK,wBAAwB,CAAC,MAAM,CAAC,CAAC;IAEzE,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,KAAK,IAAI,CAAC;IACpD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IAChD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACnC,YAAY,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAEtC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACtC,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACvC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,GAAG,SAAS,CAAC;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,KAAK,GAAG,SAAS,CAAC;IAC/C,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACrC,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;CACzC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/models.js b/node_modules/@azure/core-lro/dist/browser/poller/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
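The `SimplePollerLike` and `OperationState` shapes declared in `models.d.ts` above are easiest to read alongside a small consumer sketch. The snippet below is illustrative only and is not part of the vendored package: `CopyResult` and `beginCopy` are hypothetical placeholders, and the import assumes `SimplePollerLike` and `OperationState` are re-exported from the `@azure/core-lro` package root, as in the 2.x line vendored here.

```ts
import type { OperationState, SimplePollerLike } from "@azure/core-lro";

// Hypothetical result type and client entry point; only the poller surface
// used below comes from the interfaces declared in models.d.ts.
interface CopyResult {
  targetUrl: string;
}
declare function beginCopy(): Promise<SimplePollerLike<OperationState<CopyResult>, CopyResult>>;

async function runCopy(): Promise<CopyResult> {
  const poller = await beginCopy();

  // onProgress fires after every poll with the latest OperationState;
  // the returned function unsubscribes this callback.
  const stopReporting = poller.onProgress((state) => {
    console.log(`status: ${state.status}`);
  });

  try {
    // pollUntilDone keeps polling until a terminal status ("succeeded",
    // "failed", or "canceled") and resolves with the result, or rejects
    // on failure/cancellation depending on the poller's configuration.
    return await poller.pollUntilDone();
  } finally {
    stopReporting();
  }
}
```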
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/models.js.map b/node_modules/@azure/core-lro/dist/browser/poller/models.js.map new file mode 100644 index 0000000..554eb0c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/poller/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Configurations for how to poll the operation and to check whether it has\n * terminated.\n */\nexport interface OperationConfig {\n /** The operation location */\n operationLocation?: string;\n /** The resource location */\n resourceLocation?: string;\n /** metadata about the operation */\n metadata?: Record;\n}\n\n/**\n * The description of an operation.\n */\nexport interface Operation {\n /**\n * Sends the initiation request and returns, in addition to the response, the\n * operation location, the potential resource location, and a set of metadata.\n */\n init: () => Promise<\n OperationConfig & {\n response: TResponse;\n }\n >;\n /**\n * Sends the polling request.\n */\n poll: (location: string, options?: TOptions) => Promise;\n}\n\n/**\n * Type of a restorable long-running operation.\n */\nexport type RestorableOperationState = T & {\n /** The operation configuration */\n config: OperationConfig;\n};\n\n/**\n * Options for `createPoller`.\n */\nexport interface CreatePollerOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n restoreFrom?: string;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: TResponse, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: TResponse) => void;\n /**\n * A function to be called each time the operation location is updated by the\n * service.\n */\n withOperationLocation?: (operationLocation: string) => void;\n}\n\nexport interface LroError {\n code: string;\n innererror?: InnerError;\n message: string;\n}\n\nexport interface InnerError {\n code: string;\n message: string;\n innererror?: InnerError;\n}\n\n/**\n * Options for `buildCreatePoller`.\n */\nexport interface BuildCreatePollerOptions {\n /**\n * Gets the status of the operation from the response received when the\n * operation was initialized. 
Note that the operation could be already in\n * a terminal state at this time.\n */\n getStatusFromInitialResponse: (inputs: {\n response: TResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n }) => OperationStatus;\n /**\n * Gets the status of the operation from a response received when the\n * operation was polled.\n */\n getStatusFromPollResponse: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n /**\n * Determines if the input error is an operation error.\n */\n isOperationError: (error: Error) => boolean;\n /**\n * Gets the updated operation location from polling responses.\n */\n getOperationLocation?: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n /**\n * Gets the resource location from a response.\n */\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n /**\n * Gets from the response the time interval the service suggests the client to\n * wait before sending the next polling request.\n */\n getPollingInterval?: (response: TResponse) => number | undefined;\n /**\n * Extracts an error model from a response.\n */\n getError?: (response: TResponse) => LroError | undefined;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful: boolean;\n}\n\n/**\n * The set of possible states an operation can be in at any given time.\n */\nexport type OperationStatus = \"notStarted\" | \"running\" | \"succeeded\" | \"canceled\" | \"failed\";\n\n/**\n * While the poller works as the local control mechanism to start triggering and\n * wait for a long-running operation, OperationState documents the status of\n * the remote long-running operation. It gets updated after each poll.\n */\nexport interface OperationState {\n /**\n * The current status of the operation.\n */\n status: OperationStatus;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation produced a result of the expected type.\n */\n result?: TResult;\n}\n\n/**\n * CancelOnProgress is used as the return value of a Poller's onProgress method.\n * When a user invokes onProgress, they're required to pass in a function that will be\n * called as a callback with the new data received each time the poll operation is updated.\n * onProgress returns a function that will prevent any further update to reach the original callback.\n */\nexport type CancelOnProgress = () => void;\n\n/**\n * A simple poller interface.\n */\nexport interface SimplePollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. 
After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Returns the state of the operation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n\n/**\n * A state proxy that allows poller implementation to abstract away the operation\n * state. This is useful to implement `lroEngine` and `createPoller` in a modular\n * way.\n */\nexport interface StateProxy {\n initState: (config: OperationConfig) => RestorableOperationState;\n\n setRunning: (state: TState) => void;\n setCanceled: (state: TState) => void;\n setResult: (state: TState, result: TResult) => void;\n setError: (state: TState, error: Error) => void;\n setFailed: (state: TState) => void;\n setSucceeded: (state: TState) => void;\n\n isRunning: (state: TState) => boolean;\n isCanceled: (state: TState) => boolean;\n getResult: (state: TState) => TResult | undefined;\n getError: (state: TState) => Error | undefined;\n isFailed: (state: TState) => boolean;\n isSucceeded: (state: TState) => boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/operation.d.ts b/node_modules/@azure/core-lro/dist/browser/poller/operation.d.ts new file mode 100644 index 0000000..a23a90f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/operation.d.ts @@ -0,0 +1,40 @@ +import { LroError, Operation, OperationStatus, RestorableOperationState, StateProxy } from "./models.js"; +/** + * Deserializes the state + */ +export declare function deserializeState(serializedState: string): RestorableOperationState; +/** + * Initiates the long-running operation. + */ +export declare function initOperation(inputs: { + init: Operation["init"]; + stateProxy: StateProxy; + getOperationStatus: (inputs: { + response: TResponse; + state: RestorableOperationState; + operationLocation?: string; + }) => OperationStatus; + processResult?: (result: TResponse, state: TState) => TResult; + withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void; + setErrorAsResult: boolean; +}): Promise>; +/** Polls the long-running operation. 
*/ +export declare function pollOperation(inputs: { + poll: Operation["poll"]; + stateProxy: StateProxy; + state: RestorableOperationState; + getOperationStatus: (response: TResponse, state: RestorableOperationState) => OperationStatus; + getResourceLocation: (response: TResponse, state: RestorableOperationState) => string | undefined; + isOperationError: (error: Error) => boolean; + getPollingInterval?: (response: TResponse) => number | undefined; + setDelay: (intervalInMs: number) => void; + getOperationLocation?: (response: TResponse, state: RestorableOperationState) => string | undefined; + withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void; + processResult?: (result: TResponse, state: TState) => TResult; + getError?: (response: TResponse) => LroError | undefined; + updateState?: (state: TState, lastResponse: TResponse) => void; + isDone?: (lastResponse: TResponse, state: TState) => boolean; + setErrorAsResult: boolean; + options?: TOptions; +}): Promise; +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/operation.d.ts.map b/node_modules/@azure/core-lro/dist/browser/poller/operation.d.ts.map new file mode 100644 index 0000000..d8a62db --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../src/poller/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,QAAQ,EAER,SAAS,EACT,eAAe,EACf,wBAAwB,EACxB,UAAU,EACX,MAAM,aAAa,CAAC;AAIrB;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,MAAM,EACrC,eAAe,EAAE,MAAM,GACtB,wBAAwB,CAAC,MAAM,CAAC,CAMlC;AAuGD;;GAEG;AACH,wBAAsB,aAAa,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;IACtE,IAAI,EAAE,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC;IAC5C,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,kBAAkB,EAAE,CAAC,MAAM,EAAE;QAC3B,QAAQ,EAAE,SAAS,CAAC;QACpB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;QACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,KAAK,eAAe,CAAC;IACtB,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,KAAK,IAAI,CAAC;IAChF,gBAAgB,EAAE,OAAO,CAAC;CAC3B,GAAG,OAAO,CAAC,wBAAwB,CAAC,MAAM,CAAC,CAAC,CAqB5C;AA4DD,wCAAwC;AACxC,wBAAsB,aAAa,CAAC,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;IAChF,IAAI,EAAE,SAAS,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC;IAC7C,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,kBAAkB,EAAE,CAClB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,eAAe,CAAC;IACrB,mBAAmB,EAAE,CACnB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB,gBAAgB,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IAC5C,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,MAAM,GAAG,SAAS,CAAC;IACjE,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM,KAAK,IAAI,CAAC;IACzC,oBAAoB,CAAC,EAAE,CACrB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,KAAK,IAAI,CAAC;IAChF,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D,QAAQ,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,QAAQ,GAAG,SAAS,CAAC;IACzD,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,KAAK,IAAI,CAAC;IAC/D,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC7D,gBAAgB,EAAE,OAAO,CAAC;IAC1B,OAAO,CAAC,EAAE,QAAQ,CAAC;CACpB,GAAG,OAAO,CAAC,IAAI,CAAC,CAsDhB"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist/browser/poller/operation.js b/node_modules/@azure/core-lro/dist/browser/poller/operation.js new file mode 100644 index 0000000..869eaa5 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/operation.js @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { logger } from "../logger.js"; +import { terminalStates } from "./constants.js"; +/** + * Deserializes the state + */ +export function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); + } +} +function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error) => { + if (isOperationError(error)) { + stateProxy.setError(state, error); + stateProxy.setFailed(state); + } + throw error; + }; +} +function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; + } + return message + " " + innerMessage; +} +function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); + } + return { + code, + message, + }; +} +function processOperationStatus(result) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError?.(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } + } + if (isDone?.(response, state) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); + } +} +function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; +} +/** + * Initiates the long-running operation. 
+ */ +export async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation?.(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation, + }; + logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; +} +async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError, + })); + const status = getOperationStatus(response, state); + logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status, + }; + } + } + return { response, status }; +} +/** Polls the long-running operation. */ +export async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== undefined) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options, + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult, + }); + if (!terminalStates.includes(status)) { + const intervalInMs = getPollingInterval?.(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation?.(response, state); + if (location !== undefined) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation?.(location, isUpdated); + } + else + withOperationLocation?.(operationLocation, false); + } + updateState?.(state, response); + } +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/operation.js.map b/node_modules/@azure/core-lro/dist/browser/poller/operation.js.map new file mode 100644 index 0000000..849e973 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/operation.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/poller/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAUlC,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAEhD;;GAEG;AACH,MAAM,UAAU,gBAAgB,CAC9B,eAAuB;IAEvB,IAAI,CAAC;QACH,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,KAAK,CAAC;IAC3C,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,IAAI,KAAK,CAAC,sCAAsC,eAAe,EAAE,CAAC,CAAC;IAC3E,CAAC;AACH,CAAC;AAED,SAAS,aAAa,CAAkB,MAIvC;IACC,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,GAAG,MAAM,CAAC;IACvD,OAAO,CAAC,KAAY,EAAE,EAAE;QACtB,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE,CAAC;YAC5B,UAAU,CAAC,QAAQ,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;YAClC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;QAC9B,CAAC;QACD,MAAM,KAAK,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,0BAA0B,CAAC,cAAsB,EAAE,YAAoB;IAC9E,IAAI,OAAO,GAAG,cAAc,CAAC;IAC7B,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;QAC9B,OAAO,GAAG,OAAO,GAAG,GAAG,CAAC;IAC1B,CAAC;IACD,OAAO,OAAO,GAAG,GAAG,GAAG,YAAY,CAAC;AACtC,CAAC;AAED,SAAS,aAAa,CAAC,GAAa;IAIlC,IAAI,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC;IAC1B,IAAI,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;IACpB,IAAI,MAAM,GAAG,GAAiB,CAAC;IAC/B,OAAO,MAAM,CAAC,UAAU,EAAE,CAAC;QACzB,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC;QAC3B,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;QACnB,OAAO,GAAG,0BAA0B,CAAC,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAChE,CAAC;IACD,OAAO;QACL,IAAI;QACJ,OAAO;KACR,CAAC;AACJ,CAAC;AAED,SAAS,sBAAsB,CAA6B,MAS3D;IACC,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,QAAQ,EAAE,gBAAgB,EAAE,GAC9F,MAAM,CAAC;IACT,QAAQ,MAAM,EAAE,CAAC;QACf,KAAK,WAAW,CAAC,CAAC,CAAC;YACjB,UAAU,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;YAC/B,MAAM;QACR,CAAC;QACD,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,MAAM,GAAG,GAAG,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC;YACjC,IAAI,OAAO,GAAG,EAAE,CAAC;YACjB,IAAI,GAAG,EAAE,CAAC;gBACR,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;gBAC7C,OAAO,GAAG,KAAK,IAAI,KAAK,OAAO,EAAE,CAAC;YACpC,CAAC;YACD,MAAM,MAAM,GAAG,wCAAwC,OAAO,EAAE,CAAC;YACjE,UAAU,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;YAC9C,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YAC5B,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACvB,MAAM;QACR,CAAC;QACD,KAAK,UAAU,CAAC,CAAC,CAAC;YAChB,UAAU,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;YAC9B,MAAM;QACR,CAAC;IACH,CAAC;IACD,IACE,MAAM,EAAE,CAAC,QAAQ,EAAE,KAAK,CAAC;QACzB,CAAC,MAAM,KAAK,SAAS;YACnB,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,EACxF,CAAC;QACD,UAAU,CAAC,SAAS,CAClB,KAAK,EACL,WAAW,CAAC;YACV,QAAQ;YACR,KAAK;YACL,aAAa;SACd,CAAC,CACH,CAAC;IACJ,CAAC;AACH,CAAC;AAED,SAAS,WAAW,CAA6B,MAIhD;IACC,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;IAClD,OAAO,aAAa,CAAC,CAAC,CAAC,aAAa,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAE,QAA+B,CAAC;AAC3F,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAA6B,MAW/D;IACC,MAAM,EACJ,IAAI,EACJ,UAAU,EACV,aAAa,EACb,kBAAkB,EAClB,qBAAqB,EACrB,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,MAAM,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC;IACjF,IAAI,iBAAiB;QAAE,qBAAqB,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;IACzE,MAAM,MAAM,GAAG;QACb,QAAQ;QACR,iBAAiB;QACjB,gBAAgB;KACjB,CAAC;IACF,MAAM,CAAC,OAAO,CAAC,6BAA6B,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,KAAK,GAAG,UAAU,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC3C,MAAM,MAAM,GAAG,kBAAkB,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAC1E,sBAAsB,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,gBAAgB,EAAE,aAAa,EAAE,CAAC,CAAC;IACjG,OAAO,KAAK,CAAC;AACf,CAAC;AAED,KAAK,UAAU,mBAAmB,CAAuC,MAexE;IAIC,MAAM,EACJ,IAAI,EACJ,KAAK,EACL,UAAU,EACV,iBAAiB,EACjB,kBAAkB,EAClB,mBAAmB,EACnB,gBAAgB,EAChB,OAAO,GAC
R,GAAG,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC,KAAK,CAC3D,aAAa,CAAC;QACZ,KAAK;QACL,UAAU;QACV,gBAAgB;KACjB,CAAC,CACH,CAAC;IACF,MAAM,MAAM,GAAG,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IACnD,MAAM,CAAC,OAAO,CACZ,iCACE,KAAK,CAAC,MAAM,CAAC,iBACf,yBAAyB,MAAM,uBAC7B,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAChD,EAAE,CACH,CAAC;IACF,IAAI,MAAM,KAAK,WAAW,EAAE,CAAC;QAC3B,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QAC9D,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,OAAO;gBACL,QAAQ,EAAE,MAAM,IAAI,CAAC,gBAAgB,CAAC,CAAC,KAAK,CAC1C,aAAa,CAAC,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,CAAC,CACvD;gBACD,MAAM;aACP,CAAC;QACJ,CAAC;IACH,CAAC;IACD,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;AAC9B,CAAC;AAED,wCAAwC;AACxC,MAAM,CAAC,KAAK,UAAU,aAAa,CAAuC,MA0BzE;IACC,MAAM,EACJ,IAAI,EACJ,KAAK,EACL,UAAU,EACV,OAAO,EACP,kBAAkB,EAClB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,EAChB,qBAAqB,EACrB,kBAAkB,EAClB,aAAa,EACb,QAAQ,EACR,WAAW,EACX,QAAQ,EACR,MAAM,EACN,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,MAAM,EAAE,iBAAiB,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC;IAC3C,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;QACpC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,mBAAmB,CAAC;YACrD,IAAI;YACJ,kBAAkB;YAClB,KAAK;YACL,UAAU;YACV,iBAAiB;YACjB,mBAAmB;YACnB,gBAAgB;YAChB,OAAO;SACR,CAAC,CAAC;QACH,sBAAsB,CAAC;YACrB,MAAM;YACN,QAAQ;YACR,KAAK;YACL,UAAU;YACV,MAAM;YACN,aAAa;YACb,QAAQ;YACR,gBAAgB;SACjB,CAAC,CAAC;QAEH,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;YACrC,MAAM,YAAY,GAAG,kBAAkB,EAAE,CAAC,QAAQ,CAAC,CAAC;YACpD,IAAI,YAAY;gBAAE,QAAQ,CAAC,YAAY,CAAC,CAAC;YACzC,MAAM,QAAQ,GAAG,oBAAoB,EAAE,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YACzD,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;gBAC3B,MAAM,SAAS,GAAG,iBAAiB,KAAK,QAAQ,CAAC;gBACjD,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,QAAQ,CAAC;gBAC1C,qBAAqB,EAAE,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;YAC/C,CAAC;;gBAAM,qBAAqB,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC3D,CAAC;QACD,WAAW,EAAE,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IACjC,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LroError,\n InnerError,\n Operation,\n OperationStatus,\n RestorableOperationState,\n StateProxy,\n} from \"./models.js\";\nimport { logger } from \"../logger.js\";\nimport { terminalStates } from \"./constants.js\";\n\n/**\n * Deserializes the state\n */\nexport function deserializeState(\n serializedState: string,\n): RestorableOperationState {\n try {\n return JSON.parse(serializedState).state;\n } catch (e) {\n throw new Error(`Unable to deserialize input state: ${serializedState}`);\n }\n}\n\nfunction setStateError(inputs: {\n state: TState;\n stateProxy: StateProxy;\n isOperationError: (error: Error) => boolean;\n}): (error: Error) => never {\n const { state, stateProxy, isOperationError } = inputs;\n return (error: Error) => {\n if (isOperationError(error)) {\n stateProxy.setError(state, error);\n stateProxy.setFailed(state);\n }\n throw error;\n };\n}\n\nfunction appendReadableErrorMessage(currentMessage: string, innerMessage: string): string {\n let message = currentMessage;\n if (message.slice(-1) !== \".\") {\n message = message + \".\";\n }\n return message + \" \" + innerMessage;\n}\n\nfunction simplifyError(err: LroError): {\n code: string;\n message: string;\n} {\n let message = err.message;\n let code = err.code;\n let curErr = err as InnerError;\n while (curErr.innererror) {\n curErr = curErr.innererror;\n code = curErr.code;\n message = appendReadableErrorMessage(message, curErr.message);\n }\n return {\n code,\n message,\n };\n}\n\nfunction processOperationStatus(result: {\n status: 
OperationStatus;\n response: TResponse;\n state: RestorableOperationState;\n stateProxy: StateProxy;\n processResult?: (result: TResponse, state: TState) => TResult;\n getError?: (response: TResponse) => LroError | undefined;\n isDone?: (lastResponse: TResponse, state: TState) => boolean;\n setErrorAsResult: boolean;\n}): void {\n const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } =\n result;\n switch (status) {\n case \"succeeded\": {\n stateProxy.setSucceeded(state);\n break;\n }\n case \"failed\": {\n const err = getError?.(response);\n let postfix = \"\";\n if (err) {\n const { code, message } = simplifyError(err);\n postfix = `. ${code}. ${message}`;\n }\n const errStr = `The long-running operation has failed${postfix}`;\n stateProxy.setError(state, new Error(errStr));\n stateProxy.setFailed(state);\n logger.warning(errStr);\n break;\n }\n case \"canceled\": {\n stateProxy.setCanceled(state);\n break;\n }\n }\n if (\n isDone?.(response, state) ||\n (isDone === undefined &&\n [\"succeeded\", \"canceled\"].concat(setErrorAsResult ? [] : [\"failed\"]).includes(status))\n ) {\n stateProxy.setResult(\n state,\n buildResult({\n response,\n state,\n processResult,\n }),\n );\n }\n}\n\nfunction buildResult(inputs: {\n response: TResponse;\n state: TState;\n processResult?: (result: TResponse, state: TState) => TResult;\n}): TResult {\n const { processResult, response, state } = inputs;\n return processResult ? processResult(response, state) : (response as unknown as TResult);\n}\n\n/**\n * Initiates the long-running operation.\n */\nexport async function initOperation(inputs: {\n init: Operation[\"init\"];\n stateProxy: StateProxy;\n getOperationStatus: (inputs: {\n response: TResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n }) => OperationStatus;\n processResult?: (result: TResponse, state: TState) => TResult;\n withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void;\n setErrorAsResult: boolean;\n}): Promise> {\n const {\n init,\n stateProxy,\n processResult,\n getOperationStatus,\n withOperationLocation,\n setErrorAsResult,\n } = inputs;\n const { operationLocation, resourceLocation, metadata, response } = await init();\n if (operationLocation) withOperationLocation?.(operationLocation, false);\n const config = {\n metadata,\n operationLocation,\n resourceLocation,\n };\n logger.verbose(`LRO: Operation description:`, config);\n const state = stateProxy.initState(config);\n const status = getOperationStatus({ response, state, operationLocation });\n processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult });\n return state;\n}\n\nasync function pollOperationHelper(inputs: {\n poll: Operation[\"poll\"];\n stateProxy: StateProxy;\n state: RestorableOperationState;\n operationLocation: string;\n getOperationStatus: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n isOperationError: (error: Error) => boolean;\n options?: TOptions;\n}): Promise<{\n status: OperationStatus;\n response: TResponse;\n}> {\n const {\n poll,\n state,\n stateProxy,\n operationLocation,\n getOperationStatus,\n getResourceLocation,\n isOperationError,\n options,\n } = inputs;\n const response = await poll(operationLocation, options).catch(\n setStateError({\n state,\n stateProxy,\n isOperationError,\n }),\n );\n const status = 
getOperationStatus(response, state);\n logger.verbose(\n `LRO: Status:\\n\\tPolling from: ${\n state.config.operationLocation\n }\\n\\tOperation status: ${status}\\n\\tPolling status: ${\n terminalStates.includes(status) ? \"Stopped\" : \"Running\"\n }`,\n );\n if (status === \"succeeded\") {\n const resourceLocation = getResourceLocation(response, state);\n if (resourceLocation !== undefined) {\n return {\n response: await poll(resourceLocation).catch(\n setStateError({ state, stateProxy, isOperationError }),\n ),\n status,\n };\n }\n }\n return { response, status };\n}\n\n/** Polls the long-running operation. */\nexport async function pollOperation(inputs: {\n poll: Operation[\"poll\"];\n stateProxy: StateProxy;\n state: RestorableOperationState;\n getOperationStatus: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n isOperationError: (error: Error) => boolean;\n getPollingInterval?: (response: TResponse) => number | undefined;\n setDelay: (intervalInMs: number) => void;\n getOperationLocation?: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void;\n processResult?: (result: TResponse, state: TState) => TResult;\n getError?: (response: TResponse) => LroError | undefined;\n updateState?: (state: TState, lastResponse: TResponse) => void;\n isDone?: (lastResponse: TResponse, state: TState) => boolean;\n setErrorAsResult: boolean;\n options?: TOptions;\n}): Promise {\n const {\n poll,\n state,\n stateProxy,\n options,\n getOperationStatus,\n getResourceLocation,\n getOperationLocation,\n isOperationError,\n withOperationLocation,\n getPollingInterval,\n processResult,\n getError,\n updateState,\n setDelay,\n isDone,\n setErrorAsResult,\n } = inputs;\n const { operationLocation } = state.config;\n if (operationLocation !== undefined) {\n const { response, status } = await pollOperationHelper({\n poll,\n getOperationStatus,\n state,\n stateProxy,\n operationLocation,\n getResourceLocation,\n isOperationError,\n options,\n });\n processOperationStatus({\n status,\n response,\n state,\n stateProxy,\n isDone,\n processResult,\n getError,\n setErrorAsResult,\n });\n\n if (!terminalStates.includes(status)) {\n const intervalInMs = getPollingInterval?.(response);\n if (intervalInMs) setDelay(intervalInMs);\n const location = getOperationLocation?.(response, state);\n if (location !== undefined) {\n const isUpdated = operationLocation !== location;\n state.config.operationLocation = location;\n withOperationLocation?.(location, isUpdated);\n } else withOperationLocation?.(operationLocation, false);\n }\n updateState?.(state, response);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/poller.d.ts b/node_modules/@azure/core-lro/dist/browser/poller/poller.d.ts new file mode 100644 index 0000000..dc9e8fa --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/poller.d.ts @@ -0,0 +1,9 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { BuildCreatePollerOptions, CreatePollerOptions, Operation, OperationState, SimplePollerLike } from "./models.js"; +/** + * Returns a poller factory. 
+ */ +export declare function buildCreatePoller>(inputs: BuildCreatePollerOptions): (lro: Operation, options?: CreatePollerOptions) => Promise>; +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/poller.d.ts.map b/node_modules/@azure/core-lro/dist/browser/poller/poller.d.ts.map new file mode 100644 index 0000000..b300b2e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/poller.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/poller/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EACL,wBAAwB,EACxB,mBAAmB,EACnB,SAAS,EACT,cAAc,EAEd,gBAAgB,EAEjB,MAAM,aAAa,CAAC;AA8BrB;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EAC1F,MAAM,EAAE,wBAAwB,CAAC,SAAS,EAAE,MAAM,CAAC,GAClD,CACD,GAAG,EAAE,SAAS,CAAC,SAAS,EAAE;IAAE,WAAW,CAAC,EAAE,eAAe,CAAA;CAAE,CAAC,EAC5D,OAAO,CAAC,EAAE,mBAAmB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,CAAC,KACtD,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CA6J9C"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/poller.js b/node_modules/@azure/core-lro/dist/browser/poller/poller.js new file mode 100644 index 0000000..5fc9b09 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/poller.js @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { deserializeState, initOperation, pollOperation } from "./operation.js"; +import { POLL_INTERVAL_IN_MS } from "./constants.js"; +import { delay } from "@azure/core-util"; +const createStateProxy = () => ({ + /** + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. + */ + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => (state.status = "canceled"), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.status = "running"), + setSucceeded: (state) => (state.status = "succeeded"), + setFailed: (state) => (state.status = "failed"), + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded", +}); +/** + * Returns a poller factory. + */ +export function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback + ? (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() + : undefined; + const state = restoreFrom + ? 
deserializeState(restoreFrom) + : await initOperation({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful, + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === undefined, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state, + }), + onProgress: (callback) => { + const s = Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => (resultPromise ??= (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + // In the future we can use AbortSignal.any() instead + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal?.aborted) { + abortController.abort(); + } + else if (!abortSignal.aborted) { + inputAbortSignal?.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await delay(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } + } + } + finally { + inputAbortSignal?.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } + else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); + } + } + })().finally(() => { + resultPromise = undefined; + })), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } + else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + await pollOperation({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful, + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + }, + }; + return poller; + }; +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/poller.js.map b/node_modules/@azure/core-lro/dist/browser/poller/poller.js.map new file mode 100644 index 0000000..34902cf --- /dev/null +++ b/node_modules/@azure/core-lro/dist/browser/poller/poller.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/poller/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAYlC,OAAO,EAAE,gBAAgB,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAChF,OAAO,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AACrD,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC,MAAM,gBAAgB,GAGlB,GAAG,EAAE,CAAC,CAAC;IACT;;;;OAIG;IACH,SAAS,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,CAAQ;IAC7D,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,UAAU,CAAC;IACnD,QAAQ,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;IACjD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;IACrD,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,SAAS,CAAC;IACjD,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,WAAW,CAAC;IACrD,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,QAAQ,CAAC;IAE/C,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK;IAChC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM;IAClC,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,UAAU;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,QAAQ;IAC9C,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,SAAS;IAChD,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,WAAW;CACrD,CAAC,CAAC;AAEH;;GAEG;AACH,MAAM,UAAU,iBAAiB,CAC/B,MAAmD;IAKnD,MAAM,EACJ,oBAAoB,EACpB,4BAA4B,EAC5B,yBAAyB,EACzB,gBAAgB,EAChB,mBAAmB,EACnB,kBAAkB,EAClB,QAAQ,EACR,qBAAqB,GACtB,GAAG,MAAM,CAAC;IACX,OAAO,KAAK,EACV,EAAE,IAAI,EAAE,IAAI,EAA2D,EACvE,OAAyD,EACzD,EAAE;QACF,MAAM,EACJ,aAAa,EACb,WAAW,EACX,qBAAqB,EAAE,6BAA6B,EACpD,YAAY,GAAG,mBAAmB,EAClC,WAAW,GACZ,GAAG,OAAO,IAAI,EAAE,CAAC;QAClB,MAAM,UAAU,GAAG,gBAAgB,EAAmB,CAAC;QACvD,MAAM,qBAAqB,GAAG,6BAA6B;YACzD,CAAC,CAAC,CAAC,GAAG,EAAE;gBACJ,IAAI,MAAM,GAAG,KAAK,CAAC;gBACnB,OAAO,CAAC,iBAAyB,EAAE,SAAkB,EAAE,EAAE;oBACvD,IAAI,SAAS;wBAAE,6BAA6B,CAAC,iBAAiB,CAAC,CAAC;yBAC3D,IAAI,CAAC,MAAM;wBAAE,6BAA6B,CAAC,iBAAiB,CAAC,CAAC;oBACnE,MAAM,GAAG,IAAI,CAAC;gBAChB,CAAC,CAAC;YACJ,CAAC,CAAC,EAAE;YACN,CAAC,CAAC,SAAS,CAAC;QACd,MAAM,KAAK,GAAqC,WAAW;YACzD,CAAC,CAAC,gBAAgB,CAAC,WAAW,CAAC;YAC/B,CAAC,CAAC,MAAM,aAAa,CAAC;gBAClB,IAAI;gBACJ,UAAU;gBACV,aAAa;gBACb,kBAAkB,EAAE,4BAA4B;gBAChD,qBAAqB;gBACrB,gBAAgB,EAAE,CAAC,qBAAqB;aACzC,CAAC,CAAC;QACP,IAAI,aAA2C,CAAC;QAChD,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAG9C,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAmB,CAAC;QAC5C,MAAM,oBAAoB,GAAG,KAAK,IAAmB,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;QAC1F,MAAM,YAAY,GAAG,wBAAwB,CAAC;QAC9C,IAAI,uBAAuB,GAAG,YAAY,CAAC;QAE3C,MAAM,MAAM,GAAsC;YAChD,iBAAiB,EAAE,GAAG,EAAE,CAAC,KAAK;YAC9B,SAAS,EAAE,GAAG,EAAE,CAAC,KAAK,CAAC,MAAM;YAC7B,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,WAAW,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC;YACxE,SAAS,EAAE,GAAG,EAAE,CAAC,aAAa,KAAK,SAAS;YAC5C,WAAW,EAAE,GAAG,EAAE;gBAChB,eAAe,CAAC,KAAK,EAAE,CAAC;YAC1B,CAAC;YACD,QAAQ,EAAE,GAAG,EAAE,CACb,IAAI,CAAC,SAAS,CAAC;gBACb,KAAK;aACN,CAAC;YACJ,UAAU,EAAE,CAAC,QAAiC,EAAE,EAAE;gBAChD,MAAM,CAAC,GAAG,MAAM,EAAE,CAAC;gBACnB,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;gBAC1B,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;YACD,aAAa,EAAE,CAAC,WAA+C,EAAE,EAAE,CACjE,CAAC,aAAa,KAAK,CAAC,KAAK,IAAI,EAAE;gBAC7B,MAAM,EAAE,WAAW,EAAE,gBAAgB,EAAE,GAAG,WAAW,IAAI,EAAE,CAAC;gBAC5D,qDAAqD;gBACrD,SAAS,aAAa;oBACpB,eAAe,CAAC,KAAK,EAAE,CAAC;gBAC1B,CAAC;gBACD,MAAM,WAAW,GAAG,eAAe,CAAC,MAAM,CAAC;gBAC3C,IAAI,gBAAgB,EAAE,OAAO,EAAE,CAAC;oBAC9B,eAAe,CAAC,KAAK,EAAE,CAAC;gBAC1B,CAAC;qBAAM,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;oBAChC,gBAAgB,EAAE,gBAAgB,CAAC,OAAO,E
AAE,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7E,CAAC;gBAED,IAAI,CAAC;oBACH,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC;wBACrB,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,CAAC,CAAC;wBACnC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC;4BACxB,MAAM,KAAK,CAAC,uBAAuB,EAAE,EAAE,WAAW,EAAE,CAAC,CAAC;4BACtD,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,CAAC,CAAC;wBACrC,CAAC;oBACH,CAAC;gBACH,CAAC;wBAAS,CAAC;oBACT,gBAAgB,EAAE,mBAAmB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;gBAChE,CAAC;gBACD,IAAI,qBAAqB,EAAE,CAAC;oBAC1B,OAAO,MAAM,CAAC,SAAS,EAAa,CAAC;gBACvC,CAAC;qBAAM,CAAC;oBACN,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,WAAW;4BACd,OAAO,MAAM,CAAC,SAAS,EAAa,CAAC;wBACvC,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;wBACpB,KAAK,YAAY,CAAC;wBAClB,KAAK,SAAS;4BACZ,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAC;oBACvE,CAAC;gBACH,CAAC;YACH,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE;gBAChB,aAAa,GAAG,SAAS,CAAC;YAC5B,CAAC,CAAC,CAAC;YACL,KAAK,CAAC,IAAI,CAAC,WAA+C;gBACxD,IAAI,qBAAqB,EAAE,CAAC;oBAC1B,IAAI,MAAM,CAAC,MAAM,EAAE;wBAAE,OAAO;gBAC9B,CAAC;qBAAM,CAAC;oBACN,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,WAAW;4BACd,OAAO;wBACT,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;oBACtB,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,CAAC;oBAClB,IAAI;oBACJ,KAAK;oBACL,UAAU;oBACV,oBAAoB;oBACpB,gBAAgB;oBAChB,qBAAqB;oBACrB,kBAAkB;oBAClB,kBAAkB,EAAE,yBAAyB;oBAC7C,mBAAmB;oBACnB,aAAa;oBACb,QAAQ;oBACR,WAAW;oBACX,OAAO,EAAE,WAAW;oBACpB,QAAQ,EAAE,CAAC,gBAAgB,EAAE,EAAE;wBAC7B,uBAAuB,GAAG,gBAAgB,CAAC;oBAC7C,CAAC;oBACD,gBAAgB,EAAE,CAAC,qBAAqB;iBACzC,CAAC,CAAC;gBACH,MAAM,oBAAoB,EAAE,CAAC;gBAC7B,IAAI,CAAC,qBAAqB,EAAE,CAAC;oBAC3B,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;oBACtB,CAAC;gBACH,CAAC;YACH,CAAC;SACF,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n BuildCreatePollerOptions,\n CreatePollerOptions,\n Operation,\n OperationState,\n RestorableOperationState,\n SimplePollerLike,\n StateProxy,\n} from \"./models.js\";\nimport { deserializeState, initOperation, pollOperation } from \"./operation.js\";\nimport { POLL_INTERVAL_IN_MS } from \"./constants.js\";\nimport { delay } from \"@azure/core-util\";\n\nconst createStateProxy: >() => StateProxy<\n TState,\n TResult\n> = () => ({\n /**\n * The state at this point is created to be of type OperationState.\n * It will be updated later to be of type TState when the\n * customer-provided callback, `updateState`, is called during polling.\n */\n initState: (config) => ({ status: \"running\", config }) as any,\n setCanceled: (state) => (state.status = \"canceled\"),\n setError: (state, error) => (state.error = error),\n setResult: (state, result) => (state.result = result),\n setRunning: (state) => (state.status = \"running\"),\n setSucceeded: (state) => (state.status = \"succeeded\"),\n setFailed: (state) => (state.status = \"failed\"),\n\n getError: (state) => state.error,\n getResult: (state) => state.result,\n isCanceled: (state) => state.status === \"canceled\",\n isFailed: (state) => state.status === \"failed\",\n isRunning: (state) => state.status === \"running\",\n isSucceeded: (state) => state.status === \"succeeded\",\n});\n\n/**\n * Returns a poller factory.\n */\nexport function buildCreatePoller>(\n inputs: BuildCreatePollerOptions,\n): (\n lro: Operation,\n options?: 
CreatePollerOptions,\n) => Promise> {\n const {\n getOperationLocation,\n getStatusFromInitialResponse,\n getStatusFromPollResponse,\n isOperationError,\n getResourceLocation,\n getPollingInterval,\n getError,\n resolveOnUnsuccessful,\n } = inputs;\n return async (\n { init, poll }: Operation,\n options?: CreatePollerOptions,\n ) => {\n const {\n processResult,\n updateState,\n withOperationLocation: withOperationLocationCallback,\n intervalInMs = POLL_INTERVAL_IN_MS,\n restoreFrom,\n } = options || {};\n const stateProxy = createStateProxy();\n const withOperationLocation = withOperationLocationCallback\n ? (() => {\n let called = false;\n return (operationLocation: string, isUpdated: boolean) => {\n if (isUpdated) withOperationLocationCallback(operationLocation);\n else if (!called) withOperationLocationCallback(operationLocation);\n called = true;\n };\n })()\n : undefined;\n const state: RestorableOperationState = restoreFrom\n ? deserializeState(restoreFrom)\n : await initOperation({\n init,\n stateProxy,\n processResult,\n getOperationStatus: getStatusFromInitialResponse,\n withOperationLocation,\n setErrorAsResult: !resolveOnUnsuccessful,\n });\n let resultPromise: Promise | undefined;\n const abortController = new AbortController();\n // Progress handlers\n type Handler = (state: TState) => void;\n const handlers = new Map();\n const handleProgressEvents = async (): Promise => handlers.forEach((h) => h(state));\n const cancelErrMsg = \"Operation was canceled\";\n let currentPollIntervalInMs = intervalInMs;\n\n const poller: SimplePollerLike = {\n getOperationState: () => state,\n getResult: () => state.result,\n isDone: () => [\"succeeded\", \"failed\", \"canceled\"].includes(state.status),\n isStopped: () => resultPromise === undefined,\n stopPolling: () => {\n abortController.abort();\n },\n toString: () =>\n JSON.stringify({\n state,\n }),\n onProgress: (callback: (state: TState) => void) => {\n const s = Symbol();\n handlers.set(s, callback);\n return () => handlers.delete(s);\n },\n pollUntilDone: (pollOptions?: { abortSignal?: AbortSignalLike }) =>\n (resultPromise ??= (async () => {\n const { abortSignal: inputAbortSignal } = pollOptions || {};\n // In the future we can use AbortSignal.any() instead\n function abortListener(): void {\n abortController.abort();\n }\n const abortSignal = abortController.signal;\n if (inputAbortSignal?.aborted) {\n abortController.abort();\n } else if (!abortSignal.aborted) {\n inputAbortSignal?.addEventListener(\"abort\", abortListener, { once: true });\n }\n\n try {\n if (!poller.isDone()) {\n await poller.poll({ abortSignal });\n while (!poller.isDone()) {\n await delay(currentPollIntervalInMs, { abortSignal });\n await poller.poll({ abortSignal });\n }\n }\n } finally {\n inputAbortSignal?.removeEventListener(\"abort\", abortListener);\n }\n if (resolveOnUnsuccessful) {\n return poller.getResult() as TResult;\n } else {\n switch (state.status) {\n case \"succeeded\":\n return poller.getResult() as TResult;\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n case \"notStarted\":\n case \"running\":\n throw new Error(`Polling completed without succeeding or failing`);\n }\n }\n })().finally(() => {\n resultPromise = undefined;\n })),\n async poll(pollOptions?: { abortSignal?: AbortSignalLike }): Promise {\n if (resolveOnUnsuccessful) {\n if (poller.isDone()) return;\n } else {\n switch (state.status) {\n case \"succeeded\":\n return;\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case 
\"failed\":\n throw state.error;\n }\n }\n await pollOperation({\n poll,\n state,\n stateProxy,\n getOperationLocation,\n isOperationError,\n withOperationLocation,\n getPollingInterval,\n getOperationStatus: getStatusFromPollResponse,\n getResourceLocation,\n processResult,\n getError,\n updateState,\n options: pollOptions,\n setDelay: (pollIntervalInMs) => {\n currentPollIntervalInMs = pollIntervalInMs;\n },\n setErrorAsResult: !resolveOnUnsuccessful,\n });\n await handleProgressEvents();\n if (!resolveOnUnsuccessful) {\n switch (state.status) {\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n }\n }\n },\n };\n return poller;\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/models.d.ts b/node_modules/@azure/core-lro/dist/commonjs/http/models.d.ts new file mode 100644 index 0000000..c6ebb32 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/models.d.ts @@ -0,0 +1,106 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { LroError } from "../poller/models.js"; +/** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ +export type LroResourceLocationConfig = "azure-async-operation" | "location" | "original-uri"; +/** + * The type of a LRO response body. This is just a convenience type for checking the status of the operation. + */ +export interface ResponseBody extends Record { + /** The status of the operation. */ + status?: unknown; + /** The state of the provisioning process */ + provisioningState?: unknown; + /** The properties of the provisioning process */ + properties?: { + provisioningState?: unknown; + } & Record; + /** The error if the operation failed */ + error?: Partial; + /** The location of the created resource */ + resourceLocation?: string; +} +/** + * Simple type of the raw response. + */ +export interface RawResponse { + /** The HTTP status code */ + statusCode: number; + /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */ + headers: { + [headerName: string]: string; + }; + /** The parsed response body */ + body?: unknown; +} +/** + * The type of the response of a LRO. + */ +export interface LroResponse { + /** The flattened response */ + flatResponse: T; + /** The raw response */ + rawResponse: RawResponse; +} +/** + * Description of a long running operation. + */ +export interface LongRunningOperation { + /** + * The request path. This should be set if the operation is a PUT and needs + * to poll from the same request path. + */ + requestPath?: string; + /** + * The HTTP request method. This should be set if the operation is a PUT or a + * DELETE. + */ + requestMethod?: string; + /** + * A function that can be used to send initial request to the service. + */ + sendInitialRequest: () => Promise>; + /** + * A function that can be used to poll for the current status of a long running operation. + */ + sendPollRequest: (path: string, options?: { + abortSignal?: AbortSignalLike; + }) => Promise>; +} +export type HttpOperationMode = "OperationLocation" | "ResourceLocation" | "Body"; +/** + * Options for `createPoller`. + */ +export interface CreateHttpPollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. 
+ */ + restoreFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + resourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, response: LroResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. + */ + withOperationLocation?: (operationLocation: string) => void; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful?: boolean; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/models.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/http/models.d.ts.map new file mode 100644 index 0000000..8c69ed8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/http/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAG/C;;GAEG;AACH,MAAM,MAAM,yBAAyB,GAAG,uBAAuB,GAAG,UAAU,GAAG,cAAc,CAAC;AAE9F;;GAEG;AAEH,MAAM,WAAW,YAAa,SAAQ,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAC3D,mCAAmC;IACnC,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,4CAA4C;IAC5C,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,iDAAiD;IACjD,UAAU,CAAC,EAAE;QAAE,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvE,wCAAwC;IACxC,KAAK,CAAC,EAAE,OAAO,CAAC,QAAQ,CAAC,CAAC;IAC1B,2CAA2C;IAC3C,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B,2BAA2B;IAC3B,UAAU,EAAE,MAAM,CAAC;IACnB,iJAAiJ;IACjJ,OAAO,EAAE;QACP,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAC;KAC9B,CAAC;IACF,+BAA+B;IAC/B,IAAI,CAAC,EAAE,OAAO,CAAC;CAChB;AAGD;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,CAAC,GAAG,OAAO;IACtC,6BAA6B;IAC7B,YAAY,EAAE,CAAC,CAAC;IAChB,uBAAuB;IACvB,WAAW,EAAE,WAAW,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB,CAAC,CAAC,GAAG,OAAO;IAC/C;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;OAEG;IACH,kBAAkB,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;IACxD;;OAEG;IACH,eAAe,EAAE,CACf,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,KACxC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;CAC9B;AAED,MAAM,MAAM,iBAAiB,GAAG,mBAAmB,GAAG,kBAAkB,GAAG,MAAM,CAAC;AAElF;;GAEG;AACH,MAAM,WAAW,uBAAuB,CAAC,OAAO,EAAE,MAAM;IACtD;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;IACnD;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,WAAW,KAAK,IAAI,CAAC;IAC7D;;;OAGG;IACH,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,KAAK,IAAI,CAAC;IAC5D;;OAEG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/models.js b/node_modules/@azure/core-lro/dist/commonjs/http/models.js new file mode 100644 index 0000000..783931f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/models.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/models.js.map b/node_modules/@azure/core-lro/dist/commonjs/http/models.js.map new file mode 100644 index 0000000..fec0198 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/http/models.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { LroError } from \"../poller/models.js\";\n\n// TODO: rename to ResourceLocationConfig\n/**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\nexport type LroResourceLocationConfig = \"azure-async-operation\" | \"location\" | \"original-uri\";\n\n/**\n * The type of a LRO response body. This is just a convenience type for checking the status of the operation.\n */\n\nexport interface ResponseBody extends Record {\n /** The status of the operation. */\n status?: unknown;\n /** The state of the provisioning process */\n provisioningState?: unknown;\n /** The properties of the provisioning process */\n properties?: { provisioningState?: unknown } & Record;\n /** The error if the operation failed */\n error?: Partial;\n /** The location of the created resource */\n resourceLocation?: string;\n}\n\n/**\n * Simple type of the raw response.\n */\nexport interface RawResponse {\n /** The HTTP status code */\n statusCode: number;\n /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */\n headers: {\n [headerName: string]: string;\n };\n /** The parsed response body */\n body?: unknown;\n}\n\n// TODO: rename to OperationResponse\n/**\n * The type of the response of a LRO.\n */\nexport interface LroResponse {\n /** The flattened response */\n flatResponse: T;\n /** The raw response */\n rawResponse: RawResponse;\n}\n\n/**\n * Description of a long running operation.\n */\nexport interface LongRunningOperation {\n /**\n * The request path. This should be set if the operation is a PUT and needs\n * to poll from the same request path.\n */\n requestPath?: string;\n /**\n * The HTTP request method. 
This should be set if the operation is a PUT or a\n * DELETE.\n */\n requestMethod?: string;\n /**\n * A function that can be used to send initial request to the service.\n */\n sendInitialRequest: () => Promise>;\n /**\n * A function that can be used to poll for the current status of a long running operation.\n */\n sendPollRequest: (\n path: string,\n options?: { abortSignal?: AbortSignalLike },\n ) => Promise>;\n}\n\nexport type HttpOperationMode = \"OperationLocation\" | \"ResourceLocation\" | \"Body\";\n\n/**\n * Options for `createPoller`.\n */\nexport interface CreateHttpPollerOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n restoreFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n resourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, response: LroResponse) => void;\n /**\n * A function to be called each time the operation location is updated by the\n * service.\n */\n withOperationLocation?: (operationLocation: string) => void;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/operation.d.ts b/node_modules/@azure/core-lro/dist/commonjs/http/operation.d.ts new file mode 100644 index 0000000..c06358e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/operation.d.ts @@ -0,0 +1,47 @@ +import { HttpOperationMode, LongRunningOperation, LroResourceLocationConfig, LroResponse, RawResponse } from "./models.js"; +import { LroError, OperationConfig, OperationStatus, RestorableOperationState, StateProxy } from "../poller/models.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +export declare function inferLroMode(inputs: { + rawResponse: RawResponse; + requestPath?: string; + requestMethod?: string; + resourceLocationConfig?: LroResourceLocationConfig; +}): (OperationConfig & { + mode: HttpOperationMode; +}) | undefined; +export declare function parseRetryAfter({ rawResponse }: LroResponse): number | undefined; +export declare function getErrorFromResponse(response: LroResponse): LroError | undefined; +export declare function getStatusFromInitialResponse(inputs: { + response: LroResponse; + state: RestorableOperationState; + operationLocation?: string; +}): OperationStatus; +/** + * Initiates the long-running operation. 
+ */ +export declare function initHttpOperation(inputs: { + stateProxy: StateProxy; + resourceLocationConfig?: LroResourceLocationConfig; + processResult?: (result: unknown, state: TState) => TResult; + setErrorAsResult: boolean; + lro: LongRunningOperation; +}): Promise>; +export declare function getOperationLocation({ rawResponse }: LroResponse, state: RestorableOperationState): string | undefined; +export declare function getOperationStatus({ rawResponse }: LroResponse, state: RestorableOperationState): OperationStatus; +export declare function getResourceLocation(res: LroResponse, state: RestorableOperationState): string | undefined; +export declare function isOperationError(e: Error): boolean; +/** Polls the long-running operation. */ +export declare function pollHttpOperation(inputs: { + lro: LongRunningOperation; + stateProxy: StateProxy; + processResult?: (result: unknown, state: TState) => TResult; + updateState?: (state: TState, lastResponse: LroResponse) => void; + isDone?: (lastResponse: LroResponse, state: TState) => boolean; + setDelay: (intervalInMs: number) => void; + options?: { + abortSignal?: AbortSignalLike; + }; + state: RestorableOperationState; + setErrorAsResult: boolean; +}): Promise; +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/operation.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/http/operation.d.ts.map new file mode 100644 index 0000000..39d679a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../src/http/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,yBAAyB,EACzB,WAAW,EACX,WAAW,EAEZ,MAAM,aAAa,CAAC;AACrB,OAAO,EACL,QAAQ,EACR,eAAe,EACf,eAAe,EACf,wBAAwB,EACxB,UAAU,EACX,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AA6D1D,wBAAgB,YAAY,CAAC,MAAM,EAAE;IACnC,WAAW,EAAE,WAAW,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;CACpD,GAAG,CAAC,eAAe,GAAG;IAAE,IAAI,EAAE,iBAAiB,CAAA;CAAE,CAAC,GAAG,SAAS,CA+B9D;AAqDD,wBAAgB,eAAe,CAAC,CAAC,EAAE,EAAE,WAAW,EAAE,EAAE,WAAW,CAAC,CAAC,CAAC,GAAG,MAAM,GAAG,SAAS,CAUtF;AAED,wBAAgB,oBAAoB,CAAC,CAAC,EAAE,QAAQ,EAAE,WAAW,CAAC,CAAC,CAAC,GAAG,QAAQ,GAAG,SAAS,CAetF;AAWD,wBAAgB,4BAA4B,CAAC,MAAM,EAAE,MAAM,EAAE;IAC3D,QAAQ,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B,GAAG,eAAe,CAelB;AAED;;GAEG;AACH,wBAAsB,iBAAiB,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;IAC/D,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;IACnD,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D,gBAAgB,EAAE,OAAO,CAAC;IAC1B,GAAG,EAAE,oBAAoB,CAAC;CAC3B,GAAG,OAAO,CAAC,wBAAwB,CAAC,MAAM,CAAC,CAAC,CAyB5C;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EACzC,EAAE,WAAW,EAAE,EAAE,WAAW,EAC5B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,MAAM,GAAG,SAAS,CAiBpB;AAED,wBAAgB,kBAAkB,CAAC,MAAM,EACvC,EAAE,WAAW,EAAE,EAAE,WAAW,EAC5B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,eAAe,CAejB;AASD,wBAAgB,mBAAmB,CAAC,MAAM,EACxC,GAAG,EAAE,WAAW,EAChB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,MAAM,GAAG,SAAS,CAMpB;AAED,wBAAgB,gBAAgB,CAAC,CAAC,EAAE,KAAK,GAAG,OAAO,CAElD;AAED,wCAAwC;AACxC,wBAAsB,iBAAiB,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE;IAC/D,GAAG,EAAE,oBAAoB,CAAC;IAC1B,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EA
AE,YAAY,EAAE,WAAW,KAAK,IAAI,CAAC;IACjE,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC/D,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM,KAAK,IAAI,CAAC;IACzC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,CAAC;IAC5C,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,gBAAgB,EAAE,OAAO,CAAC;CAC3B,GAAG,OAAO,CAAC,IAAI,CAAC,CAkChB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/operation.js b/node_modules/@azure/core-lro/dist/commonjs/http/operation.js new file mode 100644 index 0000000..c3fc6ed --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/operation.js @@ -0,0 +1,293 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pollHttpOperation = exports.isOperationError = exports.getResourceLocation = exports.getOperationStatus = exports.getOperationLocation = exports.initHttpOperation = exports.getStatusFromInitialResponse = exports.getErrorFromResponse = exports.parseRetryAfter = exports.inferLroMode = void 0; +const operation_js_1 = require("../poller/operation.js"); +const logger_js_1 = require("../logger.js"); +function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation ?? azureAsyncOperation; +} +function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(inputs) { + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return undefined; + } + case "PATCH": { + return getDefault() ?? requestPath; + } + default: { + return getDefault(); + } + } + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return undefined; + } + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; + } + } + } +} +function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod?.toLocaleUpperCase(); + if (pollingUrl !== undefined) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig, + }), + }; + } + else if (location !== undefined) { + return { + mode: "ResourceLocation", + operationLocation: location, + }; + } + else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath, + }; + } + else { + return undefined; + } +} +exports.inferLroMode = inferLroMode; +function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== undefined) { + throw new Error(`Polling was unsuccessful. 
Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); + } + switch (status?.toLocaleLowerCase()) { + case undefined: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; + } + } +} +function getStatus(rawResponse) { + const { status } = rawResponse.body ?? {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function getProvisioningState(rawResponse) { + const { properties, provisioningState } = rawResponse.body ?? {}; + const status = properties?.provisioningState ?? provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } + else if (statusCode < 300) { + return "succeeded"; + } + else { + return "failed"; + } +} +function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter)) + : retryAfterInSeconds * 1000; + } + return undefined; +} +exports.parseRetryAfter = parseRetryAfter; +function getErrorFromResponse(response) { + const error = accessBodyProperty(response, "error"); + if (!error) { + logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; + } + if (!error.code || !error.message) { + logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; + } + return error; +} +exports.getErrorFromResponse = getErrorFromResponse; +function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return undefined; +} +function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case undefined: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } + } + const status = helper(); + return status === "running" && operationLocation === undefined ? "succeeded" : status; +} +exports.getStatusFromInitialResponse = getStatusFromInitialResponse; +/** + * Initiates the long-running operation. 
+ */ +async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return (0, operation_js_1.initOperation)({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return { + response, + operationLocation: config?.operationLocation, + resourceLocation: config?.resourceLocation, + ...(config?.mode ? { metadata: { mode: config.mode } } : {}), + }; + }, + stateProxy, + processResult: processResult + ? ({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult, + }); +} +exports.initHttpOperation = initHttpOperation; +function getOperationLocation({ rawResponse }, state) { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse), + }); + } + case "ResourceLocation": { + return getLocationHeader(rawResponse); + } + case "Body": + default: { + return undefined; + } + } +} +exports.getOperationLocation = getOperationLocation; +function getOperationStatus({ rawResponse }, state) { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + } +} +exports.getOperationStatus = getOperationStatus; +function accessBodyProperty({ flatResponse, rawResponse }, prop) { + return flatResponse?.[prop] ?? rawResponse.body?.[prop]; +} +function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; + } + return state.config.resourceLocation; +} +exports.getResourceLocation = getResourceLocation; +function isOperationError(e) { + return e.name === "RestError"; +} +exports.isOperationError = isOperationError; +/** Polls the long-running operation. */ +async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; + return (0, operation_js_1.pollOperation)({ + state, + stateProxy, + setDelay, + processResult: processResult + ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) + : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. 
+ */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult, + }); +} +exports.pollHttpOperation = pollHttpOperation; +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/operation.js.map b/node_modules/@azure/core-lro/dist/commonjs/http/operation.js.map new file mode 100644 index 0000000..fcfd5b9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/http/operation.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAiBlC,yDAAsE;AAEtE,4CAAsC;AAEtC,SAAS,8BAA8B,CAAC,MAGvC;IACC,MAAM,EAAE,mBAAmB,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IAC1D,OAAO,iBAAiB,IAAI,mBAAmB,CAAC;AAClD,CAAC;AAED,SAAS,iBAAiB,CAAC,WAAwB;IACjD,OAAO,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AACzC,CAAC;AAED,SAAS,0BAA0B,CAAC,WAAwB;IAC1D,OAAO,WAAW,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,4BAA4B,CAAC,WAAwB;IAC5D,OAAO,WAAW,CAAC,OAAO,CAAC,sBAAsB,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,oBAAoB,CAAC,MAK7B;IACC,MAAM,EAAE,QAAQ,EAAE,aAAa,EAAE,WAAW,EAAE,sBAAsB,EAAE,GAAG,MAAM,CAAC;IAChF,QAAQ,aAAa,EAAE,CAAC;QACtB,KAAK,KAAK,CAAC,CAAC,CAAC;YACX,OAAO,WAAW,CAAC;QACrB,CAAC;QACD,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,OAAO,SAAS,CAAC;QACnB,CAAC;QACD,KAAK,OAAO,CAAC,CAAC,CAAC;YACb,OAAO,UAAU,EAAE,IAAI,WAAW,CAAC;QACrC,CAAC;QACD,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,UAAU,EAAE,CAAC;QACtB,CAAC;IACH,CAAC;IAED,SAAS,UAAU;QACjB,QAAQ,sBAAsB,EAAE,CAAC;YAC/B,KAAK,uBAAuB,CAAC,CAAC,CAAC;gBAC7B,OAAO,SAAS,CAAC;YACnB,CAAC;YACD,KAAK,cAAc,CAAC,CAAC,CAAC;gBACpB,OAAO,WAAW,CAAC;YACrB,CAAC;YACD,KAAK,UAAU,CAAC;YAChB,OAAO,CAAC,CAAC,CAAC;gBACR,OAAO,QAAQ,CAAC;YAClB,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC;AAED,SAAgB,YAAY,CAAC,MAK5B;IACC,MAAM,EAAE,WAAW,EAAE,aAAa,EAAE,WAAW,EAAE,sBAAsB,EAAE,GAAG,MAAM,CAAC;IACnF,MAAM,iBAAiB,GAAG,0BAA0B,CAAC,WAAW,CAAC,CAAC;IAClE,MAAM,mBAAmB,GAAG,4BAA4B,CAAC,WAAW,CAAC,CAAC;IACtE,MAAM,UAAU,GAAG,8BAA8B,CAAC,EAAE,iBAAiB,EAAE,mBAAmB,EAAE,CAAC,CAAC;IAC9F,MAAM,QAAQ,GAAG,iBAAiB,CAAC,WAAW,CAAC,CAAC;IAChD,MAAM,uBAAuB,GAAG,aAAa,EAAE,iBAAiB,EAAE,CAAC;IACnE,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;QAC7B,OAAO;YACL,IAAI,EAAE,mBAAmB;YACzB,iBAAiB,EAAE,UAAU;YAC7B,gBAAgB,EAAE,oBAAoB,CAAC;gBACrC,aAAa,EAAE,uBAAuB;gBACtC,QAAQ;gBACR,WAAW;gBACX,sBAAsB;aACvB,CAAC;SACH,CAAC;IACJ,CAAC;SAAM,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,kBAAkB;YACxB,iBAAiB,EAAE,QAAQ;SAC5B,CAAC;IACJ,CAAC;SAAM,IAAI,uBAAuB,KAAK,KAAK,IAAI,WAAW,EAAE,CAAC;QAC5D,OAAO;YACL,IAAI,EAAE,MAAM;YACZ,iBAAiB,EAAE,WAAW;SAC/B,CAAC;IACJ,CAAC;SAAM,CAAC;QACN,OAAO,SAAS,CAAC;IACnB,CAAC;AACH,CAAC;AApCD,oCAoCC;AAED,SAAS,eAAe,CAAC,MAA+C;IACtE,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,GAAG,MAAM,CAAC;IACtC,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACvD,MAAM,IAAI,KAAK,CACb,oGAAoG,MAAM,sIAAsI,CACjP,CAAC;IACJ,CAAC;IACD,QAAQ,MAAM,EAAE,iBAAiB,EAAE,EAAE,CAAC;QACpC,KAAK,SAAS;YACZ,OAAO,iBAAiB,CAAC,UAAU,CAAC,CAAC;QACvC,KAAK,WAAW;YACd,OAAO,WAAW,CAAC;QACrB,KAAK,QAAQ;YACX,OAAO,QAAQ,CAAC;QAClB,KAAK,SAAS,CAAC;QACf,KAAK,UAAU,CAAC;QAChB,KAAK,SAAS,CAAC;QACf,KAAK,WAAW,CAAC;QACjB,KAAK,YAAY;YACf,OAAO,SAAS,CAAC;QACnB,KAAK,UAAU,CAAC;QAChB,KAAK,WAAW;YACd,OAAO,UAAU,CAAC;QACpB,OAAO,CAAC,CAAC,CAAC;YACR,kBAAM,CAAC,OAAO,CAAC,uCAAuC,MAAM,EAAE,CAAC,CAAC;YAChE,OAAO,MAAyB,CAAC;QACnC,CAAC;IACH,CAAC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,WAAwB;IACzC,MAAM,EAAE,MAAM,EAAE,GAAI,WAAW,CAAC,IAAqB,IAAI,EAAE,CAAC;IAC5D,OAAO,eAAe,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,SAAS,oBAAoB,CAAC,WAAwB;IACpD,MAAM,EAAE,UAAU,EAAE,iBAAiB
,EAAE,GAAI,WAAW,CAAC,IAAqB,IAAI,EAAE,CAAC;IACnF,MAAM,MAAM,GAAG,UAAU,EAAE,iBAAiB,IAAI,iBAAiB,CAAC;IAClE,OAAO,eAAe,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB;IAC3C,IAAI,UAAU,KAAK,GAAG,EAAE,CAAC;QACvB,OAAO,SAAS,CAAC;IACnB,CAAC;SAAM,IAAI,UAAU,GAAG,GAAG,EAAE,CAAC;QAC5B,OAAO,WAAW,CAAC;IACrB,CAAC;SAAM,CAAC;QACN,OAAO,QAAQ,CAAC;IAClB,CAAC;AACH,CAAC;AAED,SAAgB,eAAe,CAAI,EAAE,WAAW,EAAkB;IAChE,MAAM,UAAU,GAAuB,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;IAC1E,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;QAC7B,wEAAwE;QACxE,MAAM,mBAAmB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;QACjD,OAAO,KAAK,CAAC,mBAAmB,CAAC;YAC/B,CAAC,CAAC,gCAAgC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,CAAC;YACxD,CAAC,CAAC,mBAAmB,GAAG,IAAI,CAAC;IACjC,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAVD,0CAUC;AAED,SAAgB,oBAAoB,CAAI,QAAwB;IAC9D,MAAM,KAAK,GAAG,kBAAkB,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACpD,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,kBAAM,CAAC,OAAO,CACZ,yFAAyF,CAC1F,CAAC;QACF,OAAO;IACT,CAAC;IACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC;QAClC,kBAAM,CAAC,OAAO,CACZ,iHAAiH,CAClH,CAAC;QACF,OAAO;IACT,CAAC;IACD,OAAO,KAAiB,CAAC;AAC3B,CAAC;AAfD,oDAeC;AAED,SAAS,gCAAgC,CAAC,cAAoB;IAC5D,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,MAAM,cAAc,GAAG,cAAc,CAAC,OAAO,EAAE,CAAC;IAChD,IAAI,OAAO,GAAG,cAAc,EAAE,CAAC;QAC7B,OAAO,cAAc,GAAG,OAAO,CAAC;IAClC,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAgB,4BAA4B,CAAS,MAIpD;IACC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IACtD,SAAS,MAAM;QACb,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;QAC7C,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,SAAS;gBACZ,OAAO,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC5D,KAAK,MAAM;gBACT,OAAO,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YAC7C;gBACE,OAAO,SAAS,CAAC;QACrB,CAAC;IACH,CAAC;IACD,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC;IACxB,OAAO,MAAM,KAAK,SAAS,IAAI,iBAAiB,KAAK,SAAS,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC;AACxF,CAAC;AAnBD,oEAmBC;AAED;;GAEG;AACI,KAAK,UAAU,iBAAiB,CAAkB,MAMxD;IACC,MAAM,EAAE,UAAU,EAAE,sBAAsB,EAAE,aAAa,EAAE,GAAG,EAAE,gBAAgB,EAAE,GAAG,MAAM,CAAC;IAC5F,OAAO,IAAA,4BAAa,EAAC;QACnB,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,kBAAkB,EAAE,CAAC;YAChD,MAAM,MAAM,GAAG,YAAY,CAAC;gBAC1B,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,aAAa,EAAE,GAAG,CAAC,aAAa;gBAChC,sBAAsB;aACvB,CAAC,CAAC;YACH,OAAO;gBACL,QAAQ;gBACR,iBAAiB,EAAE,MAAM,EAAE,iBAAiB;gBAC5C,gBAAgB,EAAE,MAAM,EAAE,gBAAgB;gBAC1C,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aAC7D,CAAC;QACJ,CAAC;QACD,UAAU;QACV,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC;YACjE,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;QACjD,kBAAkB,EAAE,4BAA4B;QAChD,gBAAgB;KACjB,CAAC,CAAC;AACL,CAAC;AA/BD,8CA+BC;AAED,SAAgB,oBAAoB,CAClC,EAAE,WAAW,EAAe,EAC5B,KAAuC;IAEvC,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7C,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,OAAO,8BAA8B,CAAC;gBACpC,iBAAiB,EAAE,0BAA0B,CAAC,WAAW,CAAC;gBAC1D,mBAAmB,EAAE,4BAA4B,CAAC,WAAW,CAAC;aAC/D,CAAC,CAAC;QACL,CAAC;QACD,KAAK,kBAAkB,CAAC,CAAC,CAAC;YACxB,OAAO,iBAAiB,CAAC,WAAW,CAAC,CAAC;QACxC,CAAC;QACD,KAAK,MAAM,CAAC;QACZ,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,SAAS,CAAC;QACnB,CAAC;IACH,CAAC;AACH,CAAC;AApBD,oDAoBC;AAED,SAAgB,kBAAkB,CAChC,EAAE,WAAW,EAAe,EAC5B,KAAuC;IAEvC,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7C,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,OAAO,SAAS,CAAC,WAAW,CAAC,CAAC;QAChC,CAAC;QACD,K
AAK,kBAAkB,CAAC,CAAC,CAAC;YACxB,OAAO,iBAAiB,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;QACnD,CAAC;QACD,KAAK,MAAM,CAAC,CAAC,CAAC;YACZ,OAAO,oBAAoB,CAAC,WAAW,CAAC,CAAC;QAC3C,CAAC;QACD;YACE,MAAM,IAAI,KAAK,CAAC,8CAA8C,IAAI,EAAE,CAAC,CAAC;IAC1E,CAAC;AACH,CAAC;AAlBD,gDAkBC;AAED,SAAS,kBAAkB,CACzB,EAAE,YAAY,EAAE,WAAW,EAAe,EAC1C,IAAO;IAEP,OAAQ,YAA6B,EAAE,CAAC,IAAI,CAAC,IAAK,WAAW,CAAC,IAAqB,EAAE,CAAC,IAAI,CAAC,CAAC;AAC9F,CAAC;AAED,SAAgB,mBAAmB,CACjC,GAAgB,EAChB,KAAuC;IAEvC,MAAM,GAAG,GAAG,kBAAkB,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC;IACxD,IAAI,GAAG,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE,CAAC;QACnC,KAAK,CAAC,MAAM,CAAC,gBAAgB,GAAG,GAAG,CAAC;IACtC,CAAC;IACD,OAAO,KAAK,CAAC,MAAM,CAAC,gBAAgB,CAAC;AACvC,CAAC;AATD,kDASC;AAED,SAAgB,gBAAgB,CAAC,CAAQ;IACvC,OAAO,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC;AAChC,CAAC;AAFD,4CAEC;AAED,wCAAwC;AACjC,KAAK,UAAU,iBAAiB,CAAkB,MAUxD;IACC,MAAM,EACJ,GAAG,EACH,UAAU,EACV,OAAO,EACP,aAAa,EACb,WAAW,EACX,QAAQ,EACR,KAAK,EACL,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,OAAO,IAAA,4BAAa,EAAC;QACnB,KAAK;QACL,UAAU;QACV,QAAQ;QACR,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,UAAU,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,UAAU,CAAC;YAC3E,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;QACjD,QAAQ,EAAE,oBAAoB;QAC9B,WAAW;QACX,kBAAkB,EAAE,eAAe;QACnC,oBAAoB;QACpB,kBAAkB;QAClB,gBAAgB;QAChB,mBAAmB;QACnB,OAAO;QACP;;;WAGG;QACH,IAAI,EAAE,KAAK,EAAE,QAAgB,EAAE,YAAgD,EAAE,EAAE,CACjF,GAAG,CAAC,eAAe,CAAC,QAAQ,EAAE,YAAY,CAAC;QAC7C,gBAAgB;KACjB,CAAC,CAAC;AACL,CAAC;AA5CD,8CA4CC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n HttpOperationMode,\n LongRunningOperation,\n LroResourceLocationConfig,\n LroResponse,\n RawResponse,\n ResponseBody,\n} from \"./models.js\";\nimport {\n LroError,\n OperationConfig,\n OperationStatus,\n RestorableOperationState,\n StateProxy,\n} from \"../poller/models.js\";\nimport { initOperation, pollOperation } from \"../poller/operation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { logger } from \"../logger.js\";\n\nfunction getOperationLocationPollingUrl(inputs: {\n operationLocation?: string;\n azureAsyncOperation?: string;\n}): string | undefined {\n const { azureAsyncOperation, operationLocation } = inputs;\n return operationLocation ?? azureAsyncOperation;\n}\n\nfunction getLocationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"location\"];\n}\n\nfunction getOperationLocationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"operation-location\"];\n}\n\nfunction getAzureAsyncOperationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"azure-asyncoperation\"];\n}\n\nfunction findResourceLocation(inputs: {\n requestMethod?: string;\n location?: string;\n requestPath?: string;\n resourceLocationConfig?: LroResourceLocationConfig;\n}): string | undefined {\n const { location, requestMethod, requestPath, resourceLocationConfig } = inputs;\n switch (requestMethod) {\n case \"PUT\": {\n return requestPath;\n }\n case \"DELETE\": {\n return undefined;\n }\n case \"PATCH\": {\n return getDefault() ?? 
requestPath;\n }\n default: {\n return getDefault();\n }\n }\n\n function getDefault() {\n switch (resourceLocationConfig) {\n case \"azure-async-operation\": {\n return undefined;\n }\n case \"original-uri\": {\n return requestPath;\n }\n case \"location\":\n default: {\n return location;\n }\n }\n }\n}\n\nexport function inferLroMode(inputs: {\n rawResponse: RawResponse;\n requestPath?: string;\n requestMethod?: string;\n resourceLocationConfig?: LroResourceLocationConfig;\n}): (OperationConfig & { mode: HttpOperationMode }) | undefined {\n const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs;\n const operationLocation = getOperationLocationHeader(rawResponse);\n const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse);\n const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation });\n const location = getLocationHeader(rawResponse);\n const normalizedRequestMethod = requestMethod?.toLocaleUpperCase();\n if (pollingUrl !== undefined) {\n return {\n mode: \"OperationLocation\",\n operationLocation: pollingUrl,\n resourceLocation: findResourceLocation({\n requestMethod: normalizedRequestMethod,\n location,\n requestPath,\n resourceLocationConfig,\n }),\n };\n } else if (location !== undefined) {\n return {\n mode: \"ResourceLocation\",\n operationLocation: location,\n };\n } else if (normalizedRequestMethod === \"PUT\" && requestPath) {\n return {\n mode: \"Body\",\n operationLocation: requestPath,\n };\n } else {\n return undefined;\n }\n}\n\nfunction transformStatus(inputs: { status: unknown; statusCode: number }): OperationStatus {\n const { status, statusCode } = inputs;\n if (typeof status !== \"string\" && status !== undefined) {\n throw new Error(\n `Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`,\n );\n }\n switch (status?.toLocaleLowerCase()) {\n case undefined:\n return toOperationStatus(statusCode);\n case \"succeeded\":\n return \"succeeded\";\n case \"failed\":\n return \"failed\";\n case \"running\":\n case \"accepted\":\n case \"started\":\n case \"canceling\":\n case \"cancelling\":\n return \"running\";\n case \"canceled\":\n case \"cancelled\":\n return \"canceled\";\n default: {\n logger.verbose(`LRO: unrecognized operation status: ${status}`);\n return status as OperationStatus;\n }\n }\n}\n\nfunction getStatus(rawResponse: RawResponse): OperationStatus {\n const { status } = (rawResponse.body as ResponseBody) ?? {};\n return transformStatus({ status, statusCode: rawResponse.statusCode });\n}\n\nfunction getProvisioningState(rawResponse: RawResponse): OperationStatus {\n const { properties, provisioningState } = (rawResponse.body as ResponseBody) ?? {};\n const status = properties?.provisioningState ?? 
provisioningState;\n return transformStatus({ status, statusCode: rawResponse.statusCode });\n}\n\nfunction toOperationStatus(statusCode: number): OperationStatus {\n if (statusCode === 202) {\n return \"running\";\n } else if (statusCode < 300) {\n return \"succeeded\";\n } else {\n return \"failed\";\n }\n}\n\nexport function parseRetryAfter({ rawResponse }: LroResponse): number | undefined {\n const retryAfter: string | undefined = rawResponse.headers[\"retry-after\"];\n if (retryAfter !== undefined) {\n // Retry-After header value is either in HTTP date format, or in seconds\n const retryAfterInSeconds = parseInt(retryAfter);\n return isNaN(retryAfterInSeconds)\n ? calculatePollingIntervalFromDate(new Date(retryAfter))\n : retryAfterInSeconds * 1000;\n }\n return undefined;\n}\n\nexport function getErrorFromResponse(response: LroResponse): LroError | undefined {\n const error = accessBodyProperty(response, \"error\");\n if (!error) {\n logger.warning(\n `The long-running operation failed but there is no error property in the response's body`,\n );\n return;\n }\n if (!error.code || !error.message) {\n logger.warning(\n `The long-running operation failed but the error property in the response's body doesn't contain code or message`,\n );\n return;\n }\n return error as LroError;\n}\n\nfunction calculatePollingIntervalFromDate(retryAfterDate: Date): number | undefined {\n const timeNow = Math.floor(new Date().getTime());\n const retryAfterTime = retryAfterDate.getTime();\n if (timeNow < retryAfterTime) {\n return retryAfterTime - timeNow;\n }\n return undefined;\n}\n\nexport function getStatusFromInitialResponse(inputs: {\n response: LroResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n}): OperationStatus {\n const { response, state, operationLocation } = inputs;\n function helper(): OperationStatus {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case undefined:\n return toOperationStatus(response.rawResponse.statusCode);\n case \"Body\":\n return getOperationStatus(response, state);\n default:\n return \"running\";\n }\n }\n const status = helper();\n return status === \"running\" && operationLocation === undefined ? \"succeeded\" : status;\n}\n\n/**\n * Initiates the long-running operation.\n */\nexport async function initHttpOperation(inputs: {\n stateProxy: StateProxy;\n resourceLocationConfig?: LroResourceLocationConfig;\n processResult?: (result: unknown, state: TState) => TResult;\n setErrorAsResult: boolean;\n lro: LongRunningOperation;\n}): Promise> {\n const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs;\n return initOperation({\n init: async () => {\n const response = await lro.sendInitialRequest();\n const config = inferLroMode({\n rawResponse: response.rawResponse,\n requestPath: lro.requestPath,\n requestMethod: lro.requestMethod,\n resourceLocationConfig,\n });\n return {\n response,\n operationLocation: config?.operationLocation,\n resourceLocation: config?.resourceLocation,\n ...(config?.mode ? { metadata: { mode: config.mode } } : {}),\n };\n },\n stateProxy,\n processResult: processResult\n ? 
({ flatResponse }, state) => processResult(flatResponse, state)\n : ({ flatResponse }) => flatResponse as TResult,\n getOperationStatus: getStatusFromInitialResponse,\n setErrorAsResult,\n });\n}\n\nexport function getOperationLocation(\n { rawResponse }: LroResponse,\n state: RestorableOperationState,\n): string | undefined {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case \"OperationLocation\": {\n return getOperationLocationPollingUrl({\n operationLocation: getOperationLocationHeader(rawResponse),\n azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse),\n });\n }\n case \"ResourceLocation\": {\n return getLocationHeader(rawResponse);\n }\n case \"Body\":\n default: {\n return undefined;\n }\n }\n}\n\nexport function getOperationStatus(\n { rawResponse }: LroResponse,\n state: RestorableOperationState,\n): OperationStatus {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case \"OperationLocation\": {\n return getStatus(rawResponse);\n }\n case \"ResourceLocation\": {\n return toOperationStatus(rawResponse.statusCode);\n }\n case \"Body\": {\n return getProvisioningState(rawResponse);\n }\n default:\n throw new Error(`Internal error: Unexpected operation mode: ${mode}`);\n }\n}\n\nfunction accessBodyProperty
<P extends keyof ResponseBody>
(\n { flatResponse, rawResponse }: LroResponse,\n prop: P,\n): ResponseBody[P] {\n return (flatResponse as ResponseBody)?.[prop] ?? (rawResponse.body as ResponseBody)?.[prop];\n}\n\nexport function getResourceLocation(\n res: LroResponse,\n state: RestorableOperationState,\n): string | undefined {\n const loc = accessBodyProperty(res, \"resourceLocation\");\n if (loc && typeof loc === \"string\") {\n state.config.resourceLocation = loc;\n }\n return state.config.resourceLocation;\n}\n\nexport function isOperationError(e: Error): boolean {\n return e.name === \"RestError\";\n}\n\n/** Polls the long-running operation. */\nexport async function pollHttpOperation(inputs: {\n lro: LongRunningOperation;\n stateProxy: StateProxy;\n processResult?: (result: unknown, state: TState) => TResult;\n updateState?: (state: TState, lastResponse: LroResponse) => void;\n isDone?: (lastResponse: LroResponse, state: TState) => boolean;\n setDelay: (intervalInMs: number) => void;\n options?: { abortSignal?: AbortSignalLike };\n state: RestorableOperationState;\n setErrorAsResult: boolean;\n}): Promise {\n const {\n lro,\n stateProxy,\n options,\n processResult,\n updateState,\n setDelay,\n state,\n setErrorAsResult,\n } = inputs;\n return pollOperation({\n state,\n stateProxy,\n setDelay,\n processResult: processResult\n ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState)\n : ({ flatResponse }) => flatResponse as TResult,\n getError: getErrorFromResponse,\n updateState,\n getPollingInterval: parseRetryAfter,\n getOperationLocation,\n getOperationStatus,\n isOperationError,\n getResourceLocation,\n options,\n /**\n * The expansion here is intentional because `lro` could be an object that\n * references an inner this, so we need to preserve a reference to it.\n */\n poll: async (location: string, inputOptions?: { abortSignal?: AbortSignalLike }) =>\n lro.sendPollRequest(location, inputOptions),\n setErrorAsResult,\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/poller.d.ts b/node_modules/@azure/core-lro/dist/commonjs/http/poller.d.ts new file mode 100644 index 0000000..5199c5c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/poller.d.ts @@ -0,0 +1,11 @@ +import { LongRunningOperation } from "./models.js"; +import { OperationState, SimplePollerLike } from "../poller/models.js"; +import { CreateHttpPollerOptions } from "./models.js"; +/** + * Creates a poller that can be used to poll a long-running operation. 
+ * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +export declare function createHttpPoller>(lro: LongRunningOperation, options?: CreateHttpPollerOptions): Promise>; +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/poller.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/http/poller.d.ts.map new file mode 100644 index 0000000..5f6cfe9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/poller.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/http/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAe,MAAM,aAAa,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAWvE,OAAO,EAAE,uBAAuB,EAAE,MAAM,aAAa,CAAC;AAGtD;;;;;GAKG;AACH,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EACpF,GAAG,EAAE,oBAAoB,EACzB,OAAO,CAAC,EAAE,uBAAuB,CAAC,OAAO,EAAE,MAAM,CAAC,GACjD,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAgD5C"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/poller.js b/node_modules/@azure/core-lro/dist/commonjs/http/poller.js new file mode 100644 index 0000000..52d6606 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/poller.js @@ -0,0 +1,53 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createHttpPoller = void 0; +const operation_js_1 = require("./operation.js"); +const poller_js_1 = require("../poller/poller.js"); +/** + * Creates a poller that can be used to poll a long-running operation. + * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; + return (0, poller_js_1.buildCreatePoller)({ + getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, + getStatusFromPollResponse: operation_js_1.getOperationStatus, + isOperationError: operation_js_1.isOperationError, + getOperationLocation: operation_js_1.getOperationLocation, + getResourceLocation: operation_js_1.getResourceLocation, + getPollingInterval: operation_js_1.parseRetryAfter, + getError: operation_js_1.getErrorFromResponse, + resolveOnUnsuccessful, + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = (0, operation_js_1.inferLroMode)({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return { + response, + operationLocation: config?.operationLocation, + resourceLocation: config?.resourceLocation, + ...(config?.mode ? { metadata: { mode: config.mode } } : {}), + }; + }, + poll: lro.sendPollRequest, + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult + ? 
({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + }); +} +exports.createHttpPoller = createHttpPoller; +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/poller.js.map b/node_modules/@azure/core-lro/dist/commonjs/http/poller.js.map new file mode 100644 index 0000000..64d196a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/http/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/http/poller.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAIlC,iDASwB;AAExB,mDAAwD;AAExD;;;;;GAKG;AACI,KAAK,UAAU,gBAAgB,CACpC,GAAyB,EACzB,OAAkD;IAElD,MAAM,EACJ,sBAAsB,EACtB,YAAY,EACZ,aAAa,EACb,WAAW,EACX,WAAW,EACX,qBAAqB,EACrB,qBAAqB,GAAG,KAAK,GAC9B,GAAG,OAAO,IAAI,EAAE,CAAC;IAClB,OAAO,IAAA,6BAAiB,EAA+B;QACrD,4BAA4B,EAA5B,2CAA4B;QAC5B,yBAAyB,EAAE,iCAAkB;QAC7C,gBAAgB,EAAhB,+BAAgB;QAChB,oBAAoB,EAApB,mCAAoB;QACpB,mBAAmB,EAAnB,kCAAmB;QACnB,kBAAkB,EAAE,8BAAe;QACnC,QAAQ,EAAE,mCAAoB;QAC9B,qBAAqB;KACtB,CAAC,CACA;QACE,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,kBAAkB,EAAE,CAAC;YAChD,MAAM,MAAM,GAAG,IAAA,2BAAY,EAAC;gBAC1B,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,aAAa,EAAE,GAAG,CAAC,aAAa;gBAChC,sBAAsB;aACvB,CAAC,CAAC;YACH,OAAO;gBACL,QAAQ;gBACR,iBAAiB,EAAE,MAAM,EAAE,iBAAiB;gBAC5C,gBAAgB,EAAE,MAAM,EAAE,gBAAgB;gBAC1C,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aAC7D,CAAC;QACJ,CAAC;QACD,IAAI,EAAE,GAAG,CAAC,eAAe;KAC1B,EACD;QACE,YAAY;QACZ,qBAAqB;QACrB,WAAW;QACX,WAAW;QACX,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC;YACjE,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;KAClD,CACF,CAAC;AACJ,CAAC;AAnDD,4CAmDC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LongRunningOperation, LroResponse } from \"./models.js\";\nimport { OperationState, SimplePollerLike } from \"../poller/models.js\";\nimport {\n getErrorFromResponse,\n getOperationLocation,\n getOperationStatus,\n getResourceLocation,\n getStatusFromInitialResponse,\n inferLroMode,\n isOperationError,\n parseRetryAfter,\n} from \"./operation.js\";\nimport { CreateHttpPollerOptions } from \"./models.js\";\nimport { buildCreatePoller } from \"../poller/poller.js\";\n\n/**\n * Creates a poller that can be used to poll a long-running operation.\n * @param lro - Description of the long-running operation\n * @param options - options to configure the poller\n * @returns an initialized poller\n */\nexport async function createHttpPoller>(\n lro: LongRunningOperation,\n options?: CreateHttpPollerOptions,\n): Promise> {\n const {\n resourceLocationConfig,\n intervalInMs,\n processResult,\n restoreFrom,\n updateState,\n withOperationLocation,\n resolveOnUnsuccessful = false,\n } = options || {};\n return buildCreatePoller({\n getStatusFromInitialResponse,\n getStatusFromPollResponse: getOperationStatus,\n isOperationError,\n getOperationLocation,\n getResourceLocation,\n getPollingInterval: parseRetryAfter,\n getError: getErrorFromResponse,\n resolveOnUnsuccessful,\n })(\n {\n init: async () => {\n const response = await lro.sendInitialRequest();\n const config = inferLroMode({\n rawResponse: response.rawResponse,\n requestPath: lro.requestPath,\n requestMethod: lro.requestMethod,\n resourceLocationConfig,\n });\n return {\n response,\n operationLocation: 
config?.operationLocation,\n resourceLocation: config?.resourceLocation,\n ...(config?.mode ? { metadata: { mode: config.mode } } : {}),\n };\n },\n poll: lro.sendPollRequest,\n },\n {\n intervalInMs,\n withOperationLocation,\n restoreFrom,\n updateState,\n processResult: processResult\n ? ({ flatResponse }, state) => processResult(flatResponse, state)\n : ({ flatResponse }) => flatResponse as TResult,\n },\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/index.d.ts b/node_modules/@azure/core-lro/dist/commonjs/index.d.ts new file mode 100644 index 0000000..9783948 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/index.d.ts @@ -0,0 +1,13 @@ +export { createHttpPoller } from "./http/poller.js"; +export { CancelOnProgress, OperationState, OperationStatus, SimplePollerLike, } from "./poller/models.js"; +export { CreateHttpPollerOptions } from "./http/models.js"; +export { LroResourceLocationConfig, LongRunningOperation, LroResponse, RawResponse, } from "./http/models.js"; +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +/** legacy */ +export * from "./legacy/lroEngine/index.js"; +export * from "./legacy/poller.js"; +export * from "./legacy/pollOperation.js"; +export { PollerLike } from "./legacy/models.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/index.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000..c096dc1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AACpD,OAAO,EACL,gBAAgB,EAChB,cAAc,EACd,eAAe,EACf,gBAAgB,GACjB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,uBAAuB,EAAE,MAAM,kBAAkB,CAAC;AAC3D,OAAO,EACL,yBAAyB,EACzB,oBAAoB,EACpB,WAAW,EACX,WAAW,GACZ,MAAM,kBAAkB,CAAC;AAE1B;;GAEG;AAUH,aAAa;AACb,cAAc,6BAA6B,CAAC;AAC5C,cAAc,oBAAoB,CAAC;AACnC,cAAc,2BAA2B,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/index.js b/node_modules/@azure/core-lro/dist/commonjs/index.js new file mode 100644 index 0000000..ec5cd53 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/index.js @@ -0,0 +1,24 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.createHttpPoller = void 0; +const tslib_1 = require("tslib"); +var poller_js_1 = require("./http/poller.js"); +Object.defineProperty(exports, "createHttpPoller", { enumerable: true, get: function () { return poller_js_1.createHttpPoller; } }); +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +// export { +// BuildCreatePollerOptions, +// Operation, +// CreatePollerOptions, +// OperationConfig, +// RestorableOperationState, +// } from "./poller/models"; +// export { buildCreatePoller } from "./poller/poller"; +/** legacy */ +tslib_1.__exportStar(require("./legacy/lroEngine/index.js"), exports); +tslib_1.__exportStar(require("./legacy/poller.js"), exports); +tslib_1.__exportStar(require("./legacy/pollOperation.js"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/index.js.map b/node_modules/@azure/core-lro/dist/commonjs/index.js.map new file mode 100644 index 0000000..695964c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;;AAElC,8CAAoD;AAA3C,6GAAA,gBAAgB,OAAA;AAezB;;GAEG;AACH,WAAW;AACX,8BAA8B;AAC9B,eAAe;AACf,yBAAyB;AACzB,qBAAqB;AACrB,8BAA8B;AAC9B,4BAA4B;AAC5B,uDAAuD;AAEvD,aAAa;AACb,sEAA4C;AAC5C,6DAAmC;AACnC,oEAA0C","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { createHttpPoller } from \"./http/poller.js\";\nexport {\n CancelOnProgress,\n OperationState,\n OperationStatus,\n SimplePollerLike,\n} from \"./poller/models.js\";\nexport { CreateHttpPollerOptions } from \"./http/models.js\";\nexport {\n LroResourceLocationConfig,\n LongRunningOperation,\n LroResponse,\n RawResponse,\n} from \"./http/models.js\";\n\n/**\n * This can be uncommented to expose the protocol-agnostic poller\n */\n// export {\n// BuildCreatePollerOptions,\n// Operation,\n// CreatePollerOptions,\n// OperationConfig,\n// RestorableOperationState,\n// } from \"./poller/models\";\n// export { buildCreatePoller } from \"./poller/poller\";\n\n/** legacy */\nexport * from \"./legacy/lroEngine/index.js\";\nexport * from \"./legacy/poller.js\";\nexport * from \"./legacy/pollOperation.js\";\nexport { PollerLike } from \"./legacy/models.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.d.ts b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.d.ts new file mode 100644 index 0000000..b0d84c8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.d.ts @@ -0,0 +1,3 @@ +export { LroEngine } from "./lroEngine.js"; +export { LroEngineOptions } from "./models.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.d.ts.map new file mode 100644 index 0000000..ebf1159 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAC3C,OAAO,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js new file mode 100644 index 0000000..a9e0079 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LroEngine = void 0; +var lroEngine_js_1 = require("./lroEngine.js"); +Object.defineProperty(exports, "LroEngine", { enumerable: true, get: function () { return lroEngine_js_1.LroEngine; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js.map new file mode 100644 index 0000000..cfe9839 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,+CAA2C;AAAlC,yGAAA,SAAS,OAAA","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { LroEngine } from \"./lroEngine.js\";\nexport { LroEngineOptions } from \"./models.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.d.ts b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.d.ts new file mode 100644 index 0000000..937101c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.d.ts @@ -0,0 +1,16 @@ +import { LroEngineOptions } from "./models.js"; +import { LongRunningOperation } from "../../http/models.js"; +import { PollOperationState } from "../pollOperation.js"; +import { Poller } from "../poller.js"; +/** + * The LRO Engine, a class that performs polling. + */ +export declare class LroEngine> extends Poller { + private config; + constructor(lro: LongRunningOperation, options?: LroEngineOptions); + /** + * The method used by the poller to wait before attempting to update its operation. 
+ */ + delay(): Promise; +} +//# sourceMappingURL=lroEngine.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.d.ts.map new file mode 100644 index 0000000..640c27e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAgB,MAAM,aAAa,CAAC;AAE7D,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAE5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAItC;;GAEG;AACH,qBAAa,SAAS,CAAC,OAAO,EAAE,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,CAAE,SAAQ,MAAM,CACxF,MAAM,EACN,OAAO,CACR;IACC,OAAO,CAAC,MAAM,CAAe;gBAEjB,GAAG,EAAE,oBAAoB,CAAC,OAAO,CAAC,EAAE,OAAO,CAAC,EAAE,gBAAgB,CAAC,OAAO,EAAE,MAAM,CAAC;IA6B3F;;OAEG;IACH,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAGvB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js new file mode 100644 index 0000000..ed5cd57 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js @@ -0,0 +1,34 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LroEngine = void 0; +const operation_js_1 = require("./operation.js"); +const constants_js_1 = require("../../poller/constants.js"); +const poller_js_1 = require("../poller.js"); +const operation_js_2 = require("../../poller/operation.js"); +/** + * The LRO Engine, a class that performs polling. + */ +class LroEngine extends poller_js_1.Poller { + config; + constructor(lro, options) { + const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; + const state = resumeFrom + ? (0, operation_js_2.deserializeState)(resumeFrom) + : {}; + const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. 
+ */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} +exports.LroEngine = LroEngine; +//# sourceMappingURL=lroEngine.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js.map new file mode 100644 index 0000000..25e6359 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/lroEngine.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAGlC,iDAAsD;AAEtD,4DAAgE;AAEhE,4CAAsC;AAEtC,4DAA6D;AAE7D;;GAEG;AACH,MAAa,SAA+D,SAAQ,kBAGnF;IACS,MAAM,CAAe;IAE7B,YAAY,GAAkC,EAAE,OAA2C;QACzF,MAAM,EACJ,YAAY,GAAG,kCAAmB,EAClC,UAAU,EACV,qBAAqB,GAAG,KAAK,EAC7B,MAAM,EACN,yBAAyB,EACzB,aAAa,EACb,WAAW,GACZ,GAAG,OAAO,IAAI,EAAE,CAAC;QAClB,MAAM,KAAK,GAAqC,UAAU;YACxD,CAAC,CAAC,IAAA,+BAAgB,EAAC,UAAU,CAAC;YAC9B,CAAC,CAAE,EAAuC,CAAC;QAC7C,MAAM,SAAS,GAAG,IAAI,mCAAoB,CACxC,KAAK,EACL,GAAG,EACH,CAAC,qBAAqB,EACtB,yBAAyB,EACzB,aAAa,EACb,WAAW,EACX,MAAM,CACP,CAAC;QACF,KAAK,CAAC,SAAS,CAAC,CAAC;QACjB,IAAI,CAAC,qBAAqB,GAAG,qBAAqB,CAAC;QAEnD,IAAI,CAAC,MAAM,GAAG,EAAE,YAAY,EAAE,YAAY,EAAE,CAAC;QAC7C,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACzC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;IACzF,CAAC;CACF;AAzCD,8BAyCC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroEngineOptions, PollerConfig } from \"./models.js\";\nimport { GenericPollOperation } from \"./operation.js\";\nimport { LongRunningOperation } from \"../../http/models.js\";\nimport { POLL_INTERVAL_IN_MS } from \"../../poller/constants.js\";\nimport { PollOperationState } from \"../pollOperation.js\";\nimport { Poller } from \"../poller.js\";\nimport { RestorableOperationState } from \"../../poller/models.js\";\nimport { deserializeState } from \"../../poller/operation.js\";\n\n/**\n * The LRO Engine, a class that performs polling.\n */\nexport class LroEngine> extends Poller<\n TState,\n TResult\n> {\n private config: PollerConfig;\n\n constructor(lro: LongRunningOperation, options?: LroEngineOptions) {\n const {\n intervalInMs = POLL_INTERVAL_IN_MS,\n resumeFrom,\n resolveOnUnsuccessful = false,\n isDone,\n lroResourceLocationConfig,\n processResult,\n updateState,\n } = options || {};\n const state: RestorableOperationState = resumeFrom\n ? 
deserializeState(resumeFrom)\n : ({} as RestorableOperationState);\n const operation = new GenericPollOperation(\n state,\n lro,\n !resolveOnUnsuccessful,\n lroResourceLocationConfig,\n processResult,\n updateState,\n isDone,\n );\n super(operation);\n this.resolveOnUnsuccessful = resolveOnUnsuccessful;\n\n this.config = { intervalInMs: intervalInMs };\n operation.setPollerConfig(this.config);\n }\n\n /**\n * The method used by the poller to wait before attempting to update its operation.\n */\n delay(): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.d.ts b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.d.ts new file mode 100644 index 0000000..bf26d04 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.d.ts @@ -0,0 +1,38 @@ +import { LroResourceLocationConfig, RawResponse } from "../../http/models.js"; +/** + * Options for the LRO poller. + */ +export interface LroEngineOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + resumeFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + lroResourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: RawResponse) => void; + /** + * A predicate to determine whether the LRO finished processing. + */ + isDone?: (lastResponse: unknown, state: TState) => boolean; + /** + * Control whether to throw an exception if the operation failed or was canceled. 
+ */ + resolveOnUnsuccessful?: boolean; +} +export interface PollerConfig { + intervalInMs: number; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.d.ts.map new file mode 100644 index 0000000..c880365 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,yBAAyB,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAE9E;;GAEG;AACH,MAAM,WAAW,gBAAgB,CAAC,OAAO,EAAE,MAAM;IAC/C;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,yBAAyB,CAAC,EAAE,yBAAyB,CAAC;IACtD;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,WAAW,KAAK,IAAI,CAAC;IACjE;;OAEG;IACH,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC3D;;OAEG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,WAAW,YAAY;IAC3B,YAAY,EAAE,MAAM,CAAC;CACtB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.js new file mode 100644 index 0000000..783931f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.js.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.js.map new file mode 100644 index 0000000..13b2b73 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/models.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroResourceLocationConfig, RawResponse } from \"../../http/models.js\";\n\n/**\n * Options for the LRO poller.\n */\nexport interface LroEngineOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n resumeFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n lroResourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: RawResponse) => void;\n /**\n * A predicate to determine whether the LRO finished processing.\n */\n isDone?: (lastResponse: unknown, state: TState) => boolean;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful?: boolean;\n}\n\nexport interface PollerConfig {\n intervalInMs: number;\n}\n"]} \ No newline at end of 
file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.d.ts b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.d.ts new file mode 100644 index 0000000..d1257d1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.d.ts @@ -0,0 +1,27 @@ +import { LongRunningOperation, LroResourceLocationConfig, RawResponse } from "../../http/models.js"; +import { PollOperation, PollOperationState } from "../pollOperation.js"; +import { RestorableOperationState } from "../../poller/models.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { PollerConfig } from "./models.js"; +export declare class GenericPollOperation> implements PollOperation { + state: RestorableOperationState; + private lro; + private setErrorAsResult; + private lroResourceLocationConfig?; + private processResult?; + private updateState?; + private isDone?; + private pollerConfig?; + constructor(state: RestorableOperationState, lro: LongRunningOperation, setErrorAsResult: boolean, lroResourceLocationConfig?: LroResourceLocationConfig | undefined, processResult?: ((result: unknown, state: TState) => TResult) | undefined, updateState?: ((state: TState, lastResponse: RawResponse) => void) | undefined, isDone?: ((lastResponse: TResult, state: TState) => boolean) | undefined); + setPollerConfig(pollerConfig: PollerConfig): void; + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + cancel(): Promise>; + /** + * Serializes the Poller operation. + */ + toString(): string; +} +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.d.ts.map new file mode 100644 index 0000000..9b66455 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,yBAAyB,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACpG,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACxE,OAAO,EAAE,wBAAwB,EAAc,MAAM,wBAAwB,CAAC;AAE9E,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAyB3C,qBAAa,oBAAoB,CAAC,OAAO,EAAE,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,CACnF,YAAW,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;IAKhC,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC;IAC9C,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,gBAAgB;IACxB,OAAO,CAAC,yBAAyB,CAAC;IAClC,OAAO,CAAC,aAAa,CAAC;IACtB,OAAO,CAAC,WAAW,CAAC;IACpB,OAAO,CAAC,MAAM,CAAC;IATjB,OAAO,CAAC,YAAY,CAAC,CAAe;gBAG3B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,EACtC,GAAG,EAAE,oBAAoB,EACzB,gBAAgB,EAAE,OAAO,EACzB,yBAAyB,CAAC,uCAA2B,EACrD,aAAa,CAAC,YAAW,OAAO,SAAS,MAAM,KAAK,OAAO,aAAA,EAC3D,WAAW,CAAC,WAAU,MAAM,gBAAgB,WAAW,KAAK,IAAI,aAAA,EAChE,MAAM,CAAC,kBAAiB,OAAO,SAAS,MAAM,KAAK,OAAO,aAAA;IAG7D,eAAe,CAAC,YAAY,EAAE,YAAY,GAAG,IAAI;IAIlD,MAAM,CAAC,OAAO,CAAC,EAAE;QACrB,WAAW,CAAC,EAAE,eAAe,CAAC;QAC9B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;KACxC,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAwCrC,MAAM,IAAI,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAKvD;;OAEG;IACI,QAAQ,IAAI,MAAM;CAK1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js new file mode 100644 index 0000000..16b75c7 --- 
/dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js @@ -0,0 +1,98 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GenericPollOperation = void 0; +const operation_js_1 = require("../../http/operation.js"); +const logger_js_1 = require("../../logger.js"); +const createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => (state.isCancelled = true), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.isStarted = true), + setSucceeded: (state) => (state.isCompleted = true), + setFailed: () => { + /** empty body */ + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), +}); +class GenericPollOperation { + state; + lro; + setErrorAsResult; + lroResourceLocationConfig; + processResult; + updateState; + isDone; + pollerConfig; + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + async update(options) { + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = { + ...this.state, + ...(await (0, operation_js_1.initHttpOperation)({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + })), + }; + } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === undefined) { + await (0, operation_js_1.pollHttpOperation)({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState + ? (state, { rawResponse }) => updateState(state, rawResponse) + : undefined, + isDone: isDone + ? ({ flatResponse }, state) => isDone(flatResponse, state) + : undefined, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult, + }); + } + options?.fireProgress?.(this.state); + return this; + } + async cancel() { + logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; + } + /** + * Serializes the Poller operation. 
+ */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} +exports.GenericPollOperation = GenericPollOperation; +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js.map new file mode 100644 index 0000000..e05ad48 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/operation.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAKlC,0DAA+E;AAG/E,+CAAyC;AAEzC,MAAM,gBAAgB,GAGlB,GAAG,EAAE,CAAC,CAAC;IACT,SAAS,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,CAAQ;IAC3D,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;IACjD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;IACrD,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;IAC/C,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IACnD,SAAS,EAAE,GAAG,EAAE;QACd,iBAAiB;IACnB,CAAC;IAED,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK;IAChC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM;IAClC,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW;IAC1C,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;IAClC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS;IACvC,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC;CACzF,CAAC,CAAC;AAEH,MAAa,oBAAoB;IAMtB;IACC;IACA;IACA;IACA;IACA;IACA;IATF,YAAY,CAAgB;IAEpC,YACS,KAAuC,EACtC,GAAyB,EACzB,gBAAyB,EACzB,yBAAqD,EACrD,aAA2D,EAC3D,WAAgE,EAChE,MAA0D;QAN3D,UAAK,GAAL,KAAK,CAAkC;QACtC,QAAG,GAAH,GAAG,CAAsB;QACzB,qBAAgB,GAAhB,gBAAgB,CAAS;QACzB,8BAAyB,GAAzB,yBAAyB,CAA4B;QACrD,kBAAa,GAAb,aAAa,CAA8C;QAC3D,gBAAW,GAAX,WAAW,CAAqD;QAChE,WAAM,GAAN,MAAM,CAAoD;IACjE,CAAC;IAEG,eAAe,CAAC,YAA0B;QAC/C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,OAGZ;QACC,MAAM,UAAU,GAAG,gBAAgB,EAAmB,CAAC;QACvD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,GAAG;gBACX,GAAG,IAAI,CAAC,KAAK;gBACb,GAAG,CAAC,MAAM,IAAA,gCAAiB,EAAC;oBAC1B,GAAG,EAAE,IAAI,CAAC,GAAG;oBACb,UAAU;oBACV,sBAAsB,EAAE,IAAI,CAAC,yBAAyB;oBACtD,aAAa,EAAE,IAAI,CAAC,aAAa;oBACjC,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;iBACxC,CAAC,CAAC;aACJ,CAAC;QACJ,CAAC;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC;QACrC,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QAE3B,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,KAAK,SAAS,EAAE,CAAC;YAC9D,MAAM,IAAA,gCAAiB,EAAC;gBACtB,GAAG,EAAE,IAAI,CAAC,GAAG;gBACb,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,UAAU;gBACV,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,WAAW,EAAE,WAAW;oBACtB,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,WAAW,EAAE,EAAE,EAAE,CAAC,WAAW,CAAC,KAAK,EAAE,WAAW,CAAC;oBAC7D,CAAC,CAAC,SAAS;gBACb,MAAM,EAAE,MAAM;oBACZ,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,MAAM,CAAC,YAAuB,EAAE,KAAK,CAAC;oBACrE,CAAC,CAAC,SAAS;gBACb,OAAO;gBACP,QAAQ,EAAE,CAAC,YAAY,EAAE,EAAE;oBACzB,IAAI,CAAC,YAAa,CAAC,YAAY,GAAG,YAAY,CAAC;gBACjD,CAAC;gBACD,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;aACxC,CAAC,CAAC;QACL,CAAC;QACD,OAAO,EAAE,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,MAAM;QACV,kBAAM,CAAC,KAAK,CAAC,+DAA+D,CAAC,CAAC;QAC9E,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC;YACpB,KAAK,
EAAE,IAAI,CAAC,KAAK;SAClB,CAAC,CAAC;IACL,CAAC;CACF;AA3ED,oDA2EC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LongRunningOperation, LroResourceLocationConfig, RawResponse } from \"../../http/models.js\";\nimport { PollOperation, PollOperationState } from \"../pollOperation.js\";\nimport { RestorableOperationState, StateProxy } from \"../../poller/models.js\";\nimport { initHttpOperation, pollHttpOperation } from \"../../http/operation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { PollerConfig } from \"./models.js\";\nimport { logger } from \"../../logger.js\";\n\nconst createStateProxy: >() => StateProxy<\n TState,\n TResult\n> = () => ({\n initState: (config) => ({ config, isStarted: true }) as any,\n setCanceled: (state) => (state.isCancelled = true),\n setError: (state, error) => (state.error = error),\n setResult: (state, result) => (state.result = result),\n setRunning: (state) => (state.isStarted = true),\n setSucceeded: (state) => (state.isCompleted = true),\n setFailed: () => {\n /** empty body */\n },\n\n getError: (state) => state.error,\n getResult: (state) => state.result,\n isCanceled: (state) => !!state.isCancelled,\n isFailed: (state) => !!state.error,\n isRunning: (state) => !!state.isStarted,\n isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error),\n});\n\nexport class GenericPollOperation>\n implements PollOperation\n{\n private pollerConfig?: PollerConfig;\n\n constructor(\n public state: RestorableOperationState,\n private lro: LongRunningOperation,\n private setErrorAsResult: boolean,\n private lroResourceLocationConfig?: LroResourceLocationConfig,\n private processResult?: (result: unknown, state: TState) => TResult,\n private updateState?: (state: TState, lastResponse: RawResponse) => void,\n private isDone?: (lastResponse: TResult, state: TState) => boolean,\n ) {}\n\n public setPollerConfig(pollerConfig: PollerConfig): void {\n this.pollerConfig = pollerConfig;\n }\n\n async update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise> {\n const stateProxy = createStateProxy();\n if (!this.state.isStarted) {\n this.state = {\n ...this.state,\n ...(await initHttpOperation({\n lro: this.lro,\n stateProxy,\n resourceLocationConfig: this.lroResourceLocationConfig,\n processResult: this.processResult,\n setErrorAsResult: this.setErrorAsResult,\n })),\n };\n }\n const updateState = this.updateState;\n const isDone = this.isDone;\n\n if (!this.state.isCompleted && this.state.error === undefined) {\n await pollHttpOperation({\n lro: this.lro,\n state: this.state,\n stateProxy,\n processResult: this.processResult,\n updateState: updateState\n ? (state, { rawResponse }) => updateState(state, rawResponse)\n : undefined,\n isDone: isDone\n ? 
({ flatResponse }, state) => isDone(flatResponse as TResult, state)\n : undefined,\n options,\n setDelay: (intervalInMs) => {\n this.pollerConfig!.intervalInMs = intervalInMs;\n },\n setErrorAsResult: this.setErrorAsResult,\n });\n }\n options?.fireProgress?.(this.state);\n return this;\n }\n\n async cancel(): Promise> {\n logger.error(\"`cancelOperation` is deprecated because it wasn't implemented\");\n return this;\n }\n\n /**\n * Serializes the Poller operation.\n */\n public toString(): string {\n return JSON.stringify({\n state: this.state,\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/models.d.ts b/node_modules/@azure/core-lro/dist/commonjs/legacy/models.d.ts new file mode 100644 index 0000000..6ecd744 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/models.d.ts @@ -0,0 +1,66 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress } from "../poller/models.js"; +import { PollOperationState } from "./pollOperation.js"; +/** + * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with. + */ +export interface PollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * @deprecated `cancelOperation` has been deprecated because it was not implemented. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * The TState defined in PollerLike can be a subset of the TState defined in + * the Poller implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString(): string; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/models.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/models.d.ts.map new file mode 100644 index 0000000..f8e2c78 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/legacy/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAExD;;GAEG;AAEH,MAAM,WAAW,UAAU,CAAC,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,EAAE,OAAO;IAC7E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE;;OAEG;IACH,aAAa,CAAC,WAAW,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACjF;;;;;OAKG;IACH,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB,CAAC;IAChE;;OAEG;IACH,MAAM,IAAI,OAAO,CAAC;IAClB;;OAEG;IACH,WAAW,IAAI,IAAI,CAAC;IACpB;;OAEG;IACH,SAAS,IAAI,OAAO,CAAC;IACrB;;;OAGG;IACH,eAAe,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5E;;;;OAIG;IACH,iBAAiB,IAAI,MAAM,CAAC;IAC5B;;;;;OAKG;IACH,SAAS,IAAI,OAAO,GAAG,SAAS,CAAC;IACjC;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/models.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/models.js new file mode 100644 index 0000000..783931f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/models.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/models.js.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/models.js.map new file mode 100644 index 0000000..3b26ad6 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/legacy/models.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress } from \"../poller/models.js\";\nimport { PollOperationState } from \"./pollOperation.js\";\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\n// eslint-disable-next-line no-use-before-define\nexport interface PollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the 
poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n * @deprecated `cancelOperation` has been deprecated because it was not implemented.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.d.ts b/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.d.ts new file mode 100644 index 0000000..9d1b341 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.d.ts @@ -0,0 +1,81 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * PollOperationState contains an opinionated list of the smallest set of properties needed + * to define any long running operation poller. + * + * While the Poller class works as the local control mechanism to start triggering, wait for, + * and potentially cancel a long running operation, the PollOperationState documents the status + * of the remote long running operation. + * + * It should be updated at least when the operation starts, when it's finished, and when it's cancelled. + * Though, implementations can have any other number of properties that can be updated by other reasons. + */ +export interface PollOperationState { + /** + * True if the operation has started. + */ + isStarted?: boolean; + /** + * True if the operation has been completed. + */ + isCompleted?: boolean; + /** + * True if the operation has been cancelled. + */ + isCancelled?: boolean; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation concluded in a result of an expected type. + */ + result?: TResult; +} +/** + * PollOperation is an interface that defines how to update the local reference of the state of the remote + * long running operation, just as well as how to request the cancellation of the same operation. + * + * It also has a method to serialize the operation so that it can be stored and resumed at any time. + */ +export interface PollOperation { + /** + * The state of the operation. + * It will be used to store the basic properties of PollOperationState, + * plus any custom property that the implementation may require. + */ + state: TState; + /** + * Defines how to request the remote service for updates on the status of the long running operation. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Also optionally receives a "fireProgress" function, which, if called, is responsible for triggering the + * poller's onProgress callbacks. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * It returns a promise that should be resolved with an updated version of the poller's operation. + * + * @param options - Optional properties passed to the operation's update method. + * + * @deprecated `cancel` has been deprecated because it was not implemented. + */ + cancel(options?: { + abortSignal?: AbortSignalLike; + }): Promise>; + /** + * Serializes the operation. + * Useful when wanting to create a poller that monitors an existing operation. + */ + toString(): string; +} +//# sourceMappingURL=pollOperation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.d.ts.map new file mode 100644 index 0000000..257ba63 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.d.ts","sourceRoot":"","sources":["../../../src/legacy/pollOperation.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE1D;;;;;;;;;;GAUG;AACH,MAAM,WAAW,kBAAkB,CAAC,OAAO;IACzC;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd;;OAEG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED;;;;;GAKG;AACH,MAAM,WAAW,aAAa,CAAC,MAAM,EAAE,OAAO;IAC5C;;;;OAIG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;;;;;;OAQG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QACf,WAAW,CAAC,EAAE,eAAe,CAAC;QAC9B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;KACxC,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAE5C;;;;;;;;;;OAUG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAE7F;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js new file mode 100644 index 0000000..c082a05 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=pollOperation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js.map new file mode 100644 index 0000000..ad9eb5b --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.js","sourceRoot":"","sources":["../../../src/legacy/pollOperation.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * PollOperationState contains an opinionated list of the smallest set of properties needed\n * to define any long running operation poller.\n *\n * While the Poller class works as the local control mechanism to start triggering, wait for,\n * and potentially cancel a long running operation, the PollOperationState documents the status\n * of the remote long running operation.\n *\n * It should be updated at least when the operation starts, when it's finished, and when it's cancelled.\n * Though, implementations can have any other number of properties that can be updated by other reasons.\n */\nexport interface PollOperationState {\n /**\n * True if the operation has started.\n */\n isStarted?: boolean;\n /**\n * True if the operation has been completed.\n */\n isCompleted?: boolean;\n /**\n * True if the operation has been cancelled.\n */\n isCancelled?: boolean;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation concluded in a result of an expected type.\n */\n result?: TResult;\n}\n\n/**\n * PollOperation is an interface that defines how to update the local reference of the state of the remote\n * long running operation, just as well as how to request the cancellation of the same operation.\n *\n * It also has a method to serialize the operation so that it can be stored and resumed at any time.\n */\nexport interface PollOperation {\n /**\n * The state of the operation.\n * It will be used to store the basic properties of PollOperationState,\n * plus any custom property that the implementation may require.\n */\n state: TState;\n\n /**\n * Defines how to request the remote service for updates on the status of the long running operation.\n *\n * It optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n * Also optionally receives a \"fireProgress\" function, which, if called, is responsible for triggering the\n * poller's onProgress callbacks.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise>;\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * It returns a promise that should be resolved with an updated version of the poller's operation.\n *\n * @param options - Optional properties passed to the operation's update method.\n *\n * @deprecated `cancel` has been deprecated because it was not implemented.\n */\n cancel(options?: { abortSignal?: AbortSignalLike }): Promise>;\n\n /**\n * Serializes the operation.\n * Useful when 
wanting to create a poller that monitors an existing operation.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.d.ts b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.d.ts new file mode 100644 index 0000000..e748266 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.d.ts @@ -0,0 +1,327 @@ +import { PollOperation, PollOperationState } from "./pollOperation.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress } from "../poller/models.js"; +import { PollerLike } from "./models.js"; +/** + * PollProgressCallback is the type of the callback functions sent to onProgress. + * These functions will receive a TState that is defined by your implementation of + * the Poller class. + */ +export type PollProgressCallback = (state: TState) => void; +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export declare class PollerStoppedError extends Error { + constructor(message: string); +} +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +export declare class PollerCancelledError extends Error { + constructor(message: string); +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. 
+ * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +export declare abstract class Poller, TResult> implements PollerLike { + /** controls whether to throw an error if the operation failed or was canceled. */ + protected resolveOnUnsuccessful: boolean; + private stopped; + private resolve?; + private reject?; + private pollOncePromise?; + private cancelPromise?; + private promise; + private pollProgressCallbacks; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + protected operation: PollOperation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation: PollOperation); + /** + * Defines how much to wait between each poll request. + * This has to be implemented by your custom poller. + * + * \@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified. + * This can be used as follows: + * + * ```ts + * import { delay } from "@azure/core-util"; + * + * export class MyPoller extends Poller { + * // The other necessary definitions. 
+ * + * async delay(): Promise { + * const milliseconds = 1000; + * return delay(milliseconds); + * } + * } + * ``` + * + */ + protected abstract delay(): Promise; + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + private startPolling; + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + private pollOnce; + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + private fireProgress; + /** + * Invokes the underlying operation's cancel method. + */ + private cancelOnce; + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + private processUpdatedState; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller from continuing to poll. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. 
+ * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.d.ts.map new file mode 100644 index 0000000..8ae1b13 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/legacy/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAEzC;;;;GAIG;AACH,MAAM,MAAM,oBAAoB,CAAC,MAAM,IAAI,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;AAEnE;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,KAAK;gBAC/B,OAAO,EAAE,MAAM;CAK5B;AAED;;;GAGG;AACH,qBAAa,oBAAqB,SAAQ,KAAK;gBACjC,OAAO,EAAE,MAAM;CAK5B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AAEH,8BAAsB,MAAM,CAAC,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,EAAE,OAAO,CAC9E,YAAW,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC;IAEtC,kFAAkF;IAClF,SAAS,CAAC,qBAAqB,EAAE,OAAO,CAAS;IACjD,OAAO,CAAC,OAAO,CAAiB;IAChC,OAAO,CAAC,OAAO,CAAC,CAA2B;IAC3C,OAAO,CAAC,MAAM,CAAC,CAAqE;IACpF,OAAO,CAAC,eAAe,CAAC,CAAgB;IACxC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,OAAO,CAAmB;IAClC,OAAO,CAAC,qBAAqB,CAAsC;IAEnE;;;OAGG;IACH,SAAS,CAAC,SAAS,EAAE,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAEpD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;gBACS,SAAS,EAAE,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;IAmBrD;;;;;;;;;;;;;;;;;;;;OAoBG;IACH,SAAS,CAAC,QAAQ,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAEzC;;;OAGG;YACW,YAAY;IAU1B;;;;;;;OAOG;YACW,QAAQ;IAUtB;;;;;;;OAOG;IACH,OAAO,CAAC,YAAY;IAMpB;;OAEG;YACW,UAAU;IAIxB;;;;;;;OAOG;IACI,IAAI,CAAC,OAAO,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAW3E,OAAO,CAAC,mBAAmB;IA0B3B;;OAEG;IACU,aAAa,CACxB,WAAW,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAClD,OAAO,CAAC,OAAO,CAAC;IAUnB;;;;;OAKG;IACI,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB;IAOtE;;OAEG;IACI,MAAM,IAAI,OAAO;IAKxB;;OAEG;IACI,WAAW,IAAI,IAAI;IAS1B;;OAEG;IACI,SAAS,IAAI,OAAO;IAI3B;;;;;;;;OAQG;IACI,eAAe,CAAC,OAAO,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAStF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB,IAAI,MAAM;IAIlC;;;;;OAKG;IACI,SAAS,IAAI,OAAO,GAAG,SAAS;IAKvC;;;OAGG;IACI,QAAQ,IAAI,MAAM;CAG1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js new file mode 100644 index 0000000..1e27a02 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js @@ -0,0 +1,413 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Poller = exports.PollerCancelledError = exports.PollerStoppedError = void 0; +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +exports.PollerStoppedError = PollerStoppedError; +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +exports.PollerCancelledError = PollerCancelledError; +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. 
+ * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +class Poller { + /** controls whether to throw an error if the operation failed or was canceled. */ + resolveOnUnsuccessful = false; + stopped = true; + resolve; + reject; + pollOncePromise; + cancelPromise; + promise; + pollProgressCallbacks = []; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + operation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. 
+ * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + } + this.processUpdatedState(); + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. 
+ * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error = new PollerCancelledError("Operation was canceled"); + this.reject(error); + throw error; + } + } + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.getResult()); + } + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + // This is needed because the state could have been updated by + // `cancelOperation`, e.g. the operation is canceled or an error occurred. + this.processUpdatedState(); + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. 
+ * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString() { + return this.operation.toString(); + } +} +exports.Poller = Poller; +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js.map b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js.map new file mode 100644 index 0000000..ac088b0 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/legacy/poller.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAclC;;;GAGG;AACH,MAAa,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAC;QACjC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC;IAC5D,CAAC;CACF;AAND,gDAMC;AAED;;;GAGG;AACH,MAAa,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;QACnC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,SAAS,CAAC,CAAC;IAC9D,CAAC;CACF;AAND,oDAMC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AACH,gDAAgD;AAChD,MAAsB,MAAM;IAG1B,kFAAkF;IACxE,qBAAqB,GAAY,KAAK,CAAC;IACzC,OAAO,GAAY,IAAI,CAAC;IACxB,OAAO,CAA4B;IACnC,MAAM,CAAsE;IAC5E,eAAe,CAAiB;IAChC,aAAa,CAAiB;IAC9B,OAAO,CAAmB;IAC1B,qBAAqB,GAAmC,EAAE,CAAC;IAEnE;;;OAGG;IACO,SAAS,CAAiC;IAEpD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;IACH,YAAY,SAAyC;QACnD,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CACxB,CACE,OAAkC,EAClC,MAA0E,EAC1E,EAAE;YACF,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACvB,CAAC,CACF,CAAC;QACF,mFAAmF;QACnF,sFAAsF;QACtF,mFAAmF;QACnF,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,EAAE;YACtB,yBAAyB;QAC3B,CAAC,CAAC,CAAC;IACL,CAAC;IAyBD;;;OAGG;IACK,KAAK,CAAC,YAAY,CAAC,cAAiD,EAAE;QAC5E,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;QACvB,CAAC;QACD,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YAC3C,MAAM,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;YAC7B,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;QACrB,CAAC;IACH,CAAC;IAED;;;;;;;OAOG;IACK,KAAK,CAAC,QAAQ,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YACnB,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;gBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,YAAY,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;aAC3C,CAAC,CAAC;QACL,CAAC;QACD,IAAI,CAAC,mBAAmB,EAAE,CAAC;IAC7B,CAAC;IAED;;;;;;;OAOG;IACK,YAAY,CAAC,KAAa;QAChC,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,qBAAqB,EAAE,CAAC;YAClD,QAAQ,CAAC,KAAK,CAAC,CAAC;QAClB,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,UAAU,CAAC,UAA6C,EAAE;QACtE,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IACxD,CAAC;IAED;;;;;;;OAOG;IACI,IAAI,CAAC,UAA6C,EAAE;QACzD,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,CAAC;YAC1B,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,oBAAoB,GAAG,GAAS,EAAE;gBACtC,IAAI,CAAC,eAAe,GAAG,SAAS,CAAC;YACnC,CAAC,CAAC;YACF,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,oBAAoB,EAAE,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3F,CAAC;QACD,OAAO,IAAI,CAAC,eAAe,CAAC;IAC9B,CAAC;IAEO,mBAAmB;QACzB,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;YAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC;gBAChC,IAAI,CAAC,MAAO,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;gBACzC,MAAM,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC;YACnC,CAAC;QACH,CAAC;QACD,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC;YACrC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC;gBAChC,MAAM,KAAK,GAAG,IAAI,oBAAoB,CAAC,wBAAwB,CAAC,CAAC;gBACjE,IAAI,CAAC,MAAO,CAAC,KAAK,CAAC,CAAC;gBACpB,
MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;QACD,IAAI,IAAI,CAAC,MAAM,EAAE,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YAClC,uEAAuE;YACvE,uEAAuE;YACvE,oEAAoE;YACpE,uEAAuE;YACvE,cAAc;YACd,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,EAAa,CAAC,CAAC;QAC5C,CAAC;IACH,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,aAAa,CACxB,cAAiD,EAAE;QAEnD,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,IAAI,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACpD,CAAC;QACD,8DAA8D;QAC9D,0EAA0E;QAC1E,IAAI,CAAC,mBAAmB,EAAE,CAAC;QAC3B,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;OAKG;IACI,UAAU,CAAC,QAAiC;QACjD,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC1C,OAAO,GAAS,EAAE;YAChB,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC;QACxF,CAAC,CAAC;IACJ,CAAC;IAED;;OAEG;IACI,MAAM;QACX,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;IACxE,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;YAClB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;gBAChB,IAAI,CAAC,MAAM,CAAC,IAAI,kBAAkB,CAAC,gCAAgC,CAAC,CAAC,CAAC;YACxE,CAAC;QACH,CAAC;IACH,CAAC;IAED;;OAEG;IACI,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC;YACxB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QAChD,CAAC;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;QAC3D,CAAC;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB;QACtB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC9B,CAAC;IAED;;;;;OAKG;IACI,SAAS;QACd,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,KAAK,CAAC,MAAM,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;IACnC,CAAC;CACF;AA/WD,wBA+WC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperation, PollOperationState } from \"./pollOperation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress } from \"../poller/models.js\";\nimport { PollerLike } from \"./models.js\";\n\n/**\n * PollProgressCallback is the type of the callback functions sent to onProgress.\n * These functions will receive a TState that is defined by your implementation of\n * the Poller class.\n */\nexport type PollProgressCallback = (state: TState) => void;\n\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nexport class PollerStoppedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(this, PollerStoppedError.prototype);\n }\n}\n\n/**\n * When the operation is cancelled, the poller will be rejected with an instance\n * of the PollerCancelledError.\n */\nexport class PollerCancelledError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(this, PollerCancelledError.prototype);\n }\n}\n\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the 
operation finishes.\n * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nexport abstract class Poller, TResult>\n implements PollerLike\n{\n /** controls whether to throw an error if the operation failed or was canceled. */\n protected resolveOnUnsuccessful: boolean = false;\n private stopped: boolean = true;\n private resolve?: (value: TResult) => void;\n private reject?: (error: PollerStoppedError | PollerCancelledError | Error) => void;\n private pollOncePromise?: Promise;\n private cancelPromise?: Promise;\n private promise: Promise;\n private pollProgressCallbacks: PollProgressCallback[] = [];\n\n /**\n * The poller's operation is available in full to any of the methods of the Poller class\n * and any class extending the Poller class.\n */\n protected operation: PollOperation;\n\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. 
The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n constructor(operation: PollOperation) {\n this.operation = operation;\n this.promise = new Promise(\n (\n resolve: (result: TResult) => void,\n reject: (error: PollerStoppedError | PollerCancelledError | Error) => void,\n ) => {\n this.resolve = resolve;\n this.reject = reject;\n },\n );\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(() => {\n /* intentionally blank */\n });\n }\n\n /**\n * Defines how much to wait between each poll request.\n * This has to be implemented by your custom poller.\n *\n * \\@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified.\n * This can be used as follows:\n *\n * ```ts\n * import { delay } from \"@azure/core-util\";\n *\n * export class MyPoller extends Poller {\n * // The other necessary definitions.\n *\n * async delay(): Promise {\n * const milliseconds = 1000;\n * return delay(milliseconds);\n * }\n * }\n * ```\n *\n */\n protected abstract delay(): Promise;\n\n /**\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n private async startPolling(pollOptions: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (this.stopped) {\n this.stopped = false;\n }\n while (!this.isStopped() && !this.isDone()) {\n await this.poll(pollOptions);\n await this.delay();\n }\n }\n\n /**\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n private async pollOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise 
{\n if (!this.isDone()) {\n this.operation = await this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this),\n });\n }\n this.processUpdatedState();\n }\n\n /**\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n private fireProgress(state: TState): void {\n for (const callback of this.pollProgressCallbacks) {\n callback(state);\n }\n }\n\n /**\n * Invokes the underlying operation's cancel method.\n */\n private async cancelOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n this.operation = await this.operation.cancel(options);\n }\n\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public poll(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n const clearPollOncePromise = (): void => {\n this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n }\n\n private processUpdatedState(): void {\n if (this.operation.state.error) {\n this.stopped = true;\n if (!this.resolveOnUnsuccessful) {\n this.reject!(this.operation.state.error);\n throw this.operation.state.error;\n }\n }\n if (this.operation.state.isCancelled) {\n this.stopped = true;\n if (!this.resolveOnUnsuccessful) {\n const error = new PollerCancelledError(\"Operation was canceled\");\n this.reject!(error);\n throw error;\n }\n }\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.getResult() as TResult);\n }\n }\n\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n public async pollUntilDone(\n pollOptions: { abortSignal?: AbortSignalLike } = {},\n ): Promise {\n if (this.stopped) {\n this.startPolling(pollOptions).catch(this.reject);\n }\n // This is needed because the state could have been updated by\n // `cancelOperation`, e.g. 
the operation is canceled or an error occurred.\n this.processUpdatedState();\n return this.promise;\n }\n\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n public onProgress(callback: (state: TState) => void): CancelOnProgress {\n this.pollProgressCallbacks.push(callback);\n return (): void => {\n this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);\n };\n }\n\n /**\n * Returns true if the poller has finished polling.\n */\n public isDone(): boolean {\n const state: PollOperationState = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n }\n\n /**\n * Stops the poller from continuing to poll.\n */\n public stopPolling(): void {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n }\n\n /**\n * Returns true if the poller is stopped.\n */\n public isStopped(): boolean {\n return this.stopped;\n }\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public cancelOperation(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n } else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n }\n\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... 
More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n public getOperationState(): TState {\n return this.operation.state;\n }\n\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n public getResult(): TResult | undefined {\n const state: PollOperationState = this.operation.state;\n return state.result;\n }\n\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n public toString(): string {\n return this.operation.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/logger.d.ts b/node_modules/@azure/core-lro/dist/commonjs/logger.d.ts new file mode 100644 index 0000000..52138fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/logger.d.ts @@ -0,0 +1,6 @@ +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=logger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/logger.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/logger.d.ts.map new file mode 100644 index 0000000..3763b49 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/logger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,eAAO,MAAM,MAAM,qCAAiC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/logger.js b/node_modules/@azure/core-lro/dist/commonjs/logger.js new file mode 100644 index 0000000..3564ffa --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/logger.js @@ -0,0 +1,12 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.logger = void 0; +const logger_1 = require("@azure/logger"); +/** + * The `@azure/logger` configuration for this package. 
+ * @internal + */ +exports.logger = (0, logger_1.createClientLogger)("core-lro"); +//# sourceMappingURL=logger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/logger.js.map b/node_modules/@azure/core-lro/dist/commonjs/logger.js.map new file mode 100644 index 0000000..146cb94 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/logger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.js","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,0CAAmD;AAEnD;;;GAGG;AACU,QAAA,MAAM,GAAG,IAAA,2BAAkB,EAAC,UAAU,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n * @internal\n */\nexport const logger = createClientLogger(\"core-lro\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/package.json b/node_modules/@azure/core-lro/dist/commonjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/constants.d.ts b/node_modules/@azure/core-lro/dist/commonjs/poller/constants.d.ts new file mode 100644 index 0000000..a4e88ee --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/constants.d.ts @@ -0,0 +1,9 @@ +/** + * The default time interval to wait before sending the next polling request. + */ +export declare const POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. + */ +export declare const terminalStates: string[]; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/constants.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/poller/constants.d.ts.map new file mode 100644 index 0000000..206a7fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../src/poller/constants.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,mBAAmB,OAAO,CAAC;AACxC;;GAEG;AACH,eAAO,MAAM,cAAc,UAAsC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/constants.js b/node_modules/@azure/core-lro/dist/commonjs/poller/constants.js new file mode 100644 index 0000000..c469842 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/constants.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.terminalStates = exports.POLL_INTERVAL_IN_MS = void 0; +/** + * The default time interval to wait before sending the next polling request. + */ +exports.POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. 
+ */ +exports.terminalStates = ["succeeded", "canceled", "failed"]; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/constants.js.map b/node_modules/@azure/core-lro/dist/commonjs/poller/constants.js.map new file mode 100644 index 0000000..ae6c2c6 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../src/poller/constants.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC;;GAEG;AACU,QAAA,mBAAmB,GAAG,IAAI,CAAC;AACxC;;GAEG;AACU,QAAA,cAAc,GAAG,CAAC,WAAW,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The default time interval to wait before sending the next polling request.\n */\nexport const POLL_INTERVAL_IN_MS = 2000;\n/**\n * The closed set of terminal states.\n */\nexport const terminalStates = [\"succeeded\", \"canceled\", \"failed\"];\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/models.d.ts b/node_modules/@azure/core-lro/dist/commonjs/poller/models.d.ts new file mode 100644 index 0000000..80845ac --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/models.d.ts @@ -0,0 +1,221 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * Configurations for how to poll the operation and to check whether it has + * terminated. + */ +export interface OperationConfig { + /** The operation location */ + operationLocation?: string; + /** The resource location */ + resourceLocation?: string; + /** metadata about the operation */ + metadata?: Record; +} +/** + * The description of an operation. + */ +export interface Operation { + /** + * Sends the initiation request and returns, in addition to the response, the + * operation location, the potential resource location, and a set of metadata. + */ + init: () => Promise; + /** + * Sends the polling request. + */ + poll: (location: string, options?: TOptions) => Promise; +} +/** + * Type of a restorable long-running operation. + */ +export type RestorableOperationState = T & { + /** The operation configuration */ + config: OperationConfig; +}; +/** + * Options for `createPoller`. + */ +export interface CreatePollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + restoreFrom?: string; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: TResponse, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: TResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. + */ + withOperationLocation?: (operationLocation: string) => void; +} +export interface LroError { + code: string; + innererror?: InnerError; + message: string; +} +export interface InnerError { + code: string; + message: string; + innererror?: InnerError; +} +/** + * Options for `buildCreatePoller`. + */ +export interface BuildCreatePollerOptions { + /** + * Gets the status of the operation from the response received when the + * operation was initialized. Note that the operation could be already in + * a terminal state at this time. 
+ */ + getStatusFromInitialResponse: (inputs: { + response: TResponse; + state: RestorableOperationState; + operationLocation?: string; + }) => OperationStatus; + /** + * Gets the status of the operation from a response received when the + * operation was polled. + */ + getStatusFromPollResponse: (response: TResponse, state: RestorableOperationState) => OperationStatus; + /** + * Determines if the input error is an operation error. + */ + isOperationError: (error: Error) => boolean; + /** + * Gets the updated operation location from polling responses. + */ + getOperationLocation?: (response: TResponse, state: RestorableOperationState) => string | undefined; + /** + * Gets the resource location from a response. + */ + getResourceLocation: (response: TResponse, state: RestorableOperationState) => string | undefined; + /** + * Gets from the response the time interval the service suggests the client to + * wait before sending the next polling request. + */ + getPollingInterval?: (response: TResponse) => number | undefined; + /** + * Extracts an error model from a response. + */ + getError?: (response: TResponse) => LroError | undefined; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful: boolean; +} +/** + * The set of possible states an operation can be in at any given time. + */ +export type OperationStatus = "notStarted" | "running" | "succeeded" | "canceled" | "failed"; +/** + * While the poller works as the local control mechanism to start triggering and + * wait for a long-running operation, OperationState documents the status of + * the remote long-running operation. It gets updated after each poll. + */ +export interface OperationState { + /** + * The current status of the operation. + */ + status: OperationStatus; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation produced a result of the expected type. + */ + result?: TResult; +} +/** + * CancelOnProgress is used as the return value of a Poller's onProgress method. + * When a user invokes onProgress, they're required to pass in a function that will be + * called as a callback with the new data received each time the poll operation is updated. + * onProgress returns a function that will prevent any further update to reach the original callback. + */ +export type CancelOnProgress = () => void; +/** + * A simple poller interface. + */ +export interface SimplePollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Returns the state of the operation. 
+ */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +/** + * A state proxy that allows poller implementation to abstract away the operation + * state. This is useful to implement `lroEngine` and `createPoller` in a modular + * way. + */ +export interface StateProxy { + initState: (config: OperationConfig) => RestorableOperationState; + setRunning: (state: TState) => void; + setCanceled: (state: TState) => void; + setResult: (state: TState, result: TResult) => void; + setError: (state: TState, error: Error) => void; + setFailed: (state: TState) => void; + setSucceeded: (state: TState) => void; + isRunning: (state: TState) => boolean; + isCanceled: (state: TState) => boolean; + getResult: (state: TState) => TResult | undefined; + getError: (state: TState) => Error | undefined; + isFailed: (state: TState) => boolean; + isSucceeded: (state: TState) => boolean; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/models.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/poller/models.d.ts.map new file mode 100644 index 0000000..6e85a3f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/poller/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE1D;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,6BAA6B;IAC7B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,4BAA4B;IAC5B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,mCAAmC;IACnC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACnC;AAED;;GAEG;AACH,MAAM,WAAW,SAAS,CAAC,SAAS,EAAE,QAAQ;IAC5C;;;OAGG;IACH,IAAI,EAAE,MAAM,OAAO,CACjB,eAAe,GAAG;QAChB,QAAQ,EAAE,SAAS,CAAC;KACrB,CACF,CAAC;IACF;;OAEG;IACH,IAAI,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,QAAQ,KAAK,OAAO,CAAC,SAAS,CAAC,CAAC;CACpE;AAED;;GAEG;AACH,MAAM,MAAM,wBAAwB,CAAC,CAAC,IAAI,CAAC,GAAG;IAC5C,kCAAkC;IAClC,MAAM,EAAE,eAAe,CAAC;CACzB,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,mBAAmB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM;IAC7D;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,KAAK,IAAI,CAAC;IAC/D;;;OAGG;IACH,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,KAAK,IAAI,CAAC;CAC7D;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB,CAAC,SAAS,EAAE,MAAM;IACzD;;;;OAIG;IACH,4BAA4B,EAAE,CAAC,MAAM,EAAE;QACrC,QAAQ,EAAE,SAAS,CAAC;QACpB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;QACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,KAAK,eAAe,CAAC;IACtB;;;OAGG;IACH,yBAAyB,EAAE,CACzB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,eAAe,CAAC;IACrB;;OAEG;IACH,gBAAgB,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IAC5C;;OAEG;IACH,oBAAoB,CAAC,EAAE,CACrB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB;;OAEG;IACH,mBAAmB,EAAE,CACnB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,G
AAG,SAAS,CAAC;IACxB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,MAAM,GAAG,SAAS,CAAC;IACjE;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,QAAQ,GAAG,SAAS,CAAC;IACzD;;OAEG;IACH,qBAAqB,EAAE,OAAO,CAAC;CAChC;AAED;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG,YAAY,GAAG,SAAS,GAAG,WAAW,GAAG,UAAU,GAAG,QAAQ,CAAC;AAE7F;;;;GAIG;AACH,MAAM,WAAW,cAAc,CAAC,OAAO;IACrC;;OAEG;IACH,MAAM,EAAE,eAAe,CAAC;IACxB;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd;;OAEG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED;;;;;GAKG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC;AAE1C;;GAEG;AACH,MAAM,WAAW,gBAAgB,CAAC,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EAAE,OAAO;IAC/E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE;;OAEG;IACH,aAAa,CAAC,WAAW,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACjF;;;;;OAKG;IACH,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB,CAAC;IAChE;;OAEG;IACH,MAAM,IAAI,OAAO,CAAC;IAClB;;OAEG;IACH,WAAW,IAAI,IAAI,CAAC;IACpB;;OAEG;IACH,SAAS,IAAI,OAAO,CAAC;IACrB;;OAEG;IACH,iBAAiB,IAAI,MAAM,CAAC;IAC5B;;;;;OAKG;IACH,SAAS,IAAI,OAAO,GAAG,SAAS,CAAC;IACjC;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB;AAED;;;;GAIG;AACH,MAAM,WAAW,UAAU,CAAC,MAAM,EAAE,OAAO;IACzC,SAAS,EAAE,CAAC,MAAM,EAAE,eAAe,KAAK,wBAAwB,CAAC,MAAM,CAAC,CAAC;IAEzE,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,KAAK,IAAI,CAAC;IACpD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IAChD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACnC,YAAY,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAEtC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACtC,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACvC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,GAAG,SAAS,CAAC;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,KAAK,GAAG,SAAS,CAAC;IAC/C,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACrC,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;CACzC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/models.js b/node_modules/@azure/core-lro/dist/commonjs/poller/models.js new file mode 100644 index 0000000..783931f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/models.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/models.js.map b/node_modules/@azure/core-lro/dist/commonjs/poller/models.js.map new file mode 100644 index 0000000..f569376 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/poller/models.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Configurations for how to poll the operation and to check whether it has\n * terminated.\n */\nexport interface OperationConfig {\n /** The operation location */\n operationLocation?: string;\n /** The resource location */\n resourceLocation?: string;\n /** metadata about the operation */\n metadata?: Record;\n}\n\n/**\n * The description of an operation.\n */\nexport interface Operation {\n /**\n * Sends the initiation request and returns, in addition to the response, the\n * operation location, the potential resource location, and a set of metadata.\n */\n init: () => Promise<\n OperationConfig & {\n response: TResponse;\n }\n >;\n /**\n * Sends the polling request.\n */\n poll: (location: string, options?: TOptions) => Promise;\n}\n\n/**\n * Type of a restorable long-running operation.\n */\nexport type RestorableOperationState = T & {\n /** The operation configuration */\n config: OperationConfig;\n};\n\n/**\n * Options for `createPoller`.\n */\nexport interface CreatePollerOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n restoreFrom?: string;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: TResponse, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: TResponse) => void;\n /**\n * A function to be called each time the operation location is updated by the\n * service.\n */\n withOperationLocation?: (operationLocation: string) => void;\n}\n\nexport interface LroError {\n code: string;\n innererror?: InnerError;\n message: string;\n}\n\nexport interface InnerError {\n code: string;\n message: string;\n innererror?: InnerError;\n}\n\n/**\n * Options for `buildCreatePoller`.\n */\nexport interface BuildCreatePollerOptions {\n /**\n * Gets the status of the operation from the response received when the\n * operation was initialized. 
Note that the operation could be already in\n * a terminal state at this time.\n */\n getStatusFromInitialResponse: (inputs: {\n response: TResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n }) => OperationStatus;\n /**\n * Gets the status of the operation from a response received when the\n * operation was polled.\n */\n getStatusFromPollResponse: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n /**\n * Determines if the input error is an operation error.\n */\n isOperationError: (error: Error) => boolean;\n /**\n * Gets the updated operation location from polling responses.\n */\n getOperationLocation?: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n /**\n * Gets the resource location from a response.\n */\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n /**\n * Gets from the response the time interval the service suggests the client to\n * wait before sending the next polling request.\n */\n getPollingInterval?: (response: TResponse) => number | undefined;\n /**\n * Extracts an error model from a response.\n */\n getError?: (response: TResponse) => LroError | undefined;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful: boolean;\n}\n\n/**\n * The set of possible states an operation can be in at any given time.\n */\nexport type OperationStatus = \"notStarted\" | \"running\" | \"succeeded\" | \"canceled\" | \"failed\";\n\n/**\n * While the poller works as the local control mechanism to start triggering and\n * wait for a long-running operation, OperationState documents the status of\n * the remote long-running operation. It gets updated after each poll.\n */\nexport interface OperationState {\n /**\n * The current status of the operation.\n */\n status: OperationStatus;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation produced a result of the expected type.\n */\n result?: TResult;\n}\n\n/**\n * CancelOnProgress is used as the return value of a Poller's onProgress method.\n * When a user invokes onProgress, they're required to pass in a function that will be\n * called as a callback with the new data received each time the poll operation is updated.\n * onProgress returns a function that will prevent any further update to reach the original callback.\n */\nexport type CancelOnProgress = () => void;\n\n/**\n * A simple poller interface.\n */\nexport interface SimplePollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. 
After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Returns the state of the operation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n\n/**\n * A state proxy that allows poller implementation to abstract away the operation\n * state. This is useful to implement `lroEngine` and `createPoller` in a modular\n * way.\n */\nexport interface StateProxy {\n initState: (config: OperationConfig) => RestorableOperationState;\n\n setRunning: (state: TState) => void;\n setCanceled: (state: TState) => void;\n setResult: (state: TState, result: TResult) => void;\n setError: (state: TState, error: Error) => void;\n setFailed: (state: TState) => void;\n setSucceeded: (state: TState) => void;\n\n isRunning: (state: TState) => boolean;\n isCanceled: (state: TState) => boolean;\n getResult: (state: TState) => TResult | undefined;\n getError: (state: TState) => Error | undefined;\n isFailed: (state: TState) => boolean;\n isSucceeded: (state: TState) => boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/operation.d.ts b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.d.ts new file mode 100644 index 0000000..a23a90f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.d.ts @@ -0,0 +1,40 @@ +import { LroError, Operation, OperationStatus, RestorableOperationState, StateProxy } from "./models.js"; +/** + * Deserializes the state + */ +export declare function deserializeState(serializedState: string): RestorableOperationState; +/** + * Initiates the long-running operation. + */ +export declare function initOperation(inputs: { + init: Operation["init"]; + stateProxy: StateProxy; + getOperationStatus: (inputs: { + response: TResponse; + state: RestorableOperationState; + operationLocation?: string; + }) => OperationStatus; + processResult?: (result: TResponse, state: TState) => TResult; + withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void; + setErrorAsResult: boolean; +}): Promise>; +/** Polls the long-running operation. 
*/ +export declare function pollOperation(inputs: { + poll: Operation["poll"]; + stateProxy: StateProxy; + state: RestorableOperationState; + getOperationStatus: (response: TResponse, state: RestorableOperationState) => OperationStatus; + getResourceLocation: (response: TResponse, state: RestorableOperationState) => string | undefined; + isOperationError: (error: Error) => boolean; + getPollingInterval?: (response: TResponse) => number | undefined; + setDelay: (intervalInMs: number) => void; + getOperationLocation?: (response: TResponse, state: RestorableOperationState) => string | undefined; + withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void; + processResult?: (result: TResponse, state: TState) => TResult; + getError?: (response: TResponse) => LroError | undefined; + updateState?: (state: TState, lastResponse: TResponse) => void; + isDone?: (lastResponse: TResponse, state: TState) => boolean; + setErrorAsResult: boolean; + options?: TOptions; +}): Promise; +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/operation.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.d.ts.map new file mode 100644 index 0000000..d8a62db --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../src/poller/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,QAAQ,EAER,SAAS,EACT,eAAe,EACf,wBAAwB,EACxB,UAAU,EACX,MAAM,aAAa,CAAC;AAIrB;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,MAAM,EACrC,eAAe,EAAE,MAAM,GACtB,wBAAwB,CAAC,MAAM,CAAC,CAMlC;AAuGD;;GAEG;AACH,wBAAsB,aAAa,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;IACtE,IAAI,EAAE,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC;IAC5C,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,kBAAkB,EAAE,CAAC,MAAM,EAAE;QAC3B,QAAQ,EAAE,SAAS,CAAC;QACpB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;QACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,KAAK,eAAe,CAAC;IACtB,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,KAAK,IAAI,CAAC;IAChF,gBAAgB,EAAE,OAAO,CAAC;CAC3B,GAAG,OAAO,CAAC,wBAAwB,CAAC,MAAM,CAAC,CAAC,CAqB5C;AA4DD,wCAAwC;AACxC,wBAAsB,aAAa,CAAC,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;IAChF,IAAI,EAAE,SAAS,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC;IAC7C,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,kBAAkB,EAAE,CAClB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,eAAe,CAAC;IACrB,mBAAmB,EAAE,CACnB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB,gBAAgB,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IAC5C,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,MAAM,GAAG,SAAS,CAAC;IACjE,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM,KAAK,IAAI,CAAC;IACzC,oBAAoB,CAAC,EAAE,CACrB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,KAAK,IAAI,CAAC;IAChF,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D,QAAQ,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,QAAQ,GAAG,SAAS,CAAC;IACzD,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,KAAK,IAAI,CAAC;IAC/D,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC7D,gBAAgB,EAAE,OAAO,CAAC;IAC1B,OAAO,CAAC,EAAE,QAAQ,CAAC;CACpB,GAAG,OAAO,CAAC,IAAI,CAAC,CAsDhB"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js new file mode 100644 index 0000000..d050c61 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js @@ -0,0 +1,172 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pollOperation = exports.initOperation = exports.deserializeState = void 0; +const logger_js_1 = require("../logger.js"); +const constants_js_1 = require("./constants.js"); +/** + * Deserializes the state + */ +function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); + } +} +exports.deserializeState = deserializeState; +function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error) => { + if (isOperationError(error)) { + stateProxy.setError(state, error); + stateProxy.setFailed(state); + } + throw error; + }; +} +function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; + } + return message + " " + innerMessage; +} +function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); + } + return { + code, + message, + }; +} +function processOperationStatus(result) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError?.(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger_js_1.logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } + } + if (isDone?.(response, state) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); + } +} +function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; +} +/** + * Initiates the long-running operation. 
+ */ +async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation?.(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation, + }; + logger_js_1.logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; +} +exports.initOperation = initOperation; +async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError, + })); + const status = getOperationStatus(response, state); + logger_js_1.logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status, + }; + } + } + return { response, status }; +} +/** Polls the long-running operation. */ +async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== undefined) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options, + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult, + }); + if (!constants_js_1.terminalStates.includes(status)) { + const intervalInMs = getPollingInterval?.(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation?.(response, state); + if (location !== undefined) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation?.(location, isUpdated); + } + else + withOperationLocation?.(operationLocation, false); + } + updateState?.(state, response); + } +} +exports.pollOperation = pollOperation; +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js.map b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js.map new file mode 100644 index 0000000..c146c85 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/poller/operation.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAUlC,4CAAsC;AACtC,iDAAgD;AAEhD;;GAEG;AACH,SAAgB,gBAAgB,CAC9B,eAAuB;IAEvB,IAAI,CAAC;QACH,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,KAAK,CAAC;IAC3C,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,IAAI,KAAK,CAAC,sCAAsC,eAAe,EAAE,CAAC,CAAC;IAC3E,CAAC;AACH,CAAC;AARD,4CAQC;AAED,SAAS,aAAa,CAAkB,MAIvC;IACC,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,GAAG,MAAM,CAAC;IACvD,OAAO,CAAC,KAAY,EAAE,EAAE;QACtB,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE,CAAC;YAC5B,UAAU,CAAC,QAAQ,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;YAClC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;QAC9B,CAAC;QACD,MAAM,KAAK,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,0BAA0B,CAAC,cAAsB,EAAE,YAAoB;IAC9E,IAAI,OAAO,GAAG,cAAc,CAAC;IAC7B,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;QAC9B,OAAO,GAAG,OAAO,GAAG,GAAG,CAAC;IAC1B,CAAC;IACD,OAAO,OAAO,GAAG,GAAG,GAAG,YAAY,CAAC;AACtC,CAAC;AAED,SAAS,aAAa,CAAC,GAAa;IAIlC,IAAI,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC;IAC1B,IAAI,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;IACpB,IAAI,MAAM,GAAG,GAAiB,CAAC;IAC/B,OAAO,MAAM,CAAC,UAAU,EAAE,CAAC;QACzB,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC;QAC3B,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;QACnB,OAAO,GAAG,0BAA0B,CAAC,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAChE,CAAC;IACD,OAAO;QACL,IAAI;QACJ,OAAO;KACR,CAAC;AACJ,CAAC;AAED,SAAS,sBAAsB,CAA6B,MAS3D;IACC,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,QAAQ,EAAE,gBAAgB,EAAE,GAC9F,MAAM,CAAC;IACT,QAAQ,MAAM,EAAE,CAAC;QACf,KAAK,WAAW,CAAC,CAAC,CAAC;YACjB,UAAU,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;YAC/B,MAAM;QACR,CAAC;QACD,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,MAAM,GAAG,GAAG,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC;YACjC,IAAI,OAAO,GAAG,EAAE,CAAC;YACjB,IAAI,GAAG,EAAE,CAAC;gBACR,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;gBAC7C,OAAO,GAAG,KAAK,IAAI,KAAK,OAAO,EAAE,CAAC;YACpC,CAAC;YACD,MAAM,MAAM,GAAG,wCAAwC,OAAO,EAAE,CAAC;YACjE,UAAU,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;YAC9C,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YAC5B,kBAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACvB,MAAM;QACR,CAAC;QACD,KAAK,UAAU,CAAC,CAAC,CAAC;YAChB,UAAU,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;YAC9B,MAAM;QACR,CAAC;IACH,CAAC;IACD,IACE,MAAM,EAAE,CAAC,QAAQ,EAAE,KAAK,CAAC;QACzB,CAAC,MAAM,KAAK,SAAS;YACnB,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,EACxF,CAAC;QACD,UAAU,CAAC,SAAS,CAClB,KAAK,EACL,WAAW,CAAC;YACV,QAAQ;YACR,KAAK;YACL,aAAa;SACd,CAAC,CACH,CAAC;IACJ,CAAC;AACH,CAAC;AAED,SAAS,WAAW,CAA6B,MAIhD;IACC,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;IAClD,OAAO,aAAa,CAAC,CAAC,CAAC,aAAa,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAE,QAA+B,CAAC;AAC3F,CAAC;AAED;;GAEG;AACI,KAAK,UAAU,aAAa,CAA6B,MAW/D;IACC,MAAM,EACJ,IAAI,EACJ,UAAU,EACV,aAAa,EACb,kBAAkB,EAClB,qBAAqB,EACrB,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,MAAM,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC;IACjF,IAAI,iBAAiB;QAAE,qBAAqB,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;IACzE,MAAM,MAAM,GAAG;QACb,QAAQ;QACR,iBAAiB;QACjB,gBAAgB;KACjB,CAAC;IACF,kBAAM,CAAC,OAAO,CAAC,6BAA6B,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,KAAK,GAAG,UAAU,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC3C,MAAM,MAAM,GAAG,kBAAkB,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAC1E,sBAAsB,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,gBAAgB,EAAE,aAAa,EAAE,CAAC,CAAC;IACjG,OAAO,KAAK,CAAC;AACf,CAAC;AAhCD,sCAgCC;AAED,KAAK,UAAU,mBAAmB,CAAuC,MAexE;IAIC,MAAM,EACJ,IAAI,EACJ,KAAK,EACL,UAAU,EACV,iBAAiB,EACjB,kBAAkB,EAClB,mBAAmB,EACnB,gBAAgB,EAChB,OAAO,GACR,GAAG,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,MAAM,I
AAI,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC,KAAK,CAC3D,aAAa,CAAC;QACZ,KAAK;QACL,UAAU;QACV,gBAAgB;KACjB,CAAC,CACH,CAAC;IACF,MAAM,MAAM,GAAG,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IACnD,kBAAM,CAAC,OAAO,CACZ,iCACE,KAAK,CAAC,MAAM,CAAC,iBACf,yBAAyB,MAAM,uBAC7B,6BAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAChD,EAAE,CACH,CAAC;IACF,IAAI,MAAM,KAAK,WAAW,EAAE,CAAC;QAC3B,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QAC9D,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,OAAO;gBACL,QAAQ,EAAE,MAAM,IAAI,CAAC,gBAAgB,CAAC,CAAC,KAAK,CAC1C,aAAa,CAAC,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,CAAC,CACvD;gBACD,MAAM;aACP,CAAC;QACJ,CAAC;IACH,CAAC;IACD,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;AAC9B,CAAC;AAED,wCAAwC;AACjC,KAAK,UAAU,aAAa,CAAuC,MA0BzE;IACC,MAAM,EACJ,IAAI,EACJ,KAAK,EACL,UAAU,EACV,OAAO,EACP,kBAAkB,EAClB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,EAChB,qBAAqB,EACrB,kBAAkB,EAClB,aAAa,EACb,QAAQ,EACR,WAAW,EACX,QAAQ,EACR,MAAM,EACN,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,MAAM,EAAE,iBAAiB,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC;IAC3C,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;QACpC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,mBAAmB,CAAC;YACrD,IAAI;YACJ,kBAAkB;YAClB,KAAK;YACL,UAAU;YACV,iBAAiB;YACjB,mBAAmB;YACnB,gBAAgB;YAChB,OAAO;SACR,CAAC,CAAC;QACH,sBAAsB,CAAC;YACrB,MAAM;YACN,QAAQ;YACR,KAAK;YACL,UAAU;YACV,MAAM;YACN,aAAa;YACb,QAAQ;YACR,gBAAgB;SACjB,CAAC,CAAC;QAEH,IAAI,CAAC,6BAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;YACrC,MAAM,YAAY,GAAG,kBAAkB,EAAE,CAAC,QAAQ,CAAC,CAAC;YACpD,IAAI,YAAY;gBAAE,QAAQ,CAAC,YAAY,CAAC,CAAC;YACzC,MAAM,QAAQ,GAAG,oBAAoB,EAAE,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YACzD,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;gBAC3B,MAAM,SAAS,GAAG,iBAAiB,KAAK,QAAQ,CAAC;gBACjD,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,QAAQ,CAAC;gBAC1C,qBAAqB,EAAE,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;YAC/C,CAAC;;gBAAM,qBAAqB,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC3D,CAAC;QACD,WAAW,EAAE,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IACjC,CAAC;AACH,CAAC;AAhFD,sCAgFC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LroError,\n InnerError,\n Operation,\n OperationStatus,\n RestorableOperationState,\n StateProxy,\n} from \"./models.js\";\nimport { logger } from \"../logger.js\";\nimport { terminalStates } from \"./constants.js\";\n\n/**\n * Deserializes the state\n */\nexport function deserializeState(\n serializedState: string,\n): RestorableOperationState {\n try {\n return JSON.parse(serializedState).state;\n } catch (e) {\n throw new Error(`Unable to deserialize input state: ${serializedState}`);\n }\n}\n\nfunction setStateError(inputs: {\n state: TState;\n stateProxy: StateProxy;\n isOperationError: (error: Error) => boolean;\n}): (error: Error) => never {\n const { state, stateProxy, isOperationError } = inputs;\n return (error: Error) => {\n if (isOperationError(error)) {\n stateProxy.setError(state, error);\n stateProxy.setFailed(state);\n }\n throw error;\n };\n}\n\nfunction appendReadableErrorMessage(currentMessage: string, innerMessage: string): string {\n let message = currentMessage;\n if (message.slice(-1) !== \".\") {\n message = message + \".\";\n }\n return message + \" \" + innerMessage;\n}\n\nfunction simplifyError(err: LroError): {\n code: string;\n message: string;\n} {\n let message = err.message;\n let code = err.code;\n let curErr = err as InnerError;\n while (curErr.innererror) {\n curErr = curErr.innererror;\n code = curErr.code;\n message = appendReadableErrorMessage(message, curErr.message);\n }\n return {\n code,\n message,\n };\n}\n\nfunction processOperationStatus(result: {\n status: OperationStatus;\n response: TResponse;\n state: 
RestorableOperationState;\n stateProxy: StateProxy;\n processResult?: (result: TResponse, state: TState) => TResult;\n getError?: (response: TResponse) => LroError | undefined;\n isDone?: (lastResponse: TResponse, state: TState) => boolean;\n setErrorAsResult: boolean;\n}): void {\n const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } =\n result;\n switch (status) {\n case \"succeeded\": {\n stateProxy.setSucceeded(state);\n break;\n }\n case \"failed\": {\n const err = getError?.(response);\n let postfix = \"\";\n if (err) {\n const { code, message } = simplifyError(err);\n postfix = `. ${code}. ${message}`;\n }\n const errStr = `The long-running operation has failed${postfix}`;\n stateProxy.setError(state, new Error(errStr));\n stateProxy.setFailed(state);\n logger.warning(errStr);\n break;\n }\n case \"canceled\": {\n stateProxy.setCanceled(state);\n break;\n }\n }\n if (\n isDone?.(response, state) ||\n (isDone === undefined &&\n [\"succeeded\", \"canceled\"].concat(setErrorAsResult ? [] : [\"failed\"]).includes(status))\n ) {\n stateProxy.setResult(\n state,\n buildResult({\n response,\n state,\n processResult,\n }),\n );\n }\n}\n\nfunction buildResult(inputs: {\n response: TResponse;\n state: TState;\n processResult?: (result: TResponse, state: TState) => TResult;\n}): TResult {\n const { processResult, response, state } = inputs;\n return processResult ? processResult(response, state) : (response as unknown as TResult);\n}\n\n/**\n * Initiates the long-running operation.\n */\nexport async function initOperation(inputs: {\n init: Operation[\"init\"];\n stateProxy: StateProxy;\n getOperationStatus: (inputs: {\n response: TResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n }) => OperationStatus;\n processResult?: (result: TResponse, state: TState) => TResult;\n withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void;\n setErrorAsResult: boolean;\n}): Promise> {\n const {\n init,\n stateProxy,\n processResult,\n getOperationStatus,\n withOperationLocation,\n setErrorAsResult,\n } = inputs;\n const { operationLocation, resourceLocation, metadata, response } = await init();\n if (operationLocation) withOperationLocation?.(operationLocation, false);\n const config = {\n metadata,\n operationLocation,\n resourceLocation,\n };\n logger.verbose(`LRO: Operation description:`, config);\n const state = stateProxy.initState(config);\n const status = getOperationStatus({ response, state, operationLocation });\n processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult });\n return state;\n}\n\nasync function pollOperationHelper(inputs: {\n poll: Operation[\"poll\"];\n stateProxy: StateProxy;\n state: RestorableOperationState;\n operationLocation: string;\n getOperationStatus: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n isOperationError: (error: Error) => boolean;\n options?: TOptions;\n}): Promise<{\n status: OperationStatus;\n response: TResponse;\n}> {\n const {\n poll,\n state,\n stateProxy,\n operationLocation,\n getOperationStatus,\n getResourceLocation,\n isOperationError,\n options,\n } = inputs;\n const response = await poll(operationLocation, options).catch(\n setStateError({\n state,\n stateProxy,\n isOperationError,\n }),\n );\n const status = getOperationStatus(response, state);\n logger.verbose(\n `LRO: 
Status:\\n\\tPolling from: ${\n state.config.operationLocation\n }\\n\\tOperation status: ${status}\\n\\tPolling status: ${\n terminalStates.includes(status) ? \"Stopped\" : \"Running\"\n }`,\n );\n if (status === \"succeeded\") {\n const resourceLocation = getResourceLocation(response, state);\n if (resourceLocation !== undefined) {\n return {\n response: await poll(resourceLocation).catch(\n setStateError({ state, stateProxy, isOperationError }),\n ),\n status,\n };\n }\n }\n return { response, status };\n}\n\n/** Polls the long-running operation. */\nexport async function pollOperation(inputs: {\n poll: Operation[\"poll\"];\n stateProxy: StateProxy;\n state: RestorableOperationState;\n getOperationStatus: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n isOperationError: (error: Error) => boolean;\n getPollingInterval?: (response: TResponse) => number | undefined;\n setDelay: (intervalInMs: number) => void;\n getOperationLocation?: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void;\n processResult?: (result: TResponse, state: TState) => TResult;\n getError?: (response: TResponse) => LroError | undefined;\n updateState?: (state: TState, lastResponse: TResponse) => void;\n isDone?: (lastResponse: TResponse, state: TState) => boolean;\n setErrorAsResult: boolean;\n options?: TOptions;\n}): Promise {\n const {\n poll,\n state,\n stateProxy,\n options,\n getOperationStatus,\n getResourceLocation,\n getOperationLocation,\n isOperationError,\n withOperationLocation,\n getPollingInterval,\n processResult,\n getError,\n updateState,\n setDelay,\n isDone,\n setErrorAsResult,\n } = inputs;\n const { operationLocation } = state.config;\n if (operationLocation !== undefined) {\n const { response, status } = await pollOperationHelper({\n poll,\n getOperationStatus,\n state,\n stateProxy,\n operationLocation,\n getResourceLocation,\n isOperationError,\n options,\n });\n processOperationStatus({\n status,\n response,\n state,\n stateProxy,\n isDone,\n processResult,\n getError,\n setErrorAsResult,\n });\n\n if (!terminalStates.includes(status)) {\n const intervalInMs = getPollingInterval?.(response);\n if (intervalInMs) setDelay(intervalInMs);\n const location = getOperationLocation?.(response, state);\n if (location !== undefined) {\n const isUpdated = operationLocation !== location;\n state.config.operationLocation = location;\n withOperationLocation?.(location, isUpdated);\n } else withOperationLocation?.(operationLocation, false);\n }\n updateState?.(state, response);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/poller.d.ts b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.d.ts new file mode 100644 index 0000000..dc9e8fa --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.d.ts @@ -0,0 +1,9 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { BuildCreatePollerOptions, CreatePollerOptions, Operation, OperationState, SimplePollerLike } from "./models.js"; +/** + * Returns a poller factory. 
+ */ +export declare function buildCreatePoller>(inputs: BuildCreatePollerOptions): (lro: Operation, options?: CreatePollerOptions) => Promise>; +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/poller.d.ts.map b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.d.ts.map new file mode 100644 index 0000000..b300b2e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/poller/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EACL,wBAAwB,EACxB,mBAAmB,EACnB,SAAS,EACT,cAAc,EAEd,gBAAgB,EAEjB,MAAM,aAAa,CAAC;AA8BrB;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EAC1F,MAAM,EAAE,wBAAwB,CAAC,SAAS,EAAE,MAAM,CAAC,GAClD,CACD,GAAG,EAAE,SAAS,CAAC,SAAS,EAAE;IAAE,WAAW,CAAC,EAAE,eAAe,CAAA;CAAE,CAAC,EAC5D,OAAO,CAAC,EAAE,mBAAmB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,CAAC,KACtD,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CA6J9C"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js new file mode 100644 index 0000000..8df8b88 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js @@ -0,0 +1,174 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildCreatePoller = void 0; +const operation_js_1 = require("./operation.js"); +const constants_js_1 = require("./constants.js"); +const core_util_1 = require("@azure/core-util"); +const createStateProxy = () => ({ + /** + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. + */ + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => (state.status = "canceled"), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.status = "running"), + setSucceeded: (state) => (state.status = "succeeded"), + setFailed: (state) => (state.status = "failed"), + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded", +}); +/** + * Returns a poller factory. + */ +function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback + ? 
(() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() + : undefined; + const state = restoreFrom + ? (0, operation_js_1.deserializeState)(restoreFrom) + : await (0, operation_js_1.initOperation)({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful, + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === undefined, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state, + }), + onProgress: (callback) => { + const s = Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => (resultPromise ??= (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + // In the future we can use AbortSignal.any() instead + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal?.aborted) { + abortController.abort(); + } + else if (!abortSignal.aborted) { + inputAbortSignal?.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } + } + } + finally { + inputAbortSignal?.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } + else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); + } + } + })().finally(() => { + resultPromise = undefined; + })), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } + else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + await (0, operation_js_1.pollOperation)({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful, + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + }, + }; + return poller; + }; +} +exports.buildCreatePoller = buildCreatePoller; +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js.map b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js.map new file mode 100644 index 0000000..4dc1bd7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/poller/poller.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAYlC,iDAAgF;AAChF,iDAAqD;AACrD,gDAAyC;AAEzC,MAAM,gBAAgB,GAGlB,GAAG,EAAE,CAAC,CAAC;IACT;;;;OAIG;IACH,SAAS,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,CAAQ;IAC7D,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,UAAU,CAAC;IACnD,QAAQ,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;IACjD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;IACrD,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,SAAS,CAAC;IACjD,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,WAAW,CAAC;IACrD,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,QAAQ,CAAC;IAE/C,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK;IAChC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM;IAClC,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,UAAU;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,QAAQ;IAC9C,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,SAAS;IAChD,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,WAAW;CACrD,CAAC,CAAC;AAEH;;GAEG;AACH,SAAgB,iBAAiB,CAC/B,MAAmD;IAKnD,MAAM,EACJ,oBAAoB,EACpB,4BAA4B,EAC5B,yBAAyB,EACzB,gBAAgB,EAChB,mBAAmB,EACnB,kBAAkB,EAClB,QAAQ,EACR,qBAAqB,GACtB,GAAG,MAAM,CAAC;IACX,OAAO,KAAK,EACV,EAAE,IAAI,EAAE,IAAI,EAA2D,EACvE,OAAyD,EACzD,EAAE;QACF,MAAM,EACJ,aAAa,EACb,WAAW,EACX,qBAAqB,EAAE,6BAA6B,EACpD,YAAY,GAAG,kCAAmB,EAClC,WAAW,GACZ,GAAG,OAAO,IAAI,EAAE,CAAC;QAClB,MAAM,UAAU,GAAG,gBAAgB,EAAmB,CAAC;QACvD,MAAM,qBAAqB,GAAG,6BAA6B;YACzD,CAAC,CAAC,CAAC,GAAG,EAAE;gBACJ,IAAI,MAAM,GAAG,KAAK,CAAC;gBACnB,OAAO,CAAC,iBAAyB,EAAE,SAAkB,EAAE,EAAE;oBACvD,IAAI,SAAS;wBAAE,6BAA6B,CAAC,iBAAiB,CAAC,CAAC;yBAC3D,IAAI,CAAC,MAAM;wBAAE,6BAA6B,CAAC,iBAAiB,CAAC,CAAC;oBACnE,MAAM,GAAG,IAAI,CAAC;gBAChB,CAAC,CAAC;YACJ,CAAC,CAAC,EAAE;YACN,CAAC,CAAC,SAAS,CAAC;QACd,MAAM,KAAK,GAAqC,WAAW;YACzD,CAAC,CAAC,IAAA,+BAAgB,EAAC,WAAW,CAAC;YAC/B,CAAC,CAAC,MAAM,IAAA,4BAAa,EAAC;gBAClB,IAAI;gBACJ,UAAU;gBACV,aAAa;gBACb,kBAAkB,EAAE,4BAA4B;gBAChD,qBAAqB;gBACrB,gBAAgB,EAAE,CAAC,qBAAqB;aACzC,CAAC,CAAC;QACP,IAAI,aAA2C,CAAC;QAChD,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAG9C,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAmB,CAAC;QAC5C,MAAM,oBAAoB,GAAG,KAAK,IAAmB,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;QAC1F,MAAM,YAAY,GAAG,wBAAwB,CAAC;QAC9C,IAAI,uBAAuB,GAAG,YAAY,CAAC;QAE3C,MAAM,MAAM,GAAsC;YAChD,iBAAiB,EAAE,GAAG,EAAE,CAAC,KAAK;YAC9B,SAAS,EAAE,GAAG,EAAE,CAAC,KAAK,CAAC,MAAM;YAC7B,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,WAAW,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC;YACxE,SAAS,EAAE,GAAG,EAAE,CAAC,aAAa,KAAK,SAAS;YAC5C,WAAW,EAAE,GAAG,EAAE;gBAChB,eAAe,CAAC,KAAK,EAAE,CAAC;YAC1B,CAAC;YACD,QAAQ,EAAE,GAAG,EAAE,CACb,IAAI,CAAC,SAAS,CAAC;gBACb,KAAK;aACN,CAAC;YACJ,UAAU,EAAE,CAAC,QAAiC,EAAE,EAAE;gBAChD,MAAM,CAAC,GAAG,MAAM,EAAE,CAAC;gBACnB,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;gBAC1B,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;YACD,aAAa,EAAE,CAAC,WAA+C,EAAE,EAAE,CACjE,CAAC,aAAa,KAAK,CAAC,KAAK,IAAI,EAAE;gBAC7B,MAAM,EAAE,WAAW,EAAE,gBAAgB,EAAE,GAAG,WAAW,IAAI,EAAE,CAAC;gBAC5D,qDAAqD;gBACrD,SAAS,aAAa;oBACpB,eAAe,CAAC,KAAK,EAAE,CAAC;gBAC1B,CAAC;gBACD,MAAM,WAAW,GAAG,eAAe,CAAC,M
AAM,CAAC;gBAC3C,IAAI,gBAAgB,EAAE,OAAO,EAAE,CAAC;oBAC9B,eAAe,CAAC,KAAK,EAAE,CAAC;gBAC1B,CAAC;qBAAM,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;oBAChC,gBAAgB,EAAE,gBAAgB,CAAC,OAAO,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7E,CAAC;gBAED,IAAI,CAAC;oBACH,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC;wBACrB,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,CAAC,CAAC;wBACnC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC;4BACxB,MAAM,IAAA,iBAAK,EAAC,uBAAuB,EAAE,EAAE,WAAW,EAAE,CAAC,CAAC;4BACtD,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,CAAC,CAAC;wBACrC,CAAC;oBACH,CAAC;gBACH,CAAC;wBAAS,CAAC;oBACT,gBAAgB,EAAE,mBAAmB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;gBAChE,CAAC;gBACD,IAAI,qBAAqB,EAAE,CAAC;oBAC1B,OAAO,MAAM,CAAC,SAAS,EAAa,CAAC;gBACvC,CAAC;qBAAM,CAAC;oBACN,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,WAAW;4BACd,OAAO,MAAM,CAAC,SAAS,EAAa,CAAC;wBACvC,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;wBACpB,KAAK,YAAY,CAAC;wBAClB,KAAK,SAAS;4BACZ,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAC;oBACvE,CAAC;gBACH,CAAC;YACH,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE;gBAChB,aAAa,GAAG,SAAS,CAAC;YAC5B,CAAC,CAAC,CAAC;YACL,KAAK,CAAC,IAAI,CAAC,WAA+C;gBACxD,IAAI,qBAAqB,EAAE,CAAC;oBAC1B,IAAI,MAAM,CAAC,MAAM,EAAE;wBAAE,OAAO;gBAC9B,CAAC;qBAAM,CAAC;oBACN,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,WAAW;4BACd,OAAO;wBACT,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;oBACtB,CAAC;gBACH,CAAC;gBACD,MAAM,IAAA,4BAAa,EAAC;oBAClB,IAAI;oBACJ,KAAK;oBACL,UAAU;oBACV,oBAAoB;oBACpB,gBAAgB;oBAChB,qBAAqB;oBACrB,kBAAkB;oBAClB,kBAAkB,EAAE,yBAAyB;oBAC7C,mBAAmB;oBACnB,aAAa;oBACb,QAAQ;oBACR,WAAW;oBACX,OAAO,EAAE,WAAW;oBACpB,QAAQ,EAAE,CAAC,gBAAgB,EAAE,EAAE;wBAC7B,uBAAuB,GAAG,gBAAgB,CAAC;oBAC7C,CAAC;oBACD,gBAAgB,EAAE,CAAC,qBAAqB;iBACzC,CAAC,CAAC;gBACH,MAAM,oBAAoB,EAAE,CAAC;gBAC7B,IAAI,CAAC,qBAAqB,EAAE,CAAC;oBAC3B,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;oBACtB,CAAC;gBACH,CAAC;YACH,CAAC;SACF,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC,CAAC;AACJ,CAAC;AAlKD,8CAkKC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n BuildCreatePollerOptions,\n CreatePollerOptions,\n Operation,\n OperationState,\n RestorableOperationState,\n SimplePollerLike,\n StateProxy,\n} from \"./models.js\";\nimport { deserializeState, initOperation, pollOperation } from \"./operation.js\";\nimport { POLL_INTERVAL_IN_MS } from \"./constants.js\";\nimport { delay } from \"@azure/core-util\";\n\nconst createStateProxy: >() => StateProxy<\n TState,\n TResult\n> = () => ({\n /**\n * The state at this point is created to be of type OperationState.\n * It will be updated later to be of type TState when the\n * customer-provided callback, `updateState`, is called during polling.\n */\n initState: (config) => ({ status: \"running\", config }) as any,\n setCanceled: (state) => (state.status = \"canceled\"),\n setError: (state, error) => (state.error = error),\n setResult: (state, result) => (state.result = result),\n setRunning: (state) => (state.status = \"running\"),\n setSucceeded: (state) => (state.status = \"succeeded\"),\n setFailed: (state) => (state.status = \"failed\"),\n\n getError: (state) => state.error,\n getResult: (state) => state.result,\n isCanceled: (state) => state.status === \"canceled\",\n isFailed: (state) => state.status === \"failed\",\n isRunning: (state) => state.status === \"running\",\n isSucceeded: 
(state) => state.status === \"succeeded\",\n});\n\n/**\n * Returns a poller factory.\n */\nexport function buildCreatePoller>(\n inputs: BuildCreatePollerOptions,\n): (\n lro: Operation,\n options?: CreatePollerOptions,\n) => Promise> {\n const {\n getOperationLocation,\n getStatusFromInitialResponse,\n getStatusFromPollResponse,\n isOperationError,\n getResourceLocation,\n getPollingInterval,\n getError,\n resolveOnUnsuccessful,\n } = inputs;\n return async (\n { init, poll }: Operation,\n options?: CreatePollerOptions,\n ) => {\n const {\n processResult,\n updateState,\n withOperationLocation: withOperationLocationCallback,\n intervalInMs = POLL_INTERVAL_IN_MS,\n restoreFrom,\n } = options || {};\n const stateProxy = createStateProxy();\n const withOperationLocation = withOperationLocationCallback\n ? (() => {\n let called = false;\n return (operationLocation: string, isUpdated: boolean) => {\n if (isUpdated) withOperationLocationCallback(operationLocation);\n else if (!called) withOperationLocationCallback(operationLocation);\n called = true;\n };\n })()\n : undefined;\n const state: RestorableOperationState = restoreFrom\n ? deserializeState(restoreFrom)\n : await initOperation({\n init,\n stateProxy,\n processResult,\n getOperationStatus: getStatusFromInitialResponse,\n withOperationLocation,\n setErrorAsResult: !resolveOnUnsuccessful,\n });\n let resultPromise: Promise | undefined;\n const abortController = new AbortController();\n // Progress handlers\n type Handler = (state: TState) => void;\n const handlers = new Map();\n const handleProgressEvents = async (): Promise => handlers.forEach((h) => h(state));\n const cancelErrMsg = \"Operation was canceled\";\n let currentPollIntervalInMs = intervalInMs;\n\n const poller: SimplePollerLike = {\n getOperationState: () => state,\n getResult: () => state.result,\n isDone: () => [\"succeeded\", \"failed\", \"canceled\"].includes(state.status),\n isStopped: () => resultPromise === undefined,\n stopPolling: () => {\n abortController.abort();\n },\n toString: () =>\n JSON.stringify({\n state,\n }),\n onProgress: (callback: (state: TState) => void) => {\n const s = Symbol();\n handlers.set(s, callback);\n return () => handlers.delete(s);\n },\n pollUntilDone: (pollOptions?: { abortSignal?: AbortSignalLike }) =>\n (resultPromise ??= (async () => {\n const { abortSignal: inputAbortSignal } = pollOptions || {};\n // In the future we can use AbortSignal.any() instead\n function abortListener(): void {\n abortController.abort();\n }\n const abortSignal = abortController.signal;\n if (inputAbortSignal?.aborted) {\n abortController.abort();\n } else if (!abortSignal.aborted) {\n inputAbortSignal?.addEventListener(\"abort\", abortListener, { once: true });\n }\n\n try {\n if (!poller.isDone()) {\n await poller.poll({ abortSignal });\n while (!poller.isDone()) {\n await delay(currentPollIntervalInMs, { abortSignal });\n await poller.poll({ abortSignal });\n }\n }\n } finally {\n inputAbortSignal?.removeEventListener(\"abort\", abortListener);\n }\n if (resolveOnUnsuccessful) {\n return poller.getResult() as TResult;\n } else {\n switch (state.status) {\n case \"succeeded\":\n return poller.getResult() as TResult;\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n case \"notStarted\":\n case \"running\":\n throw new Error(`Polling completed without succeeding or failing`);\n }\n }\n })().finally(() => {\n resultPromise = undefined;\n })),\n async poll(pollOptions?: { abortSignal?: AbortSignalLike }): Promise 
{\n if (resolveOnUnsuccessful) {\n if (poller.isDone()) return;\n } else {\n switch (state.status) {\n case \"succeeded\":\n return;\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n }\n }\n await pollOperation({\n poll,\n state,\n stateProxy,\n getOperationLocation,\n isOperationError,\n withOperationLocation,\n getPollingInterval,\n getOperationStatus: getStatusFromPollResponse,\n getResourceLocation,\n processResult,\n getError,\n updateState,\n options: pollOptions,\n setDelay: (pollIntervalInMs) => {\n currentPollIntervalInMs = pollIntervalInMs;\n },\n setErrorAsResult: !resolveOnUnsuccessful,\n });\n await handleProgressEvents();\n if (!resolveOnUnsuccessful) {\n switch (state.status) {\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n }\n }\n },\n };\n return poller;\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-lro/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 0000000..22735db --- /dev/null +++ b/node_modules/@azure/core-lro/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. +{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.42.3" + } + ] +} diff --git a/node_modules/@azure/core-lro/dist/core-lro.d.ts b/node_modules/@azure/core-lro/dist/core-lro.d.ts new file mode 100644 index 0000000..552c5f1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/core-lro.d.ts @@ -0,0 +1,704 @@ +import { AbortSignalLike } from '@azure/abort-controller'; + +/** + * CancelOnProgress is used as the return value of a Poller's onProgress method. + * When a user invokes onProgress, they're required to pass in a function that will be + * called as a callback with the new data received each time the poll operation is updated. + * onProgress returns a function that will prevent any further update to reach the original callback. + */ +export declare type CancelOnProgress = () => void; + +/** + * Creates a poller that can be used to poll a long-running operation. + * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +export declare function createHttpPoller>(lro: LongRunningOperation, options?: CreateHttpPollerOptions): Promise>; + +/** + * Options for `createPoller`. + */ +export declare interface CreateHttpPollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + restoreFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + resourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, response: LroResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. 
+ */ + withOperationLocation?: (operationLocation: string) => void; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful?: boolean; +} + +/** + * Description of a long running operation. + */ +export declare interface LongRunningOperation { + /** + * The request path. This should be set if the operation is a PUT and needs + * to poll from the same request path. + */ + requestPath?: string; + /** + * The HTTP request method. This should be set if the operation is a PUT or a + * DELETE. + */ + requestMethod?: string; + /** + * A function that can be used to send initial request to the service. + */ + sendInitialRequest: () => Promise>; + /** + * A function that can be used to poll for the current status of a long running operation. + */ + sendPollRequest: (path: string, options?: { + abortSignal?: AbortSignalLike; + }) => Promise>; +} + +/** + * The LRO Engine, a class that performs polling. + */ +export declare class LroEngine> extends Poller { + private config; + constructor(lro: LongRunningOperation, options?: LroEngineOptions); + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay(): Promise; +} + +/** + * Options for the LRO poller. + */ +export declare interface LroEngineOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + resumeFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + lroResourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: RawResponse) => void; + /** + * A predicate to determine whether the LRO finished processing. + */ + isDone?: (lastResponse: unknown, state: TState) => boolean; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful?: boolean; +} + +/** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ +export declare type LroResourceLocationConfig = "azure-async-operation" | "location" | "original-uri"; + +/** + * The type of the response of a LRO. + */ +export declare interface LroResponse { + /** The flattened response */ + flatResponse: T; + /** The raw response */ + rawResponse: RawResponse; +} + +/** + * While the poller works as the local control mechanism to start triggering and + * wait for a long-running operation, OperationState documents the status of + * the remote long-running operation. It gets updated after each poll. + */ +export declare interface OperationState { + /** + * The current status of the operation. + */ + status: OperationStatus; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation produced a result of the expected type. + */ + result?: TResult; +} + +/** + * The set of possible states an operation can be in at any given time. 
+ */ +export declare type OperationStatus = "notStarted" | "running" | "succeeded" | "canceled" | "failed"; + +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +export declare abstract class Poller, TResult> implements PollerLike { + /** controls whether to throw an error if the operation failed or was canceled. */ + protected resolveOnUnsuccessful: boolean; + private stopped; + private resolve?; + private reject?; + private pollOncePromise?; + private cancelPromise?; + private promise; + private pollProgressCallbacks; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + protected operation: PollOperation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. 
The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation: PollOperation); + /** + * Defines how much to wait between each poll request. + * This has to be implemented by your custom poller. + * + * \@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified. + * This can be used as follows: + * + * ```ts + * import { delay } from "@azure/core-util"; + * + * export class MyPoller extends Poller { + * // The other necessary definitions. + * + * async delay(): Promise { + * const milliseconds = 1000; + * return delay(milliseconds); + * } + * } + * ``` + * + */ + protected abstract delay(): Promise; + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + private startPolling; + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + private pollOnce; + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + private fireProgress; + /** + * Invokes the underlying operation's cancel method. + */ + private cancelOnce; + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + private processUpdatedState; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller from continuing to poll. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} + +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. 
+ */ +export declare class PollerCancelledError extends Error { + constructor(message: string); +} + +/** + * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with. + */ +export declare interface PollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * @deprecated `cancelOperation` has been deprecated because it was not implemented. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * The TState defined in PollerLike can be a subset of the TState defined in + * the Poller implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} + +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export declare class PollerStoppedError extends Error { + constructor(message: string); +} + +/** + * PollOperation is an interface that defines how to update the local reference of the state of the remote + * long running operation, just as well as how to request the cancellation of the same operation. + * + * It also has a method to serialize the operation so that it can be stored and resumed at any time. + */ +export declare interface PollOperation { + /** + * The state of the operation. + * It will be used to store the basic properties of PollOperationState, + * plus any custom property that the implementation may require. + */ + state: TState; + /** + * Defines how to request the remote service for updates on the status of the long running operation. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Also optionally receives a "fireProgress" function, which, if called, is responsible for triggering the + * poller's onProgress callbacks. + * + * @param options - Optional properties passed to the operation's update method. + */ + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + /** + * Attempts to cancel the underlying operation. 
+ * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * It returns a promise that should be resolved with an updated version of the poller's operation. + * + * @param options - Optional properties passed to the operation's update method. + * + * @deprecated `cancel` has been deprecated because it was not implemented. + */ + cancel(options?: { + abortSignal?: AbortSignalLike; + }): Promise>; + /** + * Serializes the operation. + * Useful when wanting to create a poller that monitors an existing operation. + */ + toString(): string; +} + +/** + * PollOperationState contains an opinionated list of the smallest set of properties needed + * to define any long running operation poller. + * + * While the Poller class works as the local control mechanism to start triggering, wait for, + * and potentially cancel a long running operation, the PollOperationState documents the status + * of the remote long running operation. + * + * It should be updated at least when the operation starts, when it's finished, and when it's cancelled. + * Though, implementations can have any other number of properties that can be updated by other reasons. + */ +export declare interface PollOperationState { + /** + * True if the operation has started. + */ + isStarted?: boolean; + /** + * True if the operation has been completed. + */ + isCompleted?: boolean; + /** + * True if the operation has been cancelled. + */ + isCancelled?: boolean; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation concluded in a result of an expected type. + */ + result?: TResult; +} + +/** + * PollProgressCallback is the type of the callback functions sent to onProgress. + * These functions will receive a TState that is defined by your implementation of + * the Poller class. + */ +export declare type PollProgressCallback = (state: TState) => void; + +/** + * Simple type of the raw response. + */ +export declare interface RawResponse { + /** The HTTP status code */ + statusCode: number; + /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */ + headers: { + [headerName: string]: string; + }; + /** The parsed response body */ + body?: unknown; +} + +/** + * A simple poller interface. + */ +export declare interface SimplePollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Returns the state of the operation. 
+ */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} + +export { } diff --git a/node_modules/@azure/core-lro/dist/esm/http/models.d.ts b/node_modules/@azure/core-lro/dist/esm/http/models.d.ts new file mode 100644 index 0000000..c6ebb32 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/models.d.ts @@ -0,0 +1,106 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { LroError } from "../poller/models.js"; +/** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ +export type LroResourceLocationConfig = "azure-async-operation" | "location" | "original-uri"; +/** + * The type of a LRO response body. This is just a convenience type for checking the status of the operation. + */ +export interface ResponseBody extends Record { + /** The status of the operation. */ + status?: unknown; + /** The state of the provisioning process */ + provisioningState?: unknown; + /** The properties of the provisioning process */ + properties?: { + provisioningState?: unknown; + } & Record; + /** The error if the operation failed */ + error?: Partial; + /** The location of the created resource */ + resourceLocation?: string; +} +/** + * Simple type of the raw response. + */ +export interface RawResponse { + /** The HTTP status code */ + statusCode: number; + /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */ + headers: { + [headerName: string]: string; + }; + /** The parsed response body */ + body?: unknown; +} +/** + * The type of the response of a LRO. + */ +export interface LroResponse { + /** The flattened response */ + flatResponse: T; + /** The raw response */ + rawResponse: RawResponse; +} +/** + * Description of a long running operation. + */ +export interface LongRunningOperation { + /** + * The request path. This should be set if the operation is a PUT and needs + * to poll from the same request path. + */ + requestPath?: string; + /** + * The HTTP request method. This should be set if the operation is a PUT or a + * DELETE. + */ + requestMethod?: string; + /** + * A function that can be used to send initial request to the service. + */ + sendInitialRequest: () => Promise>; + /** + * A function that can be used to poll for the current status of a long running operation. + */ + sendPollRequest: (path: string, options?: { + abortSignal?: AbortSignalLike; + }) => Promise>; +} +export type HttpOperationMode = "OperationLocation" | "ResourceLocation" | "Body"; +/** + * Options for `createPoller`. + */ +export interface CreateHttpPollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + restoreFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + resourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. 
+ */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, response: LroResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. + */ + withOperationLocation?: (operationLocation: string) => void; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful?: boolean; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/models.d.ts.map b/node_modules/@azure/core-lro/dist/esm/http/models.d.ts.map new file mode 100644 index 0000000..8c69ed8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/http/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAG/C;;GAEG;AACH,MAAM,MAAM,yBAAyB,GAAG,uBAAuB,GAAG,UAAU,GAAG,cAAc,CAAC;AAE9F;;GAEG;AAEH,MAAM,WAAW,YAAa,SAAQ,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAC3D,mCAAmC;IACnC,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,4CAA4C;IAC5C,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,iDAAiD;IACjD,UAAU,CAAC,EAAE;QAAE,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvE,wCAAwC;IACxC,KAAK,CAAC,EAAE,OAAO,CAAC,QAAQ,CAAC,CAAC;IAC1B,2CAA2C;IAC3C,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B,2BAA2B;IAC3B,UAAU,EAAE,MAAM,CAAC;IACnB,iJAAiJ;IACjJ,OAAO,EAAE;QACP,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAC;KAC9B,CAAC;IACF,+BAA+B;IAC/B,IAAI,CAAC,EAAE,OAAO,CAAC;CAChB;AAGD;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,CAAC,GAAG,OAAO;IACtC,6BAA6B;IAC7B,YAAY,EAAE,CAAC,CAAC;IAChB,uBAAuB;IACvB,WAAW,EAAE,WAAW,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB,CAAC,CAAC,GAAG,OAAO;IAC/C;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;OAEG;IACH,kBAAkB,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;IACxD;;OAEG;IACH,eAAe,EAAE,CACf,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,KACxC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;CAC9B;AAED,MAAM,MAAM,iBAAiB,GAAG,mBAAmB,GAAG,kBAAkB,GAAG,MAAM,CAAC;AAElF;;GAEG;AACH,MAAM,WAAW,uBAAuB,CAAC,OAAO,EAAE,MAAM;IACtD;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;IACnD;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,WAAW,KAAK,IAAI,CAAC;IAC7D;;;OAGG;IACH,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,KAAK,IAAI,CAAC;IAC5D;;OAEG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/models.js b/node_modules/@azure/core-lro/dist/esm/http/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/models.js.map b/node_modules/@azure/core-lro/dist/esm/http/models.js.map new file mode 100644 index 0000000..ccc0d92 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/http/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { LroError } from \"../poller/models.js\";\n\n// TODO: rename to ResourceLocationConfig\n/**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\nexport type LroResourceLocationConfig = \"azure-async-operation\" | \"location\" | \"original-uri\";\n\n/**\n * The type of a LRO response body. This is just a convenience type for checking the status of the operation.\n */\n\nexport interface ResponseBody extends Record {\n /** The status of the operation. */\n status?: unknown;\n /** The state of the provisioning process */\n provisioningState?: unknown;\n /** The properties of the provisioning process */\n properties?: { provisioningState?: unknown } & Record;\n /** The error if the operation failed */\n error?: Partial;\n /** The location of the created resource */\n resourceLocation?: string;\n}\n\n/**\n * Simple type of the raw response.\n */\nexport interface RawResponse {\n /** The HTTP status code */\n statusCode: number;\n /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */\n headers: {\n [headerName: string]: string;\n };\n /** The parsed response body */\n body?: unknown;\n}\n\n// TODO: rename to OperationResponse\n/**\n * The type of the response of a LRO.\n */\nexport interface LroResponse {\n /** The flattened response */\n flatResponse: T;\n /** The raw response */\n rawResponse: RawResponse;\n}\n\n/**\n * Description of a long running operation.\n */\nexport interface LongRunningOperation {\n /**\n * The request path. This should be set if the operation is a PUT and needs\n * to poll from the same request path.\n */\n requestPath?: string;\n /**\n * The HTTP request method. 
This should be set if the operation is a PUT or a\n * DELETE.\n */\n requestMethod?: string;\n /**\n * A function that can be used to send initial request to the service.\n */\n sendInitialRequest: () => Promise>;\n /**\n * A function that can be used to poll for the current status of a long running operation.\n */\n sendPollRequest: (\n path: string,\n options?: { abortSignal?: AbortSignalLike },\n ) => Promise>;\n}\n\nexport type HttpOperationMode = \"OperationLocation\" | \"ResourceLocation\" | \"Body\";\n\n/**\n * Options for `createPoller`.\n */\nexport interface CreateHttpPollerOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n restoreFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n resourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, response: LroResponse) => void;\n /**\n * A function to be called each time the operation location is updated by the\n * service.\n */\n withOperationLocation?: (operationLocation: string) => void;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/operation.d.ts b/node_modules/@azure/core-lro/dist/esm/http/operation.d.ts new file mode 100644 index 0000000..c06358e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/operation.d.ts @@ -0,0 +1,47 @@ +import { HttpOperationMode, LongRunningOperation, LroResourceLocationConfig, LroResponse, RawResponse } from "./models.js"; +import { LroError, OperationConfig, OperationStatus, RestorableOperationState, StateProxy } from "../poller/models.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +export declare function inferLroMode(inputs: { + rawResponse: RawResponse; + requestPath?: string; + requestMethod?: string; + resourceLocationConfig?: LroResourceLocationConfig; +}): (OperationConfig & { + mode: HttpOperationMode; +}) | undefined; +export declare function parseRetryAfter({ rawResponse }: LroResponse): number | undefined; +export declare function getErrorFromResponse(response: LroResponse): LroError | undefined; +export declare function getStatusFromInitialResponse(inputs: { + response: LroResponse; + state: RestorableOperationState; + operationLocation?: string; +}): OperationStatus; +/** + * Initiates the long-running operation. 
+ */ +export declare function initHttpOperation(inputs: { + stateProxy: StateProxy; + resourceLocationConfig?: LroResourceLocationConfig; + processResult?: (result: unknown, state: TState) => TResult; + setErrorAsResult: boolean; + lro: LongRunningOperation; +}): Promise>; +export declare function getOperationLocation({ rawResponse }: LroResponse, state: RestorableOperationState): string | undefined; +export declare function getOperationStatus({ rawResponse }: LroResponse, state: RestorableOperationState): OperationStatus; +export declare function getResourceLocation(res: LroResponse, state: RestorableOperationState): string | undefined; +export declare function isOperationError(e: Error): boolean; +/** Polls the long-running operation. */ +export declare function pollHttpOperation(inputs: { + lro: LongRunningOperation; + stateProxy: StateProxy; + processResult?: (result: unknown, state: TState) => TResult; + updateState?: (state: TState, lastResponse: LroResponse) => void; + isDone?: (lastResponse: LroResponse, state: TState) => boolean; + setDelay: (intervalInMs: number) => void; + options?: { + abortSignal?: AbortSignalLike; + }; + state: RestorableOperationState; + setErrorAsResult: boolean; +}): Promise; +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/operation.d.ts.map b/node_modules/@azure/core-lro/dist/esm/http/operation.d.ts.map new file mode 100644 index 0000000..39d679a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../src/http/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,yBAAyB,EACzB,WAAW,EACX,WAAW,EAEZ,MAAM,aAAa,CAAC;AACrB,OAAO,EACL,QAAQ,EACR,eAAe,EACf,eAAe,EACf,wBAAwB,EACxB,UAAU,EACX,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AA6D1D,wBAAgB,YAAY,CAAC,MAAM,EAAE;IACnC,WAAW,EAAE,WAAW,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;CACpD,GAAG,CAAC,eAAe,GAAG;IAAE,IAAI,EAAE,iBAAiB,CAAA;CAAE,CAAC,GAAG,SAAS,CA+B9D;AAqDD,wBAAgB,eAAe,CAAC,CAAC,EAAE,EAAE,WAAW,EAAE,EAAE,WAAW,CAAC,CAAC,CAAC,GAAG,MAAM,GAAG,SAAS,CAUtF;AAED,wBAAgB,oBAAoB,CAAC,CAAC,EAAE,QAAQ,EAAE,WAAW,CAAC,CAAC,CAAC,GAAG,QAAQ,GAAG,SAAS,CAetF;AAWD,wBAAgB,4BAA4B,CAAC,MAAM,EAAE,MAAM,EAAE;IAC3D,QAAQ,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B,GAAG,eAAe,CAelB;AAED;;GAEG;AACH,wBAAsB,iBAAiB,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;IAC/D,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;IACnD,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D,gBAAgB,EAAE,OAAO,CAAC;IAC1B,GAAG,EAAE,oBAAoB,CAAC;CAC3B,GAAG,OAAO,CAAC,wBAAwB,CAAC,MAAM,CAAC,CAAC,CAyB5C;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EACzC,EAAE,WAAW,EAAE,EAAE,WAAW,EAC5B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,MAAM,GAAG,SAAS,CAiBpB;AAED,wBAAgB,kBAAkB,CAAC,MAAM,EACvC,EAAE,WAAW,EAAE,EAAE,WAAW,EAC5B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,eAAe,CAejB;AASD,wBAAgB,mBAAmB,CAAC,MAAM,EACxC,GAAG,EAAE,WAAW,EAChB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,MAAM,GAAG,SAAS,CAMpB;AAED,wBAAgB,gBAAgB,CAAC,CAAC,EAAE,KAAK,GAAG,OAAO,CAElD;AAED,wCAAwC;AACxC,wBAAsB,iBAAiB,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE;IAC/D,GAAG,EAAE,oBAAoB,CAAC;IAC1B,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,WA
AW,KAAK,IAAI,CAAC;IACjE,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC/D,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM,KAAK,IAAI,CAAC;IACzC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,CAAC;IAC5C,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,gBAAgB,EAAE,OAAO,CAAC;CAC3B,GAAG,OAAO,CAAC,IAAI,CAAC,CAkChB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/operation.js b/node_modules/@azure/core-lro/dist/esm/http/operation.js new file mode 100644 index 0000000..0383844 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/operation.js @@ -0,0 +1,280 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { initOperation, pollOperation } from "../poller/operation.js"; +import { logger } from "../logger.js"; +function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation ?? azureAsyncOperation; +} +function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(inputs) { + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return undefined; + } + case "PATCH": { + return getDefault() ?? requestPath; + } + default: { + return getDefault(); + } + } + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return undefined; + } + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; + } + } + } +} +export function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod?.toLocaleUpperCase(); + if (pollingUrl !== undefined) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig, + }), + }; + } + else if (location !== undefined) { + return { + mode: "ResourceLocation", + operationLocation: location, + }; + } + else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath, + }; + } + else { + return undefined; + } +} +function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== undefined) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. 
Check your Azure subscription or resource status for more information.`); + } + switch (status?.toLocaleLowerCase()) { + case undefined: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; + } + } +} +function getStatus(rawResponse) { + const { status } = rawResponse.body ?? {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function getProvisioningState(rawResponse) { + const { properties, provisioningState } = rawResponse.body ?? {}; + const status = properties?.provisioningState ?? provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } + else if (statusCode < 300) { + return "succeeded"; + } + else { + return "failed"; + } +} +export function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter)) + : retryAfterInSeconds * 1000; + } + return undefined; +} +export function getErrorFromResponse(response) { + const error = accessBodyProperty(response, "error"); + if (!error) { + logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; + } + if (!error.code || !error.message) { + logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; + } + return error; +} +function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return undefined; +} +export function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case undefined: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } + } + const status = helper(); + return status === "running" && operationLocation === undefined ? "succeeded" : status; +} +/** + * Initiates the long-running operation. + */ +export async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return initOperation({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return { + response, + operationLocation: config?.operationLocation, + resourceLocation: config?.resourceLocation, + ...(config?.mode ? { metadata: { mode: config.mode } } : {}), + }; + }, + stateProxy, + processResult: processResult + ? 
({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult, + }); +} +export function getOperationLocation({ rawResponse }, state) { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse), + }); + } + case "ResourceLocation": { + return getLocationHeader(rawResponse); + } + case "Body": + default: { + return undefined; + } + } +} +export function getOperationStatus({ rawResponse }, state) { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + } +} +function accessBodyProperty({ flatResponse, rawResponse }, prop) { + return flatResponse?.[prop] ?? rawResponse.body?.[prop]; +} +export function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; + } + return state.config.resourceLocation; +} +export function isOperationError(e) { + return e.name === "RestError"; +} +/** Polls the long-running operation. */ +export async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; + return pollOperation({ + state, + stateProxy, + setDelay, + processResult: processResult + ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) + : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. 
+ */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult, + }); +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/operation.js.map b/node_modules/@azure/core-lro/dist/esm/http/operation.js.map new file mode 100644 index 0000000..fa0dde7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/http/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAiBlC,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,wBAAwB,CAAC;AAEtE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,SAAS,8BAA8B,CAAC,MAGvC;IACC,MAAM,EAAE,mBAAmB,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IAC1D,OAAO,iBAAiB,IAAI,mBAAmB,CAAC;AAClD,CAAC;AAED,SAAS,iBAAiB,CAAC,WAAwB;IACjD,OAAO,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AACzC,CAAC;AAED,SAAS,0BAA0B,CAAC,WAAwB;IAC1D,OAAO,WAAW,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,4BAA4B,CAAC,WAAwB;IAC5D,OAAO,WAAW,CAAC,OAAO,CAAC,sBAAsB,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,oBAAoB,CAAC,MAK7B;IACC,MAAM,EAAE,QAAQ,EAAE,aAAa,EAAE,WAAW,EAAE,sBAAsB,EAAE,GAAG,MAAM,CAAC;IAChF,QAAQ,aAAa,EAAE,CAAC;QACtB,KAAK,KAAK,CAAC,CAAC,CAAC;YACX,OAAO,WAAW,CAAC;QACrB,CAAC;QACD,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,OAAO,SAAS,CAAC;QACnB,CAAC;QACD,KAAK,OAAO,CAAC,CAAC,CAAC;YACb,OAAO,UAAU,EAAE,IAAI,WAAW,CAAC;QACrC,CAAC;QACD,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,UAAU,EAAE,CAAC;QACtB,CAAC;IACH,CAAC;IAED,SAAS,UAAU;QACjB,QAAQ,sBAAsB,EAAE,CAAC;YAC/B,KAAK,uBAAuB,CAAC,CAAC,CAAC;gBAC7B,OAAO,SAAS,CAAC;YACnB,CAAC;YACD,KAAK,cAAc,CAAC,CAAC,CAAC;gBACpB,OAAO,WAAW,CAAC;YACrB,CAAC;YACD,KAAK,UAAU,CAAC;YAChB,OAAO,CAAC,CAAC,CAAC;gBACR,OAAO,QAAQ,CAAC;YAClB,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,MAK5B;IACC,MAAM,EAAE,WAAW,EAAE,aAAa,EAAE,WAAW,EAAE,sBAAsB,EAAE,GAAG,MAAM,CAAC;IACnF,MAAM,iBAAiB,GAAG,0BAA0B,CAAC,WAAW,CAAC,CAAC;IAClE,MAAM,mBAAmB,GAAG,4BAA4B,CAAC,WAAW,CAAC,CAAC;IACtE,MAAM,UAAU,GAAG,8BAA8B,CAAC,EAAE,iBAAiB,EAAE,mBAAmB,EAAE,CAAC,CAAC;IAC9F,MAAM,QAAQ,GAAG,iBAAiB,CAAC,WAAW,CAAC,CAAC;IAChD,MAAM,uBAAuB,GAAG,aAAa,EAAE,iBAAiB,EAAE,CAAC;IACnE,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;QAC7B,OAAO;YACL,IAAI,EAAE,mBAAmB;YACzB,iBAAiB,EAAE,UAAU;YAC7B,gBAAgB,EAAE,oBAAoB,CAAC;gBACrC,aAAa,EAAE,uBAAuB;gBACtC,QAAQ;gBACR,WAAW;gBACX,sBAAsB;aACvB,CAAC;SACH,CAAC;IACJ,CAAC;SAAM,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,kBAAkB;YACxB,iBAAiB,EAAE,QAAQ;SAC5B,CAAC;IACJ,CAAC;SAAM,IAAI,uBAAuB,KAAK,KAAK,IAAI,WAAW,EAAE,CAAC;QAC5D,OAAO;YACL,IAAI,EAAE,MAAM;YACZ,iBAAiB,EAAE,WAAW;SAC/B,CAAC;IACJ,CAAC;SAAM,CAAC;QACN,OAAO,SAAS,CAAC;IACnB,CAAC;AACH,CAAC;AAED,SAAS,eAAe,CAAC,MAA+C;IACtE,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,GAAG,MAAM,CAAC;IACtC,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACvD,MAAM,IAAI,KAAK,CACb,oGAAoG,MAAM,sIAAsI,CACjP,CAAC;IACJ,CAAC;IACD,QAAQ,MAAM,EAAE,iBAAiB,EAAE,EAAE,CAAC;QACpC,KAAK,SAAS;YACZ,OAAO,iBAAiB,CAAC,UAAU,CAAC,CAAC;QACvC,KAAK,WAAW;YACd,OAAO,WAAW,CAAC;QACrB,KAAK,QAAQ;YACX,OAAO,QAAQ,CAAC;QAClB,KAAK,SAAS,CAAC;QACf,KAAK,UAAU,CAAC;QAChB,KAAK,SAAS,CAAC;QACf,KAAK,WAAW,CAAC;QACjB,KAAK,YAAY;YACf,OAAO,SAAS,CAAC;QACnB,KAAK,UAAU,CAAC;QAChB,KAAK,WAAW;YACd,OAAO,UAAU,CAAC;QACpB,OAAO,CAAC,CAAC,CAAC;YACR,MAAM,CAAC,OAAO,CAAC,uCAAuC,MAAM,EAAE,CAAC,CAAC;YAChE,OAAO,MAAyB,CAAC;QACnC,CAAC;IACH,CAAC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,WAAwB;IACzC,MAAM,EAAE,MAAM,EAAE,GAAI,WAAW,CAAC,IAAqB,IAAI,EAAE,CAAC;IAC5D,OAAO,eAAe,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,SAAS,oBAAoB,CAAC,WAAwB;IACpD,MAAM,EAAE,UAAU,EAAE,iBAAiB,EAAE,GA
AI,WAAW,CAAC,IAAqB,IAAI,EAAE,CAAC;IACnF,MAAM,MAAM,GAAG,UAAU,EAAE,iBAAiB,IAAI,iBAAiB,CAAC;IAClE,OAAO,eAAe,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB;IAC3C,IAAI,UAAU,KAAK,GAAG,EAAE,CAAC;QACvB,OAAO,SAAS,CAAC;IACnB,CAAC;SAAM,IAAI,UAAU,GAAG,GAAG,EAAE,CAAC;QAC5B,OAAO,WAAW,CAAC;IACrB,CAAC;SAAM,CAAC;QACN,OAAO,QAAQ,CAAC;IAClB,CAAC;AACH,CAAC;AAED,MAAM,UAAU,eAAe,CAAI,EAAE,WAAW,EAAkB;IAChE,MAAM,UAAU,GAAuB,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;IAC1E,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;QAC7B,wEAAwE;QACxE,MAAM,mBAAmB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;QACjD,OAAO,KAAK,CAAC,mBAAmB,CAAC;YAC/B,CAAC,CAAC,gCAAgC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,CAAC;YACxD,CAAC,CAAC,mBAAmB,GAAG,IAAI,CAAC;IACjC,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAI,QAAwB;IAC9D,MAAM,KAAK,GAAG,kBAAkB,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACpD,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,MAAM,CAAC,OAAO,CACZ,yFAAyF,CAC1F,CAAC;QACF,OAAO;IACT,CAAC;IACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC;QAClC,MAAM,CAAC,OAAO,CACZ,iHAAiH,CAClH,CAAC;QACF,OAAO;IACT,CAAC;IACD,OAAO,KAAiB,CAAC;AAC3B,CAAC;AAED,SAAS,gCAAgC,CAAC,cAAoB;IAC5D,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,MAAM,cAAc,GAAG,cAAc,CAAC,OAAO,EAAE,CAAC;IAChD,IAAI,OAAO,GAAG,cAAc,EAAE,CAAC;QAC7B,OAAO,cAAc,GAAG,OAAO,CAAC;IAClC,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,4BAA4B,CAAS,MAIpD;IACC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IACtD,SAAS,MAAM;QACb,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;QAC7C,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,SAAS;gBACZ,OAAO,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC5D,KAAK,MAAM;gBACT,OAAO,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YAC7C;gBACE,OAAO,SAAS,CAAC;QACrB,CAAC;IACH,CAAC;IACD,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC;IACxB,OAAO,MAAM,KAAK,SAAS,IAAI,iBAAiB,KAAK,SAAS,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC;AACxF,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAkB,MAMxD;IACC,MAAM,EAAE,UAAU,EAAE,sBAAsB,EAAE,aAAa,EAAE,GAAG,EAAE,gBAAgB,EAAE,GAAG,MAAM,CAAC;IAC5F,OAAO,aAAa,CAAC;QACnB,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,kBAAkB,EAAE,CAAC;YAChD,MAAM,MAAM,GAAG,YAAY,CAAC;gBAC1B,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,aAAa,EAAE,GAAG,CAAC,aAAa;gBAChC,sBAAsB;aACvB,CAAC,CAAC;YACH,OAAO;gBACL,QAAQ;gBACR,iBAAiB,EAAE,MAAM,EAAE,iBAAiB;gBAC5C,gBAAgB,EAAE,MAAM,EAAE,gBAAgB;gBAC1C,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aAC7D,CAAC;QACJ,CAAC;QACD,UAAU;QACV,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC;YACjE,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;QACjD,kBAAkB,EAAE,4BAA4B;QAChD,gBAAgB;KACjB,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,oBAAoB,CAClC,EAAE,WAAW,EAAe,EAC5B,KAAuC;IAEvC,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7C,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,OAAO,8BAA8B,CAAC;gBACpC,iBAAiB,EAAE,0BAA0B,CAAC,WAAW,CAAC;gBAC1D,mBAAmB,EAAE,4BAA4B,CAAC,WAAW,CAAC;aAC/D,CAAC,CAAC;QACL,CAAC;QACD,KAAK,kBAAkB,CAAC,CAAC,CAAC;YACxB,OAAO,iBAAiB,CAAC,WAAW,CAAC,CAAC;QACxC,CAAC;QACD,KAAK,MAAM,CAAC;QACZ,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,SAAS,CAAC;QACnB,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,kBAAkB,CAChC,EAAE,WAAW,EAAe,EAC5B,KAAuC;IAEvC,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7C,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,OAAO,SAAS,CAAC,WAAW,CAAC,CAAC;QAChC,CAAC;QACD,KAAK,kBAAkB,CAAC,CAAC,CAAC;YACxB,OAAO,iBAAiB,CAA
C,WAAW,CAAC,UAAU,CAAC,CAAC;QACnD,CAAC;QACD,KAAK,MAAM,CAAC,CAAC,CAAC;YACZ,OAAO,oBAAoB,CAAC,WAAW,CAAC,CAAC;QAC3C,CAAC;QACD;YACE,MAAM,IAAI,KAAK,CAAC,8CAA8C,IAAI,EAAE,CAAC,CAAC;IAC1E,CAAC;AACH,CAAC;AAED,SAAS,kBAAkB,CACzB,EAAE,YAAY,EAAE,WAAW,EAAe,EAC1C,IAAO;IAEP,OAAQ,YAA6B,EAAE,CAAC,IAAI,CAAC,IAAK,WAAW,CAAC,IAAqB,EAAE,CAAC,IAAI,CAAC,CAAC;AAC9F,CAAC;AAED,MAAM,UAAU,mBAAmB,CACjC,GAAgB,EAChB,KAAuC;IAEvC,MAAM,GAAG,GAAG,kBAAkB,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC;IACxD,IAAI,GAAG,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE,CAAC;QACnC,KAAK,CAAC,MAAM,CAAC,gBAAgB,GAAG,GAAG,CAAC;IACtC,CAAC;IACD,OAAO,KAAK,CAAC,MAAM,CAAC,gBAAgB,CAAC;AACvC,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,CAAQ;IACvC,OAAO,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC;AAChC,CAAC;AAED,wCAAwC;AACxC,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAkB,MAUxD;IACC,MAAM,EACJ,GAAG,EACH,UAAU,EACV,OAAO,EACP,aAAa,EACb,WAAW,EACX,QAAQ,EACR,KAAK,EACL,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,OAAO,aAAa,CAAC;QACnB,KAAK;QACL,UAAU;QACV,QAAQ;QACR,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,UAAU,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,UAAU,CAAC;YAC3E,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;QACjD,QAAQ,EAAE,oBAAoB;QAC9B,WAAW;QACX,kBAAkB,EAAE,eAAe;QACnC,oBAAoB;QACpB,kBAAkB;QAClB,gBAAgB;QAChB,mBAAmB;QACnB,OAAO;QACP;;;WAGG;QACH,IAAI,EAAE,KAAK,EAAE,QAAgB,EAAE,YAAgD,EAAE,EAAE,CACjF,GAAG,CAAC,eAAe,CAAC,QAAQ,EAAE,YAAY,CAAC;QAC7C,gBAAgB;KACjB,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n HttpOperationMode,\n LongRunningOperation,\n LroResourceLocationConfig,\n LroResponse,\n RawResponse,\n ResponseBody,\n} from \"./models.js\";\nimport {\n LroError,\n OperationConfig,\n OperationStatus,\n RestorableOperationState,\n StateProxy,\n} from \"../poller/models.js\";\nimport { initOperation, pollOperation } from \"../poller/operation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { logger } from \"../logger.js\";\n\nfunction getOperationLocationPollingUrl(inputs: {\n operationLocation?: string;\n azureAsyncOperation?: string;\n}): string | undefined {\n const { azureAsyncOperation, operationLocation } = inputs;\n return operationLocation ?? azureAsyncOperation;\n}\n\nfunction getLocationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"location\"];\n}\n\nfunction getOperationLocationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"operation-location\"];\n}\n\nfunction getAzureAsyncOperationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"azure-asyncoperation\"];\n}\n\nfunction findResourceLocation(inputs: {\n requestMethod?: string;\n location?: string;\n requestPath?: string;\n resourceLocationConfig?: LroResourceLocationConfig;\n}): string | undefined {\n const { location, requestMethod, requestPath, resourceLocationConfig } = inputs;\n switch (requestMethod) {\n case \"PUT\": {\n return requestPath;\n }\n case \"DELETE\": {\n return undefined;\n }\n case \"PATCH\": {\n return getDefault() ?? 
requestPath;\n }\n default: {\n return getDefault();\n }\n }\n\n function getDefault() {\n switch (resourceLocationConfig) {\n case \"azure-async-operation\": {\n return undefined;\n }\n case \"original-uri\": {\n return requestPath;\n }\n case \"location\":\n default: {\n return location;\n }\n }\n }\n}\n\nexport function inferLroMode(inputs: {\n rawResponse: RawResponse;\n requestPath?: string;\n requestMethod?: string;\n resourceLocationConfig?: LroResourceLocationConfig;\n}): (OperationConfig & { mode: HttpOperationMode }) | undefined {\n const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs;\n const operationLocation = getOperationLocationHeader(rawResponse);\n const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse);\n const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation });\n const location = getLocationHeader(rawResponse);\n const normalizedRequestMethod = requestMethod?.toLocaleUpperCase();\n if (pollingUrl !== undefined) {\n return {\n mode: \"OperationLocation\",\n operationLocation: pollingUrl,\n resourceLocation: findResourceLocation({\n requestMethod: normalizedRequestMethod,\n location,\n requestPath,\n resourceLocationConfig,\n }),\n };\n } else if (location !== undefined) {\n return {\n mode: \"ResourceLocation\",\n operationLocation: location,\n };\n } else if (normalizedRequestMethod === \"PUT\" && requestPath) {\n return {\n mode: \"Body\",\n operationLocation: requestPath,\n };\n } else {\n return undefined;\n }\n}\n\nfunction transformStatus(inputs: { status: unknown; statusCode: number }): OperationStatus {\n const { status, statusCode } = inputs;\n if (typeof status !== \"string\" && status !== undefined) {\n throw new Error(\n `Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`,\n );\n }\n switch (status?.toLocaleLowerCase()) {\n case undefined:\n return toOperationStatus(statusCode);\n case \"succeeded\":\n return \"succeeded\";\n case \"failed\":\n return \"failed\";\n case \"running\":\n case \"accepted\":\n case \"started\":\n case \"canceling\":\n case \"cancelling\":\n return \"running\";\n case \"canceled\":\n case \"cancelled\":\n return \"canceled\";\n default: {\n logger.verbose(`LRO: unrecognized operation status: ${status}`);\n return status as OperationStatus;\n }\n }\n}\n\nfunction getStatus(rawResponse: RawResponse): OperationStatus {\n const { status } = (rawResponse.body as ResponseBody) ?? {};\n return transformStatus({ status, statusCode: rawResponse.statusCode });\n}\n\nfunction getProvisioningState(rawResponse: RawResponse): OperationStatus {\n const { properties, provisioningState } = (rawResponse.body as ResponseBody) ?? {};\n const status = properties?.provisioningState ?? 
provisioningState;\n return transformStatus({ status, statusCode: rawResponse.statusCode });\n}\n\nfunction toOperationStatus(statusCode: number): OperationStatus {\n if (statusCode === 202) {\n return \"running\";\n } else if (statusCode < 300) {\n return \"succeeded\";\n } else {\n return \"failed\";\n }\n}\n\nexport function parseRetryAfter({ rawResponse }: LroResponse): number | undefined {\n const retryAfter: string | undefined = rawResponse.headers[\"retry-after\"];\n if (retryAfter !== undefined) {\n // Retry-After header value is either in HTTP date format, or in seconds\n const retryAfterInSeconds = parseInt(retryAfter);\n return isNaN(retryAfterInSeconds)\n ? calculatePollingIntervalFromDate(new Date(retryAfter))\n : retryAfterInSeconds * 1000;\n }\n return undefined;\n}\n\nexport function getErrorFromResponse(response: LroResponse): LroError | undefined {\n const error = accessBodyProperty(response, \"error\");\n if (!error) {\n logger.warning(\n `The long-running operation failed but there is no error property in the response's body`,\n );\n return;\n }\n if (!error.code || !error.message) {\n logger.warning(\n `The long-running operation failed but the error property in the response's body doesn't contain code or message`,\n );\n return;\n }\n return error as LroError;\n}\n\nfunction calculatePollingIntervalFromDate(retryAfterDate: Date): number | undefined {\n const timeNow = Math.floor(new Date().getTime());\n const retryAfterTime = retryAfterDate.getTime();\n if (timeNow < retryAfterTime) {\n return retryAfterTime - timeNow;\n }\n return undefined;\n}\n\nexport function getStatusFromInitialResponse(inputs: {\n response: LroResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n}): OperationStatus {\n const { response, state, operationLocation } = inputs;\n function helper(): OperationStatus {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case undefined:\n return toOperationStatus(response.rawResponse.statusCode);\n case \"Body\":\n return getOperationStatus(response, state);\n default:\n return \"running\";\n }\n }\n const status = helper();\n return status === \"running\" && operationLocation === undefined ? \"succeeded\" : status;\n}\n\n/**\n * Initiates the long-running operation.\n */\nexport async function initHttpOperation(inputs: {\n stateProxy: StateProxy;\n resourceLocationConfig?: LroResourceLocationConfig;\n processResult?: (result: unknown, state: TState) => TResult;\n setErrorAsResult: boolean;\n lro: LongRunningOperation;\n}): Promise> {\n const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs;\n return initOperation({\n init: async () => {\n const response = await lro.sendInitialRequest();\n const config = inferLroMode({\n rawResponse: response.rawResponse,\n requestPath: lro.requestPath,\n requestMethod: lro.requestMethod,\n resourceLocationConfig,\n });\n return {\n response,\n operationLocation: config?.operationLocation,\n resourceLocation: config?.resourceLocation,\n ...(config?.mode ? { metadata: { mode: config.mode } } : {}),\n };\n },\n stateProxy,\n processResult: processResult\n ? 
({ flatResponse }, state) => processResult(flatResponse, state)\n : ({ flatResponse }) => flatResponse as TResult,\n getOperationStatus: getStatusFromInitialResponse,\n setErrorAsResult,\n });\n}\n\nexport function getOperationLocation(\n { rawResponse }: LroResponse,\n state: RestorableOperationState,\n): string | undefined {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case \"OperationLocation\": {\n return getOperationLocationPollingUrl({\n operationLocation: getOperationLocationHeader(rawResponse),\n azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse),\n });\n }\n case \"ResourceLocation\": {\n return getLocationHeader(rawResponse);\n }\n case \"Body\":\n default: {\n return undefined;\n }\n }\n}\n\nexport function getOperationStatus(\n { rawResponse }: LroResponse,\n state: RestorableOperationState,\n): OperationStatus {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case \"OperationLocation\": {\n return getStatus(rawResponse);\n }\n case \"ResourceLocation\": {\n return toOperationStatus(rawResponse.statusCode);\n }\n case \"Body\": {\n return getProvisioningState(rawResponse);\n }\n default:\n throw new Error(`Internal error: Unexpected operation mode: ${mode}`);\n }\n}\n\nfunction accessBodyProperty
<P extends keyof ResponseBody>
(\n { flatResponse, rawResponse }: LroResponse,\n prop: P,\n): ResponseBody[P] {\n return (flatResponse as ResponseBody)?.[prop] ?? (rawResponse.body as ResponseBody)?.[prop];\n}\n\nexport function getResourceLocation(\n res: LroResponse,\n state: RestorableOperationState,\n): string | undefined {\n const loc = accessBodyProperty(res, \"resourceLocation\");\n if (loc && typeof loc === \"string\") {\n state.config.resourceLocation = loc;\n }\n return state.config.resourceLocation;\n}\n\nexport function isOperationError(e: Error): boolean {\n return e.name === \"RestError\";\n}\n\n/** Polls the long-running operation. */\nexport async function pollHttpOperation(inputs: {\n lro: LongRunningOperation;\n stateProxy: StateProxy;\n processResult?: (result: unknown, state: TState) => TResult;\n updateState?: (state: TState, lastResponse: LroResponse) => void;\n isDone?: (lastResponse: LroResponse, state: TState) => boolean;\n setDelay: (intervalInMs: number) => void;\n options?: { abortSignal?: AbortSignalLike };\n state: RestorableOperationState;\n setErrorAsResult: boolean;\n}): Promise {\n const {\n lro,\n stateProxy,\n options,\n processResult,\n updateState,\n setDelay,\n state,\n setErrorAsResult,\n } = inputs;\n return pollOperation({\n state,\n stateProxy,\n setDelay,\n processResult: processResult\n ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState)\n : ({ flatResponse }) => flatResponse as TResult,\n getError: getErrorFromResponse,\n updateState,\n getPollingInterval: parseRetryAfter,\n getOperationLocation,\n getOperationStatus,\n isOperationError,\n getResourceLocation,\n options,\n /**\n * The expansion here is intentional because `lro` could be an object that\n * references an inner this, so we need to preserve a reference to it.\n */\n poll: async (location: string, inputOptions?: { abortSignal?: AbortSignalLike }) =>\n lro.sendPollRequest(location, inputOptions),\n setErrorAsResult,\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/poller.d.ts b/node_modules/@azure/core-lro/dist/esm/http/poller.d.ts new file mode 100644 index 0000000..5199c5c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/poller.d.ts @@ -0,0 +1,11 @@ +import { LongRunningOperation } from "./models.js"; +import { OperationState, SimplePollerLike } from "../poller/models.js"; +import { CreateHttpPollerOptions } from "./models.js"; +/** + * Creates a poller that can be used to poll a long-running operation. 
+ * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +export declare function createHttpPoller>(lro: LongRunningOperation, options?: CreateHttpPollerOptions): Promise>; +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/poller.d.ts.map b/node_modules/@azure/core-lro/dist/esm/http/poller.d.ts.map new file mode 100644 index 0000000..5f6cfe9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/poller.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/http/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAe,MAAM,aAAa,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAWvE,OAAO,EAAE,uBAAuB,EAAE,MAAM,aAAa,CAAC;AAGtD;;;;;GAKG;AACH,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EACpF,GAAG,EAAE,oBAAoB,EACzB,OAAO,CAAC,EAAE,uBAAuB,CAAC,OAAO,EAAE,MAAM,CAAC,GACjD,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAgD5C"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/poller.js b/node_modules/@azure/core-lro/dist/esm/http/poller.js new file mode 100644 index 0000000..84379fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/poller.js @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getErrorFromResponse, getOperationLocation, getOperationStatus, getResourceLocation, getStatusFromInitialResponse, inferLroMode, isOperationError, parseRetryAfter, } from "./operation.js"; +import { buildCreatePoller } from "../poller/poller.js"; +/** + * Creates a poller that can be used to poll a long-running operation. + * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +export async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; + return buildCreatePoller({ + getStatusFromInitialResponse, + getStatusFromPollResponse: getOperationStatus, + isOperationError, + getOperationLocation, + getResourceLocation, + getPollingInterval: parseRetryAfter, + getError: getErrorFromResponse, + resolveOnUnsuccessful, + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return { + response, + operationLocation: config?.operationLocation, + resourceLocation: config?.resourceLocation, + ...(config?.mode ? { metadata: { mode: config.mode } } : {}), + }; + }, + poll: lro.sendPollRequest, + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult + ? 
({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + }); +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/http/poller.js.map b/node_modules/@azure/core-lro/dist/esm/http/poller.js.map new file mode 100644 index 0000000..6a95868 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/http/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/http/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EACL,oBAAoB,EACpB,oBAAoB,EACpB,kBAAkB,EAClB,mBAAmB,EACnB,4BAA4B,EAC5B,YAAY,EACZ,gBAAgB,EAChB,eAAe,GAChB,MAAM,gBAAgB,CAAC;AAExB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACpC,GAAyB,EACzB,OAAkD;IAElD,MAAM,EACJ,sBAAsB,EACtB,YAAY,EACZ,aAAa,EACb,WAAW,EACX,WAAW,EACX,qBAAqB,EACrB,qBAAqB,GAAG,KAAK,GAC9B,GAAG,OAAO,IAAI,EAAE,CAAC;IAClB,OAAO,iBAAiB,CAA+B;QACrD,4BAA4B;QAC5B,yBAAyB,EAAE,kBAAkB;QAC7C,gBAAgB;QAChB,oBAAoB;QACpB,mBAAmB;QACnB,kBAAkB,EAAE,eAAe;QACnC,QAAQ,EAAE,oBAAoB;QAC9B,qBAAqB;KACtB,CAAC,CACA;QACE,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,kBAAkB,EAAE,CAAC;YAChD,MAAM,MAAM,GAAG,YAAY,CAAC;gBAC1B,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,aAAa,EAAE,GAAG,CAAC,aAAa;gBAChC,sBAAsB;aACvB,CAAC,CAAC;YACH,OAAO;gBACL,QAAQ;gBACR,iBAAiB,EAAE,MAAM,EAAE,iBAAiB;gBAC5C,gBAAgB,EAAE,MAAM,EAAE,gBAAgB;gBAC1C,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aAC7D,CAAC;QACJ,CAAC;QACD,IAAI,EAAE,GAAG,CAAC,eAAe;KAC1B,EACD;QACE,YAAY;QACZ,qBAAqB;QACrB,WAAW;QACX,WAAW;QACX,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC;YACjE,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;KAClD,CACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LongRunningOperation, LroResponse } from \"./models.js\";\nimport { OperationState, SimplePollerLike } from \"../poller/models.js\";\nimport {\n getErrorFromResponse,\n getOperationLocation,\n getOperationStatus,\n getResourceLocation,\n getStatusFromInitialResponse,\n inferLroMode,\n isOperationError,\n parseRetryAfter,\n} from \"./operation.js\";\nimport { CreateHttpPollerOptions } from \"./models.js\";\nimport { buildCreatePoller } from \"../poller/poller.js\";\n\n/**\n * Creates a poller that can be used to poll a long-running operation.\n * @param lro - Description of the long-running operation\n * @param options - options to configure the poller\n * @returns an initialized poller\n */\nexport async function createHttpPoller>(\n lro: LongRunningOperation,\n options?: CreateHttpPollerOptions,\n): Promise> {\n const {\n resourceLocationConfig,\n intervalInMs,\n processResult,\n restoreFrom,\n updateState,\n withOperationLocation,\n resolveOnUnsuccessful = false,\n } = options || {};\n return buildCreatePoller({\n getStatusFromInitialResponse,\n getStatusFromPollResponse: getOperationStatus,\n isOperationError,\n getOperationLocation,\n getResourceLocation,\n getPollingInterval: parseRetryAfter,\n getError: getErrorFromResponse,\n resolveOnUnsuccessful,\n })(\n {\n init: async () => {\n const response = await lro.sendInitialRequest();\n const config = inferLroMode({\n rawResponse: response.rawResponse,\n requestPath: lro.requestPath,\n requestMethod: lro.requestMethod,\n resourceLocationConfig,\n });\n return {\n response,\n 
operationLocation: config?.operationLocation,\n resourceLocation: config?.resourceLocation,\n ...(config?.mode ? { metadata: { mode: config.mode } } : {}),\n };\n },\n poll: lro.sendPollRequest,\n },\n {\n intervalInMs,\n withOperationLocation,\n restoreFrom,\n updateState,\n processResult: processResult\n ? ({ flatResponse }, state) => processResult(flatResponse, state)\n : ({ flatResponse }) => flatResponse as TResult,\n },\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/index.d.ts b/node_modules/@azure/core-lro/dist/esm/index.d.ts new file mode 100644 index 0000000..9783948 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/index.d.ts @@ -0,0 +1,13 @@ +export { createHttpPoller } from "./http/poller.js"; +export { CancelOnProgress, OperationState, OperationStatus, SimplePollerLike, } from "./poller/models.js"; +export { CreateHttpPollerOptions } from "./http/models.js"; +export { LroResourceLocationConfig, LongRunningOperation, LroResponse, RawResponse, } from "./http/models.js"; +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +/** legacy */ +export * from "./legacy/lroEngine/index.js"; +export * from "./legacy/poller.js"; +export * from "./legacy/pollOperation.js"; +export { PollerLike } from "./legacy/models.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/index.d.ts.map b/node_modules/@azure/core-lro/dist/esm/index.d.ts.map new file mode 100644 index 0000000..c096dc1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AACpD,OAAO,EACL,gBAAgB,EAChB,cAAc,EACd,eAAe,EACf,gBAAgB,GACjB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,uBAAuB,EAAE,MAAM,kBAAkB,CAAC;AAC3D,OAAO,EACL,yBAAyB,EACzB,oBAAoB,EACpB,WAAW,EACX,WAAW,GACZ,MAAM,kBAAkB,CAAC;AAE1B;;GAEG;AAUH,aAAa;AACb,cAAc,6BAA6B,CAAC;AAC5C,cAAc,oBAAoB,CAAC;AACnC,cAAc,2BAA2B,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/index.js b/node_modules/@azure/core-lro/dist/esm/index.js new file mode 100644 index 0000000..3952c2a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/index.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { createHttpPoller } from "./http/poller.js"; +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +// export { +// BuildCreatePollerOptions, +// Operation, +// CreatePollerOptions, +// OperationConfig, +// RestorableOperationState, +// } from "./poller/models"; +// export { buildCreatePoller } from "./poller/poller"; +/** legacy */ +export * from "./legacy/lroEngine/index.js"; +export * from "./legacy/poller.js"; +export * from "./legacy/pollOperation.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/index.js.map b/node_modules/@azure/core-lro/dist/esm/index.js.map new file mode 100644 index 0000000..689d59e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AAepD;;GAEG;AACH,WAAW;AACX,8BAA8B;AAC9B,eAAe;AACf,yBAAyB;AACzB,qBAAqB;AACrB,8BAA8B;AAC9B,4BAA4B;AAC5B,uDAAuD;AAEvD,aAAa;AACb,cAAc,6BAA6B,CAAC;AAC5C,cAAc,oBAAoB,CAAC;AACnC,cAAc,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { createHttpPoller } from \"./http/poller.js\";\nexport {\n CancelOnProgress,\n OperationState,\n OperationStatus,\n SimplePollerLike,\n} from \"./poller/models.js\";\nexport { CreateHttpPollerOptions } from \"./http/models.js\";\nexport {\n LroResourceLocationConfig,\n LongRunningOperation,\n LroResponse,\n RawResponse,\n} from \"./http/models.js\";\n\n/**\n * This can be uncommented to expose the protocol-agnostic poller\n */\n// export {\n// BuildCreatePollerOptions,\n// Operation,\n// CreatePollerOptions,\n// OperationConfig,\n// RestorableOperationState,\n// } from \"./poller/models\";\n// export { buildCreatePoller } from \"./poller/poller\";\n\n/** legacy */\nexport * from \"./legacy/lroEngine/index.js\";\nexport * from \"./legacy/poller.js\";\nexport * from \"./legacy/pollOperation.js\";\nexport { PollerLike } from \"./legacy/models.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.d.ts b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.d.ts new file mode 100644 index 0000000..b0d84c8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.d.ts @@ -0,0 +1,3 @@ +export { LroEngine } from "./lroEngine.js"; +export { LroEngineOptions } from "./models.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.d.ts.map b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.d.ts.map new file mode 100644 index 0000000..ebf1159 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAC3C,OAAO,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.js b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.js new file mode 100644 index 0000000..ec17805 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { LroEngine } from "./lroEngine.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.js.map b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.js.map new file mode 100644 index 0000000..0c4dc7a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { LroEngine } from \"./lroEngine.js\";\nexport { LroEngineOptions } from \"./models.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.d.ts b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.d.ts new file mode 100644 index 0000000..937101c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.d.ts @@ -0,0 +1,16 @@ +import { LroEngineOptions } from "./models.js"; +import { LongRunningOperation } from "../../http/models.js"; +import { PollOperationState } from "../pollOperation.js"; +import { Poller } from "../poller.js"; +/** + * The LRO Engine, a class that performs polling. + */ +export declare class LroEngine> extends Poller { + private config; + constructor(lro: LongRunningOperation, options?: LroEngineOptions); + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay(): Promise; +} +//# sourceMappingURL=lroEngine.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.d.ts.map b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.d.ts.map new file mode 100644 index 0000000..640c27e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAgB,MAAM,aAAa,CAAC;AAE7D,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAE5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAItC;;GAEG;AACH,qBAAa,SAAS,CAAC,OAAO,EAAE,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,CAAE,SAAQ,MAAM,CACxF,MAAM,EACN,OAAO,CACR;IACC,OAAO,CAAC,MAAM,CAAe;gBAEjB,GAAG,EAAE,oBAAoB,CAAC,OAAO,CAAC,EAAE,OAAO,CAAC,EAAE,gBAAgB,CAAC,OAAO,EAAE,MAAM,CAAC;IA6B3F;;OAEG;IACH,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAGvB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js new file mode 100644 index 0000000..351691a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { GenericPollOperation } from "./operation.js"; +import { POLL_INTERVAL_IN_MS } from "../../poller/constants.js"; +import { Poller } from "../poller.js"; +import { deserializeState } from "../../poller/operation.js"; +/** + * The LRO Engine, a class that performs polling. 
+ */ +export class LroEngine extends Poller { + config; + constructor(lro, options) { + const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; + const state = resumeFrom + ? deserializeState(resumeFrom) + : {}; + const operation = new GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} +//# sourceMappingURL=lroEngine.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js.map b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js.map new file mode 100644 index 0000000..d5a5ef2 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAEtD,OAAO,EAAE,mBAAmB,EAAE,MAAM,2BAA2B,CAAC;AAEhE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,OAAO,EAAE,gBAAgB,EAAE,MAAM,2BAA2B,CAAC;AAE7D;;GAEG;AACH,MAAM,OAAO,SAA+D,SAAQ,MAGnF;IACS,MAAM,CAAe;IAE7B,YAAY,GAAkC,EAAE,OAA2C;QACzF,MAAM,EACJ,YAAY,GAAG,mBAAmB,EAClC,UAAU,EACV,qBAAqB,GAAG,KAAK,EAC7B,MAAM,EACN,yBAAyB,EACzB,aAAa,EACb,WAAW,GACZ,GAAG,OAAO,IAAI,EAAE,CAAC;QAClB,MAAM,KAAK,GAAqC,UAAU;YACxD,CAAC,CAAC,gBAAgB,CAAC,UAAU,CAAC;YAC9B,CAAC,CAAE,EAAuC,CAAC;QAC7C,MAAM,SAAS,GAAG,IAAI,oBAAoB,CACxC,KAAK,EACL,GAAG,EACH,CAAC,qBAAqB,EACtB,yBAAyB,EACzB,aAAa,EACb,WAAW,EACX,MAAM,CACP,CAAC;QACF,KAAK,CAAC,SAAS,CAAC,CAAC;QACjB,IAAI,CAAC,qBAAqB,GAAG,qBAAqB,CAAC;QAEnD,IAAI,CAAC,MAAM,GAAG,EAAE,YAAY,EAAE,YAAY,EAAE,CAAC;QAC7C,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACzC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;IACzF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroEngineOptions, PollerConfig } from \"./models.js\";\nimport { GenericPollOperation } from \"./operation.js\";\nimport { LongRunningOperation } from \"../../http/models.js\";\nimport { POLL_INTERVAL_IN_MS } from \"../../poller/constants.js\";\nimport { PollOperationState } from \"../pollOperation.js\";\nimport { Poller } from \"../poller.js\";\nimport { RestorableOperationState } from \"../../poller/models.js\";\nimport { deserializeState } from \"../../poller/operation.js\";\n\n/**\n * The LRO Engine, a class that performs polling.\n */\nexport class LroEngine> extends Poller<\n TState,\n TResult\n> {\n private config: PollerConfig;\n\n constructor(lro: LongRunningOperation, options?: LroEngineOptions) {\n const {\n intervalInMs = POLL_INTERVAL_IN_MS,\n resumeFrom,\n resolveOnUnsuccessful = false,\n isDone,\n lroResourceLocationConfig,\n processResult,\n updateState,\n } = options || {};\n const state: RestorableOperationState = resumeFrom\n ? 
deserializeState(resumeFrom)\n : ({} as RestorableOperationState);\n const operation = new GenericPollOperation(\n state,\n lro,\n !resolveOnUnsuccessful,\n lroResourceLocationConfig,\n processResult,\n updateState,\n isDone,\n );\n super(operation);\n this.resolveOnUnsuccessful = resolveOnUnsuccessful;\n\n this.config = { intervalInMs: intervalInMs };\n operation.setPollerConfig(this.config);\n }\n\n /**\n * The method used by the poller to wait before attempting to update its operation.\n */\n delay(): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.d.ts b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.d.ts new file mode 100644 index 0000000..bf26d04 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.d.ts @@ -0,0 +1,38 @@ +import { LroResourceLocationConfig, RawResponse } from "../../http/models.js"; +/** + * Options for the LRO poller. + */ +export interface LroEngineOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + resumeFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + lroResourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: RawResponse) => void; + /** + * A predicate to determine whether the LRO finished processing. + */ + isDone?: (lastResponse: unknown, state: TState) => boolean; + /** + * Control whether to throw an exception if the operation failed or was canceled. 
+ */ + resolveOnUnsuccessful?: boolean; +} +export interface PollerConfig { + intervalInMs: number; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.d.ts.map b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.d.ts.map new file mode 100644 index 0000000..c880365 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,yBAAyB,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAE9E;;GAEG;AACH,MAAM,WAAW,gBAAgB,CAAC,OAAO,EAAE,MAAM;IAC/C;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,yBAAyB,CAAC,EAAE,yBAAyB,CAAC;IACtD;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,WAAW,KAAK,IAAI,CAAC;IACjE;;OAEG;IACH,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC3D;;OAEG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,WAAW,YAAY;IAC3B,YAAY,EAAE,MAAM,CAAC;CACtB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.js b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.js.map b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.js.map new file mode 100644 index 0000000..bfc01b5 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroResourceLocationConfig, RawResponse } from \"../../http/models.js\";\n\n/**\n * Options for the LRO poller.\n */\nexport interface LroEngineOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n resumeFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n lroResourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: RawResponse) => void;\n /**\n * A predicate to determine whether the LRO finished processing.\n */\n isDone?: (lastResponse: unknown, state: TState) => boolean;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful?: boolean;\n}\n\nexport interface PollerConfig {\n intervalInMs: number;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.d.ts 
b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.d.ts new file mode 100644 index 0000000..d1257d1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.d.ts @@ -0,0 +1,27 @@ +import { LongRunningOperation, LroResourceLocationConfig, RawResponse } from "../../http/models.js"; +import { PollOperation, PollOperationState } from "../pollOperation.js"; +import { RestorableOperationState } from "../../poller/models.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { PollerConfig } from "./models.js"; +export declare class GenericPollOperation> implements PollOperation { + state: RestorableOperationState; + private lro; + private setErrorAsResult; + private lroResourceLocationConfig?; + private processResult?; + private updateState?; + private isDone?; + private pollerConfig?; + constructor(state: RestorableOperationState, lro: LongRunningOperation, setErrorAsResult: boolean, lroResourceLocationConfig?: LroResourceLocationConfig | undefined, processResult?: ((result: unknown, state: TState) => TResult) | undefined, updateState?: ((state: TState, lastResponse: RawResponse) => void) | undefined, isDone?: ((lastResponse: TResult, state: TState) => boolean) | undefined); + setPollerConfig(pollerConfig: PollerConfig): void; + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + cancel(): Promise>; + /** + * Serializes the Poller operation. + */ + toString(): string; +} +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.d.ts.map b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.d.ts.map new file mode 100644 index 0000000..9b66455 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,yBAAyB,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACpG,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACxE,OAAO,EAAE,wBAAwB,EAAc,MAAM,wBAAwB,CAAC;AAE9E,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAyB3C,qBAAa,oBAAoB,CAAC,OAAO,EAAE,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,CACnF,YAAW,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;IAKhC,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC;IAC9C,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,gBAAgB;IACxB,OAAO,CAAC,yBAAyB,CAAC;IAClC,OAAO,CAAC,aAAa,CAAC;IACtB,OAAO,CAAC,WAAW,CAAC;IACpB,OAAO,CAAC,MAAM,CAAC;IATjB,OAAO,CAAC,YAAY,CAAC,CAAe;gBAG3B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,EACtC,GAAG,EAAE,oBAAoB,EACzB,gBAAgB,EAAE,OAAO,EACzB,yBAAyB,CAAC,uCAA2B,EACrD,aAAa,CAAC,YAAW,OAAO,SAAS,MAAM,KAAK,OAAO,aAAA,EAC3D,WAAW,CAAC,WAAU,MAAM,gBAAgB,WAAW,KAAK,IAAI,aAAA,EAChE,MAAM,CAAC,kBAAiB,OAAO,SAAS,MAAM,KAAK,OAAO,aAAA;IAG7D,eAAe,CAAC,YAAY,EAAE,YAAY,GAAG,IAAI;IAIlD,MAAM,CAAC,OAAO,CAAC,EAAE;QACrB,WAAW,CAAC,EAAE,eAAe,CAAC;QAC9B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;KACxC,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAwCrC,MAAM,IAAI,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAKvD;;OAEG;IACI,QAAQ,IAAI,MAAM;CAK1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js new file mode 100644 index 0000000..4051ad8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft 
Corporation. +// Licensed under the MIT license. +import { initHttpOperation, pollHttpOperation } from "../../http/operation.js"; +import { logger } from "../../logger.js"; +const createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => (state.isCancelled = true), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.isStarted = true), + setSucceeded: (state) => (state.isCompleted = true), + setFailed: () => { + /** empty body */ + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), +}); +export class GenericPollOperation { + state; + lro; + setErrorAsResult; + lroResourceLocationConfig; + processResult; + updateState; + isDone; + pollerConfig; + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + async update(options) { + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = { + ...this.state, + ...(await initHttpOperation({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + })), + }; + } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === undefined) { + await pollHttpOperation({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState + ? (state, { rawResponse }) => updateState(state, rawResponse) + : undefined, + isDone: isDone + ? ({ flatResponse }, state) => isDone(flatResponse, state) + : undefined, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult, + }); + } + options?.fireProgress?.(this.state); + return this; + } + async cancel() { + logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; + } + /** + * Serializes the Poller operation. 
+ */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js.map b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js.map new file mode 100644 index 0000000..de88f0a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAG/E,OAAO,EAAE,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAEzC,MAAM,gBAAgB,GAGlB,GAAG,EAAE,CAAC,CAAC;IACT,SAAS,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,CAAQ;IAC3D,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;IACjD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;IACrD,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;IAC/C,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IACnD,SAAS,EAAE,GAAG,EAAE;QACd,iBAAiB;IACnB,CAAC;IAED,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK;IAChC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM;IAClC,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW;IAC1C,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;IAClC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS;IACvC,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC;CACzF,CAAC,CAAC;AAEH,MAAM,OAAO,oBAAoB;IAMtB;IACC;IACA;IACA;IACA;IACA;IACA;IATF,YAAY,CAAgB;IAEpC,YACS,KAAuC,EACtC,GAAyB,EACzB,gBAAyB,EACzB,yBAAqD,EACrD,aAA2D,EAC3D,WAAgE,EAChE,MAA0D;QAN3D,UAAK,GAAL,KAAK,CAAkC;QACtC,QAAG,GAAH,GAAG,CAAsB;QACzB,qBAAgB,GAAhB,gBAAgB,CAAS;QACzB,8BAAyB,GAAzB,yBAAyB,CAA4B;QACrD,kBAAa,GAAb,aAAa,CAA8C;QAC3D,gBAAW,GAAX,WAAW,CAAqD;QAChE,WAAM,GAAN,MAAM,CAAoD;IACjE,CAAC;IAEG,eAAe,CAAC,YAA0B;QAC/C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,OAGZ;QACC,MAAM,UAAU,GAAG,gBAAgB,EAAmB,CAAC;QACvD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,GAAG;gBACX,GAAG,IAAI,CAAC,KAAK;gBACb,GAAG,CAAC,MAAM,iBAAiB,CAAC;oBAC1B,GAAG,EAAE,IAAI,CAAC,GAAG;oBACb,UAAU;oBACV,sBAAsB,EAAE,IAAI,CAAC,yBAAyB;oBACtD,aAAa,EAAE,IAAI,CAAC,aAAa;oBACjC,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;iBACxC,CAAC,CAAC;aACJ,CAAC;QACJ,CAAC;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC;QACrC,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QAE3B,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,KAAK,SAAS,EAAE,CAAC;YAC9D,MAAM,iBAAiB,CAAC;gBACtB,GAAG,EAAE,IAAI,CAAC,GAAG;gBACb,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,UAAU;gBACV,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,WAAW,EAAE,WAAW;oBACtB,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,WAAW,EAAE,EAAE,EAAE,CAAC,WAAW,CAAC,KAAK,EAAE,WAAW,CAAC;oBAC7D,CAAC,CAAC,SAAS;gBACb,MAAM,EAAE,MAAM;oBACZ,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,MAAM,CAAC,YAAuB,EAAE,KAAK,CAAC;oBACrE,CAAC,CAAC,SAAS;gBACb,OAAO;gBACP,QAAQ,EAAE,CAAC,YAAY,EAAE,EAAE;oBACzB,IAAI,CAAC,YAAa,CAAC,YAAY,GAAG,YAAY,CAAC;gBACjD,CAAC;gBACD,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;aACxC,CAAC,CAAC;QACL,CAAC;QACD,OAAO,EAAE,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,MAAM;QACV,MAAM,CAAC,KAAK,CAAC,+DAA+D,CAAC,CAAC;QAC9E,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC;YACpB,KAAK,EAAE
,IAAI,CAAC,KAAK;SAClB,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LongRunningOperation, LroResourceLocationConfig, RawResponse } from \"../../http/models.js\";\nimport { PollOperation, PollOperationState } from \"../pollOperation.js\";\nimport { RestorableOperationState, StateProxy } from \"../../poller/models.js\";\nimport { initHttpOperation, pollHttpOperation } from \"../../http/operation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { PollerConfig } from \"./models.js\";\nimport { logger } from \"../../logger.js\";\n\nconst createStateProxy: >() => StateProxy<\n TState,\n TResult\n> = () => ({\n initState: (config) => ({ config, isStarted: true }) as any,\n setCanceled: (state) => (state.isCancelled = true),\n setError: (state, error) => (state.error = error),\n setResult: (state, result) => (state.result = result),\n setRunning: (state) => (state.isStarted = true),\n setSucceeded: (state) => (state.isCompleted = true),\n setFailed: () => {\n /** empty body */\n },\n\n getError: (state) => state.error,\n getResult: (state) => state.result,\n isCanceled: (state) => !!state.isCancelled,\n isFailed: (state) => !!state.error,\n isRunning: (state) => !!state.isStarted,\n isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error),\n});\n\nexport class GenericPollOperation>\n implements PollOperation\n{\n private pollerConfig?: PollerConfig;\n\n constructor(\n public state: RestorableOperationState,\n private lro: LongRunningOperation,\n private setErrorAsResult: boolean,\n private lroResourceLocationConfig?: LroResourceLocationConfig,\n private processResult?: (result: unknown, state: TState) => TResult,\n private updateState?: (state: TState, lastResponse: RawResponse) => void,\n private isDone?: (lastResponse: TResult, state: TState) => boolean,\n ) {}\n\n public setPollerConfig(pollerConfig: PollerConfig): void {\n this.pollerConfig = pollerConfig;\n }\n\n async update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise> {\n const stateProxy = createStateProxy();\n if (!this.state.isStarted) {\n this.state = {\n ...this.state,\n ...(await initHttpOperation({\n lro: this.lro,\n stateProxy,\n resourceLocationConfig: this.lroResourceLocationConfig,\n processResult: this.processResult,\n setErrorAsResult: this.setErrorAsResult,\n })),\n };\n }\n const updateState = this.updateState;\n const isDone = this.isDone;\n\n if (!this.state.isCompleted && this.state.error === undefined) {\n await pollHttpOperation({\n lro: this.lro,\n state: this.state,\n stateProxy,\n processResult: this.processResult,\n updateState: updateState\n ? (state, { rawResponse }) => updateState(state, rawResponse)\n : undefined,\n isDone: isDone\n ? 
({ flatResponse }, state) => isDone(flatResponse as TResult, state)\n : undefined,\n options,\n setDelay: (intervalInMs) => {\n this.pollerConfig!.intervalInMs = intervalInMs;\n },\n setErrorAsResult: this.setErrorAsResult,\n });\n }\n options?.fireProgress?.(this.state);\n return this;\n }\n\n async cancel(): Promise> {\n logger.error(\"`cancelOperation` is deprecated because it wasn't implemented\");\n return this;\n }\n\n /**\n * Serializes the Poller operation.\n */\n public toString(): string {\n return JSON.stringify({\n state: this.state,\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/models.d.ts b/node_modules/@azure/core-lro/dist/esm/legacy/models.d.ts new file mode 100644 index 0000000..6ecd744 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/models.d.ts @@ -0,0 +1,66 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress } from "../poller/models.js"; +import { PollOperationState } from "./pollOperation.js"; +/** + * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with. + */ +export interface PollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * @deprecated `cancelOperation` has been deprecated because it was not implemented. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * The TState defined in PollerLike can be a subset of the TState defined in + * the Poller implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString(): string; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/models.d.ts.map b/node_modules/@azure/core-lro/dist/esm/legacy/models.d.ts.map new file mode 100644 index 0000000..f8e2c78 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/legacy/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAExD;;GAEG;AAEH,MAAM,WAAW,UAAU,CAAC,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,EAAE,OAAO;IAC7E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE;;OAEG;IACH,aAAa,CAAC,WAAW,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACjF;;;;;OAKG;IACH,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB,CAAC;IAChE;;OAEG;IACH,MAAM,IAAI,OAAO,CAAC;IAClB;;OAEG;IACH,WAAW,IAAI,IAAI,CAAC;IACpB;;OAEG;IACH,SAAS,IAAI,OAAO,CAAC;IACrB;;;OAGG;IACH,eAAe,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5E;;;;OAIG;IACH,iBAAiB,IAAI,MAAM,CAAC;IAC5B;;;;;OAKG;IACH,SAAS,IAAI,OAAO,GAAG,SAAS,CAAC;IACjC;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/models.js b/node_modules/@azure/core-lro/dist/esm/legacy/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/models.js.map b/node_modules/@azure/core-lro/dist/esm/legacy/models.js.map new file mode 100644 index 0000000..4f2a390 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/legacy/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress } from \"../poller/models.js\";\nimport { PollOperationState } from \"./pollOperation.js\";\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\n// eslint-disable-next-line no-use-before-define\nexport interface PollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. 
After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n * @deprecated `cancelOperation` has been deprecated because it was not implemented.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.d.ts b/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.d.ts new file mode 100644 index 0000000..9d1b341 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.d.ts @@ -0,0 +1,81 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * PollOperationState contains an opinionated list of the smallest set of properties needed + * to define any long running operation poller. + * + * While the Poller class works as the local control mechanism to start triggering, wait for, + * and potentially cancel a long running operation, the PollOperationState documents the status + * of the remote long running operation. + * + * It should be updated at least when the operation starts, when it's finished, and when it's cancelled. + * Though, implementations can have any other number of properties that can be updated by other reasons. + */ +export interface PollOperationState { + /** + * True if the operation has started. + */ + isStarted?: boolean; + /** + * True if the operation has been completed. + */ + isCompleted?: boolean; + /** + * True if the operation has been cancelled. + */ + isCancelled?: boolean; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation concluded in a result of an expected type. + */ + result?: TResult; +} +/** + * PollOperation is an interface that defines how to update the local reference of the state of the remote + * long running operation, just as well as how to request the cancellation of the same operation. + * + * It also has a method to serialize the operation so that it can be stored and resumed at any time. + */ +export interface PollOperation { + /** + * The state of the operation. + * It will be used to store the basic properties of PollOperationState, + * plus any custom property that the implementation may require. + */ + state: TState; + /** + * Defines how to request the remote service for updates on the status of the long running operation. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Also optionally receives a "fireProgress" function, which, if called, is responsible for triggering the + * poller's onProgress callbacks. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * It returns a promise that should be resolved with an updated version of the poller's operation. + * + * @param options - Optional properties passed to the operation's update method. + * + * @deprecated `cancel` has been deprecated because it was not implemented. + */ + cancel(options?: { + abortSignal?: AbortSignalLike; + }): Promise>; + /** + * Serializes the operation. + * Useful when wanting to create a poller that monitors an existing operation. + */ + toString(): string; +} +//# sourceMappingURL=pollOperation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.d.ts.map b/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.d.ts.map new file mode 100644 index 0000000..257ba63 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.d.ts","sourceRoot":"","sources":["../../../src/legacy/pollOperation.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE1D;;;;;;;;;;GAUG;AACH,MAAM,WAAW,kBAAkB,CAAC,OAAO;IACzC;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd;;OAEG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED;;;;;GAKG;AACH,MAAM,WAAW,aAAa,CAAC,MAAM,EAAE,OAAO;IAC5C;;;;OAIG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;;;;;;OAQG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QACf,WAAW,CAAC,EAAE,eAAe,CAAC;QAC9B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;KACxC,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAE5C;;;;;;;;;;OAUG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAE7F;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.js b/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.js new file mode 100644 index 0000000..f8c1767 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=pollOperation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.js.map b/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.js.map new file mode 100644 index 0000000..a1437f2 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/pollOperation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.js","sourceRoot":"","sources":["../../../src/legacy/pollOperation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * PollOperationState contains an opinionated list of the smallest set of properties needed\n * to define any long running operation poller.\n *\n * While the Poller class works as the local control mechanism to start triggering, wait for,\n * and potentially cancel a long running operation, the PollOperationState documents the status\n * of the remote long running operation.\n *\n * It should be updated at least when the operation starts, when it's finished, and when it's cancelled.\n * Though, implementations can have any other number of properties that can be updated by other reasons.\n */\nexport interface PollOperationState {\n /**\n * True if the operation has started.\n */\n isStarted?: boolean;\n /**\n * True if the operation has been completed.\n */\n isCompleted?: boolean;\n /**\n * True if the operation has been cancelled.\n */\n isCancelled?: boolean;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation concluded in a result of an expected type.\n */\n result?: TResult;\n}\n\n/**\n * PollOperation is an interface that defines how to update the local reference of the state of the remote\n * long running operation, just as well as how to request the cancellation of the same operation.\n *\n * It also has a method to serialize the operation so that it can be stored and resumed at any time.\n */\nexport interface PollOperation {\n /**\n * The state of the operation.\n * It will be used to store the basic properties of PollOperationState,\n * plus any custom property that the implementation may require.\n */\n state: TState;\n\n /**\n * Defines how to request the remote service for updates on the status of the long running operation.\n *\n * It optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n * Also optionally receives a \"fireProgress\" function, which, if called, is responsible for triggering the\n * poller's onProgress callbacks.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise>;\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * It returns a promise that should be resolved with an updated version of the poller's operation.\n *\n * @param options - Optional properties passed to the operation's update method.\n *\n * @deprecated `cancel` has been deprecated because it was not implemented.\n */\n cancel(options?: { abortSignal?: AbortSignalLike }): Promise>;\n\n /**\n * Serializes the operation.\n * Useful when wanting to create a poller that monitors an existing operation.\n */\n 
toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/poller.d.ts b/node_modules/@azure/core-lro/dist/esm/legacy/poller.d.ts new file mode 100644 index 0000000..e748266 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/poller.d.ts @@ -0,0 +1,327 @@ +import { PollOperation, PollOperationState } from "./pollOperation.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress } from "../poller/models.js"; +import { PollerLike } from "./models.js"; +/** + * PollProgressCallback is the type of the callback functions sent to onProgress. + * These functions will receive a TState that is defined by your implementation of + * the Poller class. + */ +export type PollProgressCallback = (state: TState) => void; +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export declare class PollerStoppedError extends Error { + constructor(message: string); +} +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +export declare class PollerCancelledError extends Error { + constructor(message: string); +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. 
+ * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +export declare abstract class Poller, TResult> implements PollerLike { + /** controls whether to throw an error if the operation failed or was canceled. */ + protected resolveOnUnsuccessful: boolean; + private stopped; + private resolve?; + private reject?; + private pollOncePromise?; + private cancelPromise?; + private promise; + private pollProgressCallbacks; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + protected operation: PollOperation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation: PollOperation); + /** + * Defines how much to wait between each poll request. + * This has to be implemented by your custom poller. + * + * \@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified. + * This can be used as follows: + * + * ```ts + * import { delay } from "@azure/core-util"; + * + * export class MyPoller extends Poller { + * // The other necessary definitions. 
+ * + * async delay(): Promise { + * const milliseconds = 1000; + * return delay(milliseconds); + * } + * } + * ``` + * + */ + protected abstract delay(): Promise; + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + private startPolling; + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + private pollOnce; + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + private fireProgress; + /** + * Invokes the underlying operation's cancel method. + */ + private cancelOnce; + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + private processUpdatedState; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller from continuing to poll. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. 
+ * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/poller.d.ts.map b/node_modules/@azure/core-lro/dist/esm/legacy/poller.d.ts.map new file mode 100644 index 0000000..8ae1b13 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/poller.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/legacy/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAEzC;;;;GAIG;AACH,MAAM,MAAM,oBAAoB,CAAC,MAAM,IAAI,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;AAEnE;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,KAAK;gBAC/B,OAAO,EAAE,MAAM;CAK5B;AAED;;;GAGG;AACH,qBAAa,oBAAqB,SAAQ,KAAK;gBACjC,OAAO,EAAE,MAAM;CAK5B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AAEH,8BAAsB,MAAM,CAAC,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,EAAE,OAAO,CAC9E,YAAW,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC;IAEtC,kFAAkF;IAClF,SAAS,CAAC,qBAAqB,EAAE,OAAO,CAAS;IACjD,OAAO,CAAC,OAAO,CAAiB;IAChC,OAAO,CAAC,OAAO,CAAC,CAA2B;IAC3C,OAAO,CAAC,MAAM,CAAC,CAAqE;IACpF,OAAO,CAAC,eAAe,CAAC,CAAgB;IACxC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,OAAO,CAAmB;IAClC,OAAO,CAAC,qBAAqB,CAAsC;IAEnE;;;OAGG;IACH,SAAS,CAAC,SAAS,EAAE,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAEpD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;gBACS,SAAS,EAAE,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;IAmBrD;;;;;;;;;;;;;;;;;;;;OAoBG;IACH,SAAS,CAAC,QAAQ,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAEzC;;;OAGG;YACW,YAAY;IAU1B;;;;;;;OAOG;YACW,QAAQ;IAUtB;;;;;;;OAOG;IACH,OAAO,CAAC,YAAY;IAMpB;;OAEG;YACW,UAAU;IAIxB;;;;;;;OAOG;IACI,IAAI,CAAC,OAAO,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAW3E,OAAO,CAAC,mBAAmB;IA0B3B;;OAEG;IACU,aAAa,CACxB,WAAW,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAClD,OAAO,CAAC,OAAO,CAAC;IAUnB;;;;;OAKG;IACI,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB;IAOtE;;OAEG;IACI,MAAM,IAAI,OAAO;IAKxB;;OAEG;IACI,WAAW,IAAI,IAAI;IAS1B;;OAEG;IACI,SAAS,IAAI,OAAO;IAI3B;;;;;;;;OAQG;IACI,eAAe,CAAC,OAAO,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAStF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB,IAAI,MAAM;IAIlC;;;;;OAKG;IACI,SAAS,IAAI,OAAO,GAAG,SAAS;IAKvC;;;OAGG;IACI,QAAQ,IAAI,MAAM;CAG1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/poller.js b/node_modules/@azure/core-lro/dist/esm/legacy/poller.js new file mode 100644 index 0000000..de6f912 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/poller.js @@ -0,0 +1,407 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +export class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. 
+ * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +export class Poller { + /** controls whether to throw an error if the operation failed or was canceled. */ + resolveOnUnsuccessful = false; + stopped = true; + resolve; + reject; + pollOncePromise; + cancelPromise; + promise; + pollProgressCallbacks = []; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + operation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. 
+ * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + } + this.processUpdatedState(); + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error = new PollerCancelledError("Operation was canceled"); + this.reject(error); + throw error; + } + } + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.getResult()); + } + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + // This is needed because the state could have been updated by + // `cancelOperation`, e.g. the operation is canceled or an error occurred. + this.processUpdatedState(); + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. 
+ * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString() { + return this.operation.toString(); + } +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/poller.js.map b/node_modules/@azure/core-lro/dist/esm/legacy/poller.js.map new file mode 100644 index 0000000..d3bec33 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/legacy/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/legacy/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAclC;;;GAGG;AACH,MAAM,OAAO,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAC;QACjC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC;IAC5D,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;QACnC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,SAAS,CAAC,CAAC;IAC9D,CAAC;CACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AACH,gDAAgD;AAChD,MAAM,OAAgB,MAAM;IAG1B,kFAAkF;IACxE,qBAAqB,GAAY,KAAK,CAAC;IACzC,OAAO,GAAY,IAAI,CAAC;IACxB,OAAO,CAA4B;IACnC,MAAM,CAAsE;IAC5E,eAAe,CAAiB;IAChC,aAAa,CAAiB;IAC9B,OAAO,CAAmB;IAC1B,qBAAqB,GAAmC,EAAE,CAAC;IAEnE;;;OAGG;IACO,SAAS,CAAiC;IAEpD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;IACH,YAAY,SAAyC;QACnD,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CACxB,CACE,OAAkC,EAClC,MAA0E,EAC1E,EAAE;YACF,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACvB,CAAC,CACF,CAAC;QACF,mFAAmF;QACnF,sFAAsF;QACtF,mFAAmF;QACnF,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,EAAE;YACtB,yBAAyB;QAC3B,CAAC,CAAC,CAAC;IACL,CAAC;IAyBD;;;OAGG;IACK,KAAK,CAAC,YAAY,CAAC,cAAiD,EAAE;QAC5E,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;QACvB,CAAC;QACD,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YAC3C,MAAM,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;YAC7B,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;QACrB,CAAC;IACH,CAAC;IAED;;;;;;;OAOG;IACK,KAAK,CAAC,QAAQ,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YACnB,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;gBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,YAAY,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;aAC3C,CAAC,CAAC;QACL,CAAC;QACD,IAAI,CAAC,mBAAmB,EAAE,CAAC;IAC7B,CAAC;IAED;;;;;;;OAOG;IACK,YAAY,CAAC,KAAa;QAChC,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,qBAAqB,EAAE,CAAC;YAClD,QAAQ,CAAC,KAAK,CAAC,CAAC;QAClB,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,UAAU,CAAC,UAA6C,EAAE;QACtE,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IACxD,CAAC;IAED;;;;;;;OAOG;IACI,IAAI,CAAC,UAA6C,EAAE;QACzD,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,CAAC;YAC1B,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,oBAAoB,GAAG,GAAS,EAAE;gBACtC,IAAI,CAAC,eAAe,GAAG,SAAS,CAAC;YACnC,CAAC,CAAC;YACF,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,oBAAoB,EAAE,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3F,CAAC;QACD,OAAO,IAAI,CAAC,eAAe,CAAC;IAC9B,CAAC;IAEO,mBAAmB;QACzB,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;YAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC;gBAChC,IAAI,CAAC,MAAO,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;gBACzC,MAAM,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC;YACnC,CAAC;QACH,CAAC;QACD,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC;YACrC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC;gBAChC,MAAM,KAAK,GAAG,IAAI,oBAAoB,CAAC,wBAAwB,CAAC,CAAC;gBACjE,IAAI,CAAC,MAAO,CAAC,KAAK,CAAC,CAAC;gBACpB,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;QACD,IAAI,IAAI,C
AAC,MAAM,EAAE,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YAClC,uEAAuE;YACvE,uEAAuE;YACvE,oEAAoE;YACpE,uEAAuE;YACvE,cAAc;YACd,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,EAAa,CAAC,CAAC;QAC5C,CAAC;IACH,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,aAAa,CACxB,cAAiD,EAAE;QAEnD,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,IAAI,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACpD,CAAC;QACD,8DAA8D;QAC9D,0EAA0E;QAC1E,IAAI,CAAC,mBAAmB,EAAE,CAAC;QAC3B,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;OAKG;IACI,UAAU,CAAC,QAAiC;QACjD,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC1C,OAAO,GAAS,EAAE;YAChB,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC;QACxF,CAAC,CAAC;IACJ,CAAC;IAED;;OAEG;IACI,MAAM;QACX,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;IACxE,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;YAClB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;gBAChB,IAAI,CAAC,MAAM,CAAC,IAAI,kBAAkB,CAAC,gCAAgC,CAAC,CAAC,CAAC;YACxE,CAAC;QACH,CAAC;IACH,CAAC;IAED;;OAEG;IACI,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC;YACxB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QAChD,CAAC;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;QAC3D,CAAC;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB;QACtB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC9B,CAAC;IAED;;;;;OAKG;IACI,SAAS;QACd,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,KAAK,CAAC,MAAM,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperation, PollOperationState } from \"./pollOperation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress } from \"../poller/models.js\";\nimport { PollerLike } from \"./models.js\";\n\n/**\n * PollProgressCallback is the type of the callback functions sent to onProgress.\n * These functions will receive a TState that is defined by your implementation of\n * the Poller class.\n */\nexport type PollProgressCallback = (state: TState) => void;\n\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nexport class PollerStoppedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(this, PollerStoppedError.prototype);\n }\n}\n\n/**\n * When the operation is cancelled, the poller will be rejected with an instance\n * of the PollerCancelledError.\n */\nexport class PollerCancelledError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(this, PollerCancelledError.prototype);\n }\n}\n\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can also request the 
cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nexport abstract class Poller, TResult>\n implements PollerLike\n{\n /** controls whether to throw an error if the operation failed or was canceled. */\n protected resolveOnUnsuccessful: boolean = false;\n private stopped: boolean = true;\n private resolve?: (value: TResult) => void;\n private reject?: (error: PollerStoppedError | PollerCancelledError | Error) => void;\n private pollOncePromise?: Promise;\n private cancelPromise?: Promise;\n private promise: Promise;\n private pollProgressCallbacks: PollProgressCallback[] = [];\n\n /**\n * The poller's operation is available in full to any of the methods of the Poller class\n * and any class extending the Poller class.\n */\n protected operation: PollOperation;\n\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. 
The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n constructor(operation: PollOperation) {\n this.operation = operation;\n this.promise = new Promise(\n (\n resolve: (result: TResult) => void,\n reject: (error: PollerStoppedError | PollerCancelledError | Error) => void,\n ) => {\n this.resolve = resolve;\n this.reject = reject;\n },\n );\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(() => {\n /* intentionally blank */\n });\n }\n\n /**\n * Defines how much to wait between each poll request.\n * This has to be implemented by your custom poller.\n *\n * \\@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified.\n * This can be used as follows:\n *\n * ```ts\n * import { delay } from \"@azure/core-util\";\n *\n * export class MyPoller extends Poller {\n * // The other necessary definitions.\n *\n * async delay(): Promise {\n * const milliseconds = 1000;\n * return delay(milliseconds);\n * }\n * }\n * ```\n *\n */\n protected abstract delay(): Promise;\n\n /**\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n private async startPolling(pollOptions: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (this.stopped) {\n this.stopped = false;\n }\n while (!this.isStopped() && !this.isDone()) {\n await this.poll(pollOptions);\n await this.delay();\n }\n }\n\n /**\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n private async pollOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise 
{\n if (!this.isDone()) {\n this.operation = await this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this),\n });\n }\n this.processUpdatedState();\n }\n\n /**\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n private fireProgress(state: TState): void {\n for (const callback of this.pollProgressCallbacks) {\n callback(state);\n }\n }\n\n /**\n * Invokes the underlying operation's cancel method.\n */\n private async cancelOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n this.operation = await this.operation.cancel(options);\n }\n\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public poll(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n const clearPollOncePromise = (): void => {\n this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n }\n\n private processUpdatedState(): void {\n if (this.operation.state.error) {\n this.stopped = true;\n if (!this.resolveOnUnsuccessful) {\n this.reject!(this.operation.state.error);\n throw this.operation.state.error;\n }\n }\n if (this.operation.state.isCancelled) {\n this.stopped = true;\n if (!this.resolveOnUnsuccessful) {\n const error = new PollerCancelledError(\"Operation was canceled\");\n this.reject!(error);\n throw error;\n }\n }\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.getResult() as TResult);\n }\n }\n\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n public async pollUntilDone(\n pollOptions: { abortSignal?: AbortSignalLike } = {},\n ): Promise {\n if (this.stopped) {\n this.startPolling(pollOptions).catch(this.reject);\n }\n // This is needed because the state could have been updated by\n // `cancelOperation`, e.g. 
the operation is canceled or an error occurred.\n this.processUpdatedState();\n return this.promise;\n }\n\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n public onProgress(callback: (state: TState) => void): CancelOnProgress {\n this.pollProgressCallbacks.push(callback);\n return (): void => {\n this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);\n };\n }\n\n /**\n * Returns true if the poller has finished polling.\n */\n public isDone(): boolean {\n const state: PollOperationState = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n }\n\n /**\n * Stops the poller from continuing to poll.\n */\n public stopPolling(): void {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n }\n\n /**\n * Returns true if the poller is stopped.\n */\n public isStopped(): boolean {\n return this.stopped;\n }\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public cancelOperation(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n } else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n }\n\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... 
More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n public getOperationState(): TState {\n return this.operation.state;\n }\n\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n public getResult(): TResult | undefined {\n const state: PollOperationState = this.operation.state;\n return state.result;\n }\n\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n public toString(): string {\n return this.operation.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/logger.d.ts b/node_modules/@azure/core-lro/dist/esm/logger.d.ts new file mode 100644 index 0000000..52138fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/logger.d.ts @@ -0,0 +1,6 @@ +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=logger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/logger.d.ts.map b/node_modules/@azure/core-lro/dist/esm/logger.d.ts.map new file mode 100644 index 0000000..3763b49 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/logger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,eAAO,MAAM,MAAM,qCAAiC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/logger.js b/node_modules/@azure/core-lro/dist/esm/logger.js new file mode 100644 index 0000000..094bfe4 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/logger.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +/** + * The `@azure/logger` configuration for this package. 
+ * @internal + */ +export const logger = createClientLogger("core-lro"); +//# sourceMappingURL=logger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/logger.js.map b/node_modules/@azure/core-lro/dist/esm/logger.js.map new file mode 100644 index 0000000..8f21af4 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/logger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.js","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;;GAGG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n * @internal\n */\nexport const logger = createClientLogger(\"core-lro\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/package.json b/node_modules/@azure/core-lro/dist/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-lro/dist/esm/poller/constants.d.ts b/node_modules/@azure/core-lro/dist/esm/poller/constants.d.ts new file mode 100644 index 0000000..a4e88ee --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/constants.d.ts @@ -0,0 +1,9 @@ +/** + * The default time interval to wait before sending the next polling request. + */ +export declare const POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. + */ +export declare const terminalStates: string[]; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/constants.d.ts.map b/node_modules/@azure/core-lro/dist/esm/poller/constants.d.ts.map new file mode 100644 index 0000000..206a7fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../src/poller/constants.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,mBAAmB,OAAO,CAAC;AACxC;;GAEG;AACH,eAAO,MAAM,cAAc,UAAsC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/constants.js b/node_modules/@azure/core-lro/dist/esm/poller/constants.js new file mode 100644 index 0000000..e35aad7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/constants.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The default time interval to wait before sending the next polling request. + */ +export const POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. 
+ */ +export const terminalStates = ["succeeded", "canceled", "failed"]; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/constants.js.map b/node_modules/@azure/core-lro/dist/esm/poller/constants.js.map new file mode 100644 index 0000000..539a956 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../src/poller/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,mBAAmB,GAAG,IAAI,CAAC;AACxC;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,WAAW,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The default time interval to wait before sending the next polling request.\n */\nexport const POLL_INTERVAL_IN_MS = 2000;\n/**\n * The closed set of terminal states.\n */\nexport const terminalStates = [\"succeeded\", \"canceled\", \"failed\"];\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/models.d.ts b/node_modules/@azure/core-lro/dist/esm/poller/models.d.ts new file mode 100644 index 0000000..80845ac --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/models.d.ts @@ -0,0 +1,221 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * Configurations for how to poll the operation and to check whether it has + * terminated. + */ +export interface OperationConfig { + /** The operation location */ + operationLocation?: string; + /** The resource location */ + resourceLocation?: string; + /** metadata about the operation */ + metadata?: Record; +} +/** + * The description of an operation. + */ +export interface Operation { + /** + * Sends the initiation request and returns, in addition to the response, the + * operation location, the potential resource location, and a set of metadata. + */ + init: () => Promise; + /** + * Sends the polling request. + */ + poll: (location: string, options?: TOptions) => Promise; +} +/** + * Type of a restorable long-running operation. + */ +export type RestorableOperationState = T & { + /** The operation configuration */ + config: OperationConfig; +}; +/** + * Options for `createPoller`. + */ +export interface CreatePollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + restoreFrom?: string; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: TResponse, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: TResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. + */ + withOperationLocation?: (operationLocation: string) => void; +} +export interface LroError { + code: string; + innererror?: InnerError; + message: string; +} +export interface InnerError { + code: string; + message: string; + innererror?: InnerError; +} +/** + * Options for `buildCreatePoller`. + */ +export interface BuildCreatePollerOptions { + /** + * Gets the status of the operation from the response received when the + * operation was initialized. Note that the operation could be already in + * a terminal state at this time. 
+ */ + getStatusFromInitialResponse: (inputs: { + response: TResponse; + state: RestorableOperationState; + operationLocation?: string; + }) => OperationStatus; + /** + * Gets the status of the operation from a response received when the + * operation was polled. + */ + getStatusFromPollResponse: (response: TResponse, state: RestorableOperationState) => OperationStatus; + /** + * Determines if the input error is an operation error. + */ + isOperationError: (error: Error) => boolean; + /** + * Gets the updated operation location from polling responses. + */ + getOperationLocation?: (response: TResponse, state: RestorableOperationState) => string | undefined; + /** + * Gets the resource location from a response. + */ + getResourceLocation: (response: TResponse, state: RestorableOperationState) => string | undefined; + /** + * Gets from the response the time interval the service suggests the client to + * wait before sending the next polling request. + */ + getPollingInterval?: (response: TResponse) => number | undefined; + /** + * Extracts an error model from a response. + */ + getError?: (response: TResponse) => LroError | undefined; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful: boolean; +} +/** + * The set of possible states an operation can be in at any given time. + */ +export type OperationStatus = "notStarted" | "running" | "succeeded" | "canceled" | "failed"; +/** + * While the poller works as the local control mechanism to start triggering and + * wait for a long-running operation, OperationState documents the status of + * the remote long-running operation. It gets updated after each poll. + */ +export interface OperationState { + /** + * The current status of the operation. + */ + status: OperationStatus; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation produced a result of the expected type. + */ + result?: TResult; +} +/** + * CancelOnProgress is used as the return value of a Poller's onProgress method. + * When a user invokes onProgress, they're required to pass in a function that will be + * called as a callback with the new data received each time the poll operation is updated. + * onProgress returns a function that will prevent any further update to reach the original callback. + */ +export type CancelOnProgress = () => void; +/** + * A simple poller interface. + */ +export interface SimplePollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Returns the state of the operation. 
+ */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +/** + * A state proxy that allows poller implementation to abstract away the operation + * state. This is useful to implement `lroEngine` and `createPoller` in a modular + * way. + */ +export interface StateProxy { + initState: (config: OperationConfig) => RestorableOperationState; + setRunning: (state: TState) => void; + setCanceled: (state: TState) => void; + setResult: (state: TState, result: TResult) => void; + setError: (state: TState, error: Error) => void; + setFailed: (state: TState) => void; + setSucceeded: (state: TState) => void; + isRunning: (state: TState) => boolean; + isCanceled: (state: TState) => boolean; + getResult: (state: TState) => TResult | undefined; + getError: (state: TState) => Error | undefined; + isFailed: (state: TState) => boolean; + isSucceeded: (state: TState) => boolean; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/models.d.ts.map b/node_modules/@azure/core-lro/dist/esm/poller/models.d.ts.map new file mode 100644 index 0000000..6e85a3f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/poller/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE1D;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,6BAA6B;IAC7B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,4BAA4B;IAC5B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,mCAAmC;IACnC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACnC;AAED;;GAEG;AACH,MAAM,WAAW,SAAS,CAAC,SAAS,EAAE,QAAQ;IAC5C;;;OAGG;IACH,IAAI,EAAE,MAAM,OAAO,CACjB,eAAe,GAAG;QAChB,QAAQ,EAAE,SAAS,CAAC;KACrB,CACF,CAAC;IACF;;OAEG;IACH,IAAI,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,QAAQ,KAAK,OAAO,CAAC,SAAS,CAAC,CAAC;CACpE;AAED;;GAEG;AACH,MAAM,MAAM,wBAAwB,CAAC,CAAC,IAAI,CAAC,GAAG;IAC5C,kCAAkC;IAClC,MAAM,EAAE,eAAe,CAAC;CACzB,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,mBAAmB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM;IAC7D;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,KAAK,IAAI,CAAC;IAC/D;;;OAGG;IACH,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,KAAK,IAAI,CAAC;CAC7D;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB,CAAC,SAAS,EAAE,MAAM;IACzD;;;;OAIG;IACH,4BAA4B,EAAE,CAAC,MAAM,EAAE;QACrC,QAAQ,EAAE,SAAS,CAAC;QACpB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;QACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,KAAK,eAAe,CAAC;IACtB;;;OAGG;IACH,yBAAyB,EAAE,CACzB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,eAAe,CAAC;IACrB;;OAEG;IACH,gBAAgB,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IAC5C;;OAEG;IACH,oBAAoB,CAAC,EAAE,CACrB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB;;OAEG;IACH,mBAAmB,EAAE,CACnB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;I
ACxB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,MAAM,GAAG,SAAS,CAAC;IACjE;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,QAAQ,GAAG,SAAS,CAAC;IACzD;;OAEG;IACH,qBAAqB,EAAE,OAAO,CAAC;CAChC;AAED;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG,YAAY,GAAG,SAAS,GAAG,WAAW,GAAG,UAAU,GAAG,QAAQ,CAAC;AAE7F;;;;GAIG;AACH,MAAM,WAAW,cAAc,CAAC,OAAO;IACrC;;OAEG;IACH,MAAM,EAAE,eAAe,CAAC;IACxB;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd;;OAEG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED;;;;;GAKG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC;AAE1C;;GAEG;AACH,MAAM,WAAW,gBAAgB,CAAC,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EAAE,OAAO;IAC/E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE;;OAEG;IACH,aAAa,CAAC,WAAW,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACjF;;;;;OAKG;IACH,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB,CAAC;IAChE;;OAEG;IACH,MAAM,IAAI,OAAO,CAAC;IAClB;;OAEG;IACH,WAAW,IAAI,IAAI,CAAC;IACpB;;OAEG;IACH,SAAS,IAAI,OAAO,CAAC;IACrB;;OAEG;IACH,iBAAiB,IAAI,MAAM,CAAC;IAC5B;;;;;OAKG;IACH,SAAS,IAAI,OAAO,GAAG,SAAS,CAAC;IACjC;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB;AAED;;;;GAIG;AACH,MAAM,WAAW,UAAU,CAAC,MAAM,EAAE,OAAO;IACzC,SAAS,EAAE,CAAC,MAAM,EAAE,eAAe,KAAK,wBAAwB,CAAC,MAAM,CAAC,CAAC;IAEzE,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,KAAK,IAAI,CAAC;IACpD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IAChD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACnC,YAAY,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAEtC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACtC,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACvC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,GAAG,SAAS,CAAC;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,KAAK,GAAG,SAAS,CAAC;IAC/C,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACrC,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;CACzC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/models.js b/node_modules/@azure/core-lro/dist/esm/poller/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/models.js.map b/node_modules/@azure/core-lro/dist/esm/poller/models.js.map new file mode 100644 index 0000000..554eb0c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/poller/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Configurations for how to poll the operation and to check whether it has\n * terminated.\n */\nexport interface OperationConfig {\n /** The operation location */\n operationLocation?: string;\n /** The resource location */\n resourceLocation?: string;\n /** metadata about the operation */\n metadata?: Record;\n}\n\n/**\n * The description of an operation.\n */\nexport interface Operation {\n /**\n * Sends the initiation request and returns, in addition to the response, the\n * operation location, the potential resource location, and a set of metadata.\n */\n init: () => Promise<\n OperationConfig & {\n response: TResponse;\n }\n >;\n /**\n * Sends the polling request.\n */\n poll: (location: string, options?: TOptions) => Promise;\n}\n\n/**\n * Type of a restorable long-running operation.\n */\nexport type RestorableOperationState = T & {\n /** The operation configuration */\n config: OperationConfig;\n};\n\n/**\n * Options for `createPoller`.\n */\nexport interface CreatePollerOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n restoreFrom?: string;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: TResponse, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: TResponse) => void;\n /**\n * A function to be called each time the operation location is updated by the\n * service.\n */\n withOperationLocation?: (operationLocation: string) => void;\n}\n\nexport interface LroError {\n code: string;\n innererror?: InnerError;\n message: string;\n}\n\nexport interface InnerError {\n code: string;\n message: string;\n innererror?: InnerError;\n}\n\n/**\n * Options for `buildCreatePoller`.\n */\nexport interface BuildCreatePollerOptions {\n /**\n * Gets the status of the operation from the response received when the\n * operation was initialized. 
Note that the operation could be already in\n * a terminal state at this time.\n */\n getStatusFromInitialResponse: (inputs: {\n response: TResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n }) => OperationStatus;\n /**\n * Gets the status of the operation from a response received when the\n * operation was polled.\n */\n getStatusFromPollResponse: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n /**\n * Determines if the input error is an operation error.\n */\n isOperationError: (error: Error) => boolean;\n /**\n * Gets the updated operation location from polling responses.\n */\n getOperationLocation?: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n /**\n * Gets the resource location from a response.\n */\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n /**\n * Gets from the response the time interval the service suggests the client to\n * wait before sending the next polling request.\n */\n getPollingInterval?: (response: TResponse) => number | undefined;\n /**\n * Extracts an error model from a response.\n */\n getError?: (response: TResponse) => LroError | undefined;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful: boolean;\n}\n\n/**\n * The set of possible states an operation can be in at any given time.\n */\nexport type OperationStatus = \"notStarted\" | \"running\" | \"succeeded\" | \"canceled\" | \"failed\";\n\n/**\n * While the poller works as the local control mechanism to start triggering and\n * wait for a long-running operation, OperationState documents the status of\n * the remote long-running operation. It gets updated after each poll.\n */\nexport interface OperationState {\n /**\n * The current status of the operation.\n */\n status: OperationStatus;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation produced a result of the expected type.\n */\n result?: TResult;\n}\n\n/**\n * CancelOnProgress is used as the return value of a Poller's onProgress method.\n * When a user invokes onProgress, they're required to pass in a function that will be\n * called as a callback with the new data received each time the poll operation is updated.\n * onProgress returns a function that will prevent any further update to reach the original callback.\n */\nexport type CancelOnProgress = () => void;\n\n/**\n * A simple poller interface.\n */\nexport interface SimplePollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. 
After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Returns the state of the operation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n\n/**\n * A state proxy that allows poller implementation to abstract away the operation\n * state. This is useful to implement `lroEngine` and `createPoller` in a modular\n * way.\n */\nexport interface StateProxy {\n initState: (config: OperationConfig) => RestorableOperationState;\n\n setRunning: (state: TState) => void;\n setCanceled: (state: TState) => void;\n setResult: (state: TState, result: TResult) => void;\n setError: (state: TState, error: Error) => void;\n setFailed: (state: TState) => void;\n setSucceeded: (state: TState) => void;\n\n isRunning: (state: TState) => boolean;\n isCanceled: (state: TState) => boolean;\n getResult: (state: TState) => TResult | undefined;\n getError: (state: TState) => Error | undefined;\n isFailed: (state: TState) => boolean;\n isSucceeded: (state: TState) => boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/operation.d.ts b/node_modules/@azure/core-lro/dist/esm/poller/operation.d.ts new file mode 100644 index 0000000..a23a90f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/operation.d.ts @@ -0,0 +1,40 @@ +import { LroError, Operation, OperationStatus, RestorableOperationState, StateProxy } from "./models.js"; +/** + * Deserializes the state + */ +export declare function deserializeState(serializedState: string): RestorableOperationState; +/** + * Initiates the long-running operation. + */ +export declare function initOperation(inputs: { + init: Operation["init"]; + stateProxy: StateProxy; + getOperationStatus: (inputs: { + response: TResponse; + state: RestorableOperationState; + operationLocation?: string; + }) => OperationStatus; + processResult?: (result: TResponse, state: TState) => TResult; + withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void; + setErrorAsResult: boolean; +}): Promise>; +/** Polls the long-running operation. 
*/ +export declare function pollOperation(inputs: { + poll: Operation["poll"]; + stateProxy: StateProxy; + state: RestorableOperationState; + getOperationStatus: (response: TResponse, state: RestorableOperationState) => OperationStatus; + getResourceLocation: (response: TResponse, state: RestorableOperationState) => string | undefined; + isOperationError: (error: Error) => boolean; + getPollingInterval?: (response: TResponse) => number | undefined; + setDelay: (intervalInMs: number) => void; + getOperationLocation?: (response: TResponse, state: RestorableOperationState) => string | undefined; + withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void; + processResult?: (result: TResponse, state: TState) => TResult; + getError?: (response: TResponse) => LroError | undefined; + updateState?: (state: TState, lastResponse: TResponse) => void; + isDone?: (lastResponse: TResponse, state: TState) => boolean; + setErrorAsResult: boolean; + options?: TOptions; +}): Promise; +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/operation.d.ts.map b/node_modules/@azure/core-lro/dist/esm/poller/operation.d.ts.map new file mode 100644 index 0000000..d8a62db --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../src/poller/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,QAAQ,EAER,SAAS,EACT,eAAe,EACf,wBAAwB,EACxB,UAAU,EACX,MAAM,aAAa,CAAC;AAIrB;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,MAAM,EACrC,eAAe,EAAE,MAAM,GACtB,wBAAwB,CAAC,MAAM,CAAC,CAMlC;AAuGD;;GAEG;AACH,wBAAsB,aAAa,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;IACtE,IAAI,EAAE,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC;IAC5C,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,kBAAkB,EAAE,CAAC,MAAM,EAAE;QAC3B,QAAQ,EAAE,SAAS,CAAC;QACpB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;QACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,KAAK,eAAe,CAAC;IACtB,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,KAAK,IAAI,CAAC;IAChF,gBAAgB,EAAE,OAAO,CAAC;CAC3B,GAAG,OAAO,CAAC,wBAAwB,CAAC,MAAM,CAAC,CAAC,CAqB5C;AA4DD,wCAAwC;AACxC,wBAAsB,aAAa,CAAC,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;IAChF,IAAI,EAAE,SAAS,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC;IAC7C,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,kBAAkB,EAAE,CAClB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,eAAe,CAAC;IACrB,mBAAmB,EAAE,CACnB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB,gBAAgB,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IAC5C,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,MAAM,GAAG,SAAS,CAAC;IACjE,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM,KAAK,IAAI,CAAC;IACzC,oBAAoB,CAAC,EAAE,CACrB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,KAAK,IAAI,CAAC;IAChF,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D,QAAQ,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,QAAQ,GAAG,SAAS,CAAC;IACzD,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,KAAK,IAAI,CAAC;IAC/D,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC7D,gBAAgB,EAAE,OAAO,CAAC;IAC1B,OAAO,CAAC,EAAE,QAAQ,CAAC;CACpB,GAAG,OAAO,CAAC,IAAI,CAAC,CAsDhB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/operation.js 
b/node_modules/@azure/core-lro/dist/esm/poller/operation.js new file mode 100644 index 0000000..869eaa5 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/operation.js @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { logger } from "../logger.js"; +import { terminalStates } from "./constants.js"; +/** + * Deserializes the state + */ +export function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); + } +} +function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error) => { + if (isOperationError(error)) { + stateProxy.setError(state, error); + stateProxy.setFailed(state); + } + throw error; + }; +} +function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; + } + return message + " " + innerMessage; +} +function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); + } + return { + code, + message, + }; +} +function processOperationStatus(result) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError?.(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } + } + if (isDone?.(response, state) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); + } +} +function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; +} +/** + * Initiates the long-running operation. 
+ */ +export async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation?.(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation, + }; + logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; +} +async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError, + })); + const status = getOperationStatus(response, state); + logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status, + }; + } + } + return { response, status }; +} +/** Polls the long-running operation. */ +export async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== undefined) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options, + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult, + }); + if (!terminalStates.includes(status)) { + const intervalInMs = getPollingInterval?.(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation?.(response, state); + if (location !== undefined) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation?.(location, isUpdated); + } + else + withOperationLocation?.(operationLocation, false); + } + updateState?.(state, response); + } +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/operation.js.map b/node_modules/@azure/core-lro/dist/esm/poller/operation.js.map new file mode 100644 index 0000000..849e973 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/operation.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/poller/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAUlC,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAEhD;;GAEG;AACH,MAAM,UAAU,gBAAgB,CAC9B,eAAuB;IAEvB,IAAI,CAAC;QACH,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,KAAK,CAAC;IAC3C,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,IAAI,KAAK,CAAC,sCAAsC,eAAe,EAAE,CAAC,CAAC;IAC3E,CAAC;AACH,CAAC;AAED,SAAS,aAAa,CAAkB,MAIvC;IACC,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,GAAG,MAAM,CAAC;IACvD,OAAO,CAAC,KAAY,EAAE,EAAE;QACtB,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE,CAAC;YAC5B,UAAU,CAAC,QAAQ,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;YAClC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;QAC9B,CAAC;QACD,MAAM,KAAK,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,0BAA0B,CAAC,cAAsB,EAAE,YAAoB;IAC9E,IAAI,OAAO,GAAG,cAAc,CAAC;IAC7B,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;QAC9B,OAAO,GAAG,OAAO,GAAG,GAAG,CAAC;IAC1B,CAAC;IACD,OAAO,OAAO,GAAG,GAAG,GAAG,YAAY,CAAC;AACtC,CAAC;AAED,SAAS,aAAa,CAAC,GAAa;IAIlC,IAAI,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC;IAC1B,IAAI,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;IACpB,IAAI,MAAM,GAAG,GAAiB,CAAC;IAC/B,OAAO,MAAM,CAAC,UAAU,EAAE,CAAC;QACzB,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC;QAC3B,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;QACnB,OAAO,GAAG,0BAA0B,CAAC,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAChE,CAAC;IACD,OAAO;QACL,IAAI;QACJ,OAAO;KACR,CAAC;AACJ,CAAC;AAED,SAAS,sBAAsB,CAA6B,MAS3D;IACC,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,QAAQ,EAAE,gBAAgB,EAAE,GAC9F,MAAM,CAAC;IACT,QAAQ,MAAM,EAAE,CAAC;QACf,KAAK,WAAW,CAAC,CAAC,CAAC;YACjB,UAAU,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;YAC/B,MAAM;QACR,CAAC;QACD,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,MAAM,GAAG,GAAG,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC;YACjC,IAAI,OAAO,GAAG,EAAE,CAAC;YACjB,IAAI,GAAG,EAAE,CAAC;gBACR,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;gBAC7C,OAAO,GAAG,KAAK,IAAI,KAAK,OAAO,EAAE,CAAC;YACpC,CAAC;YACD,MAAM,MAAM,GAAG,wCAAwC,OAAO,EAAE,CAAC;YACjE,UAAU,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;YAC9C,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YAC5B,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACvB,MAAM;QACR,CAAC;QACD,KAAK,UAAU,CAAC,CAAC,CAAC;YAChB,UAAU,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;YAC9B,MAAM;QACR,CAAC;IACH,CAAC;IACD,IACE,MAAM,EAAE,CAAC,QAAQ,EAAE,KAAK,CAAC;QACzB,CAAC,MAAM,KAAK,SAAS;YACnB,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,EACxF,CAAC;QACD,UAAU,CAAC,SAAS,CAClB,KAAK,EACL,WAAW,CAAC;YACV,QAAQ;YACR,KAAK;YACL,aAAa;SACd,CAAC,CACH,CAAC;IACJ,CAAC;AACH,CAAC;AAED,SAAS,WAAW,CAA6B,MAIhD;IACC,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;IAClD,OAAO,aAAa,CAAC,CAAC,CAAC,aAAa,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAE,QAA+B,CAAC;AAC3F,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAA6B,MAW/D;IACC,MAAM,EACJ,IAAI,EACJ,UAAU,EACV,aAAa,EACb,kBAAkB,EAClB,qBAAqB,EACrB,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,MAAM,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC;IACjF,IAAI,iBAAiB;QAAE,qBAAqB,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;IACzE,MAAM,MAAM,GAAG;QACb,QAAQ;QACR,iBAAiB;QACjB,gBAAgB;KACjB,CAAC;IACF,MAAM,CAAC,OAAO,CAAC,6BAA6B,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,KAAK,GAAG,UAAU,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC3C,MAAM,MAAM,GAAG,kBAAkB,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAC1E,sBAAsB,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,gBAAgB,EAAE,aAAa,EAAE,CAAC,CAAC;IACjG,OAAO,KAAK,CAAC;AACf,CAAC;AAED,KAAK,UAAU,mBAAmB,CAAuC,MAexE;IAIC,MAAM,EACJ,IAAI,EACJ,KAAK,EACL,UAAU,EACV,iBAAiB,EACjB,kBAAkB,EAClB,mBAAmB,EACnB,gBAAgB,EAChB,OAAO,GAC
R,GAAG,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC,KAAK,CAC3D,aAAa,CAAC;QACZ,KAAK;QACL,UAAU;QACV,gBAAgB;KACjB,CAAC,CACH,CAAC;IACF,MAAM,MAAM,GAAG,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IACnD,MAAM,CAAC,OAAO,CACZ,iCACE,KAAK,CAAC,MAAM,CAAC,iBACf,yBAAyB,MAAM,uBAC7B,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAChD,EAAE,CACH,CAAC;IACF,IAAI,MAAM,KAAK,WAAW,EAAE,CAAC;QAC3B,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QAC9D,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,OAAO;gBACL,QAAQ,EAAE,MAAM,IAAI,CAAC,gBAAgB,CAAC,CAAC,KAAK,CAC1C,aAAa,CAAC,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,CAAC,CACvD;gBACD,MAAM;aACP,CAAC;QACJ,CAAC;IACH,CAAC;IACD,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;AAC9B,CAAC;AAED,wCAAwC;AACxC,MAAM,CAAC,KAAK,UAAU,aAAa,CAAuC,MA0BzE;IACC,MAAM,EACJ,IAAI,EACJ,KAAK,EACL,UAAU,EACV,OAAO,EACP,kBAAkB,EAClB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,EAChB,qBAAqB,EACrB,kBAAkB,EAClB,aAAa,EACb,QAAQ,EACR,WAAW,EACX,QAAQ,EACR,MAAM,EACN,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,MAAM,EAAE,iBAAiB,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC;IAC3C,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;QACpC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,mBAAmB,CAAC;YACrD,IAAI;YACJ,kBAAkB;YAClB,KAAK;YACL,UAAU;YACV,iBAAiB;YACjB,mBAAmB;YACnB,gBAAgB;YAChB,OAAO;SACR,CAAC,CAAC;QACH,sBAAsB,CAAC;YACrB,MAAM;YACN,QAAQ;YACR,KAAK;YACL,UAAU;YACV,MAAM;YACN,aAAa;YACb,QAAQ;YACR,gBAAgB;SACjB,CAAC,CAAC;QAEH,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;YACrC,MAAM,YAAY,GAAG,kBAAkB,EAAE,CAAC,QAAQ,CAAC,CAAC;YACpD,IAAI,YAAY;gBAAE,QAAQ,CAAC,YAAY,CAAC,CAAC;YACzC,MAAM,QAAQ,GAAG,oBAAoB,EAAE,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YACzD,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;gBAC3B,MAAM,SAAS,GAAG,iBAAiB,KAAK,QAAQ,CAAC;gBACjD,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,QAAQ,CAAC;gBAC1C,qBAAqB,EAAE,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;YAC/C,CAAC;;gBAAM,qBAAqB,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC3D,CAAC;QACD,WAAW,EAAE,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IACjC,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LroError,\n InnerError,\n Operation,\n OperationStatus,\n RestorableOperationState,\n StateProxy,\n} from \"./models.js\";\nimport { logger } from \"../logger.js\";\nimport { terminalStates } from \"./constants.js\";\n\n/**\n * Deserializes the state\n */\nexport function deserializeState(\n serializedState: string,\n): RestorableOperationState {\n try {\n return JSON.parse(serializedState).state;\n } catch (e) {\n throw new Error(`Unable to deserialize input state: ${serializedState}`);\n }\n}\n\nfunction setStateError(inputs: {\n state: TState;\n stateProxy: StateProxy;\n isOperationError: (error: Error) => boolean;\n}): (error: Error) => never {\n const { state, stateProxy, isOperationError } = inputs;\n return (error: Error) => {\n if (isOperationError(error)) {\n stateProxy.setError(state, error);\n stateProxy.setFailed(state);\n }\n throw error;\n };\n}\n\nfunction appendReadableErrorMessage(currentMessage: string, innerMessage: string): string {\n let message = currentMessage;\n if (message.slice(-1) !== \".\") {\n message = message + \".\";\n }\n return message + \" \" + innerMessage;\n}\n\nfunction simplifyError(err: LroError): {\n code: string;\n message: string;\n} {\n let message = err.message;\n let code = err.code;\n let curErr = err as InnerError;\n while (curErr.innererror) {\n curErr = curErr.innererror;\n code = curErr.code;\n message = appendReadableErrorMessage(message, curErr.message);\n }\n return {\n code,\n message,\n };\n}\n\nfunction processOperationStatus(result: {\n status: 
OperationStatus;\n response: TResponse;\n state: RestorableOperationState;\n stateProxy: StateProxy;\n processResult?: (result: TResponse, state: TState) => TResult;\n getError?: (response: TResponse) => LroError | undefined;\n isDone?: (lastResponse: TResponse, state: TState) => boolean;\n setErrorAsResult: boolean;\n}): void {\n const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } =\n result;\n switch (status) {\n case \"succeeded\": {\n stateProxy.setSucceeded(state);\n break;\n }\n case \"failed\": {\n const err = getError?.(response);\n let postfix = \"\";\n if (err) {\n const { code, message } = simplifyError(err);\n postfix = `. ${code}. ${message}`;\n }\n const errStr = `The long-running operation has failed${postfix}`;\n stateProxy.setError(state, new Error(errStr));\n stateProxy.setFailed(state);\n logger.warning(errStr);\n break;\n }\n case \"canceled\": {\n stateProxy.setCanceled(state);\n break;\n }\n }\n if (\n isDone?.(response, state) ||\n (isDone === undefined &&\n [\"succeeded\", \"canceled\"].concat(setErrorAsResult ? [] : [\"failed\"]).includes(status))\n ) {\n stateProxy.setResult(\n state,\n buildResult({\n response,\n state,\n processResult,\n }),\n );\n }\n}\n\nfunction buildResult(inputs: {\n response: TResponse;\n state: TState;\n processResult?: (result: TResponse, state: TState) => TResult;\n}): TResult {\n const { processResult, response, state } = inputs;\n return processResult ? processResult(response, state) : (response as unknown as TResult);\n}\n\n/**\n * Initiates the long-running operation.\n */\nexport async function initOperation(inputs: {\n init: Operation[\"init\"];\n stateProxy: StateProxy;\n getOperationStatus: (inputs: {\n response: TResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n }) => OperationStatus;\n processResult?: (result: TResponse, state: TState) => TResult;\n withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void;\n setErrorAsResult: boolean;\n}): Promise> {\n const {\n init,\n stateProxy,\n processResult,\n getOperationStatus,\n withOperationLocation,\n setErrorAsResult,\n } = inputs;\n const { operationLocation, resourceLocation, metadata, response } = await init();\n if (operationLocation) withOperationLocation?.(operationLocation, false);\n const config = {\n metadata,\n operationLocation,\n resourceLocation,\n };\n logger.verbose(`LRO: Operation description:`, config);\n const state = stateProxy.initState(config);\n const status = getOperationStatus({ response, state, operationLocation });\n processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult });\n return state;\n}\n\nasync function pollOperationHelper(inputs: {\n poll: Operation[\"poll\"];\n stateProxy: StateProxy;\n state: RestorableOperationState;\n operationLocation: string;\n getOperationStatus: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n isOperationError: (error: Error) => boolean;\n options?: TOptions;\n}): Promise<{\n status: OperationStatus;\n response: TResponse;\n}> {\n const {\n poll,\n state,\n stateProxy,\n operationLocation,\n getOperationStatus,\n getResourceLocation,\n isOperationError,\n options,\n } = inputs;\n const response = await poll(operationLocation, options).catch(\n setStateError({\n state,\n stateProxy,\n isOperationError,\n }),\n );\n const status = 
getOperationStatus(response, state);\n logger.verbose(\n `LRO: Status:\\n\\tPolling from: ${\n state.config.operationLocation\n }\\n\\tOperation status: ${status}\\n\\tPolling status: ${\n terminalStates.includes(status) ? \"Stopped\" : \"Running\"\n }`,\n );\n if (status === \"succeeded\") {\n const resourceLocation = getResourceLocation(response, state);\n if (resourceLocation !== undefined) {\n return {\n response: await poll(resourceLocation).catch(\n setStateError({ state, stateProxy, isOperationError }),\n ),\n status,\n };\n }\n }\n return { response, status };\n}\n\n/** Polls the long-running operation. */\nexport async function pollOperation(inputs: {\n poll: Operation[\"poll\"];\n stateProxy: StateProxy;\n state: RestorableOperationState;\n getOperationStatus: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n isOperationError: (error: Error) => boolean;\n getPollingInterval?: (response: TResponse) => number | undefined;\n setDelay: (intervalInMs: number) => void;\n getOperationLocation?: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void;\n processResult?: (result: TResponse, state: TState) => TResult;\n getError?: (response: TResponse) => LroError | undefined;\n updateState?: (state: TState, lastResponse: TResponse) => void;\n isDone?: (lastResponse: TResponse, state: TState) => boolean;\n setErrorAsResult: boolean;\n options?: TOptions;\n}): Promise {\n const {\n poll,\n state,\n stateProxy,\n options,\n getOperationStatus,\n getResourceLocation,\n getOperationLocation,\n isOperationError,\n withOperationLocation,\n getPollingInterval,\n processResult,\n getError,\n updateState,\n setDelay,\n isDone,\n setErrorAsResult,\n } = inputs;\n const { operationLocation } = state.config;\n if (operationLocation !== undefined) {\n const { response, status } = await pollOperationHelper({\n poll,\n getOperationStatus,\n state,\n stateProxy,\n operationLocation,\n getResourceLocation,\n isOperationError,\n options,\n });\n processOperationStatus({\n status,\n response,\n state,\n stateProxy,\n isDone,\n processResult,\n getError,\n setErrorAsResult,\n });\n\n if (!terminalStates.includes(status)) {\n const intervalInMs = getPollingInterval?.(response);\n if (intervalInMs) setDelay(intervalInMs);\n const location = getOperationLocation?.(response, state);\n if (location !== undefined) {\n const isUpdated = operationLocation !== location;\n state.config.operationLocation = location;\n withOperationLocation?.(location, isUpdated);\n } else withOperationLocation?.(operationLocation, false);\n }\n updateState?.(state, response);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/poller.d.ts b/node_modules/@azure/core-lro/dist/esm/poller/poller.d.ts new file mode 100644 index 0000000..dc9e8fa --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/poller.d.ts @@ -0,0 +1,9 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { BuildCreatePollerOptions, CreatePollerOptions, Operation, OperationState, SimplePollerLike } from "./models.js"; +/** + * Returns a poller factory. 
+ */ +export declare function buildCreatePoller>(inputs: BuildCreatePollerOptions): (lro: Operation, options?: CreatePollerOptions) => Promise>; +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/poller.d.ts.map b/node_modules/@azure/core-lro/dist/esm/poller/poller.d.ts.map new file mode 100644 index 0000000..b300b2e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/poller.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/poller/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EACL,wBAAwB,EACxB,mBAAmB,EACnB,SAAS,EACT,cAAc,EAEd,gBAAgB,EAEjB,MAAM,aAAa,CAAC;AA8BrB;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EAC1F,MAAM,EAAE,wBAAwB,CAAC,SAAS,EAAE,MAAM,CAAC,GAClD,CACD,GAAG,EAAE,SAAS,CAAC,SAAS,EAAE;IAAE,WAAW,CAAC,EAAE,eAAe,CAAA;CAAE,CAAC,EAC5D,OAAO,CAAC,EAAE,mBAAmB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,CAAC,KACtD,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CA6J9C"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/poller.js b/node_modules/@azure/core-lro/dist/esm/poller/poller.js new file mode 100644 index 0000000..5fc9b09 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/poller.js @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { deserializeState, initOperation, pollOperation } from "./operation.js"; +import { POLL_INTERVAL_IN_MS } from "./constants.js"; +import { delay } from "@azure/core-util"; +const createStateProxy = () => ({ + /** + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. + */ + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => (state.status = "canceled"), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.status = "running"), + setSucceeded: (state) => (state.status = "succeeded"), + setFailed: (state) => (state.status = "failed"), + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded", +}); +/** + * Returns a poller factory. + */ +export function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback + ? (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() + : undefined; + const state = restoreFrom + ? 
deserializeState(restoreFrom) + : await initOperation({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful, + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === undefined, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state, + }), + onProgress: (callback) => { + const s = Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => (resultPromise ??= (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + // In the future we can use AbortSignal.any() instead + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal?.aborted) { + abortController.abort(); + } + else if (!abortSignal.aborted) { + inputAbortSignal?.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await delay(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } + } + } + finally { + inputAbortSignal?.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } + else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); + } + } + })().finally(() => { + resultPromise = undefined; + })), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } + else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + await pollOperation({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful, + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + }, + }; + return poller; + }; +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/poller.js.map b/node_modules/@azure/core-lro/dist/esm/poller/poller.js.map new file mode 100644 index 0000000..34902cf --- /dev/null +++ b/node_modules/@azure/core-lro/dist/esm/poller/poller.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/poller/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAYlC,OAAO,EAAE,gBAAgB,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAChF,OAAO,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AACrD,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC,MAAM,gBAAgB,GAGlB,GAAG,EAAE,CAAC,CAAC;IACT;;;;OAIG;IACH,SAAS,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,CAAQ;IAC7D,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,UAAU,CAAC;IACnD,QAAQ,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;IACjD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;IACrD,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,SAAS,CAAC;IACjD,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,WAAW,CAAC;IACrD,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,QAAQ,CAAC;IAE/C,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK;IAChC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM;IAClC,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,UAAU;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,QAAQ;IAC9C,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,SAAS;IAChD,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,WAAW;CACrD,CAAC,CAAC;AAEH;;GAEG;AACH,MAAM,UAAU,iBAAiB,CAC/B,MAAmD;IAKnD,MAAM,EACJ,oBAAoB,EACpB,4BAA4B,EAC5B,yBAAyB,EACzB,gBAAgB,EAChB,mBAAmB,EACnB,kBAAkB,EAClB,QAAQ,EACR,qBAAqB,GACtB,GAAG,MAAM,CAAC;IACX,OAAO,KAAK,EACV,EAAE,IAAI,EAAE,IAAI,EAA2D,EACvE,OAAyD,EACzD,EAAE;QACF,MAAM,EACJ,aAAa,EACb,WAAW,EACX,qBAAqB,EAAE,6BAA6B,EACpD,YAAY,GAAG,mBAAmB,EAClC,WAAW,GACZ,GAAG,OAAO,IAAI,EAAE,CAAC;QAClB,MAAM,UAAU,GAAG,gBAAgB,EAAmB,CAAC;QACvD,MAAM,qBAAqB,GAAG,6BAA6B;YACzD,CAAC,CAAC,CAAC,GAAG,EAAE;gBACJ,IAAI,MAAM,GAAG,KAAK,CAAC;gBACnB,OAAO,CAAC,iBAAyB,EAAE,SAAkB,EAAE,EAAE;oBACvD,IAAI,SAAS;wBAAE,6BAA6B,CAAC,iBAAiB,CAAC,CAAC;yBAC3D,IAAI,CAAC,MAAM;wBAAE,6BAA6B,CAAC,iBAAiB,CAAC,CAAC;oBACnE,MAAM,GAAG,IAAI,CAAC;gBAChB,CAAC,CAAC;YACJ,CAAC,CAAC,EAAE;YACN,CAAC,CAAC,SAAS,CAAC;QACd,MAAM,KAAK,GAAqC,WAAW;YACzD,CAAC,CAAC,gBAAgB,CAAC,WAAW,CAAC;YAC/B,CAAC,CAAC,MAAM,aAAa,CAAC;gBAClB,IAAI;gBACJ,UAAU;gBACV,aAAa;gBACb,kBAAkB,EAAE,4BAA4B;gBAChD,qBAAqB;gBACrB,gBAAgB,EAAE,CAAC,qBAAqB;aACzC,CAAC,CAAC;QACP,IAAI,aAA2C,CAAC;QAChD,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAG9C,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAmB,CAAC;QAC5C,MAAM,oBAAoB,GAAG,KAAK,IAAmB,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;QAC1F,MAAM,YAAY,GAAG,wBAAwB,CAAC;QAC9C,IAAI,uBAAuB,GAAG,YAAY,CAAC;QAE3C,MAAM,MAAM,GAAsC;YAChD,iBAAiB,EAAE,GAAG,EAAE,CAAC,KAAK;YAC9B,SAAS,EAAE,GAAG,EAAE,CAAC,KAAK,CAAC,MAAM;YAC7B,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,WAAW,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC;YACxE,SAAS,EAAE,GAAG,EAAE,CAAC,aAAa,KAAK,SAAS;YAC5C,WAAW,EAAE,GAAG,EAAE;gBAChB,eAAe,CAAC,KAAK,EAAE,CAAC;YAC1B,CAAC;YACD,QAAQ,EAAE,GAAG,EAAE,CACb,IAAI,CAAC,SAAS,CAAC;gBACb,KAAK;aACN,CAAC;YACJ,UAAU,EAAE,CAAC,QAAiC,EAAE,EAAE;gBAChD,MAAM,CAAC,GAAG,MAAM,EAAE,CAAC;gBACnB,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;gBAC1B,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;YACD,aAAa,EAAE,CAAC,WAA+C,EAAE,EAAE,CACjE,CAAC,aAAa,KAAK,CAAC,KAAK,IAAI,EAAE;gBAC7B,MAAM,EAAE,WAAW,EAAE,gBAAgB,EAAE,GAAG,WAAW,IAAI,EAAE,CAAC;gBAC5D,qDAAqD;gBACrD,SAAS,aAAa;oBACpB,eAAe,CAAC,KAAK,EAAE,CAAC;gBAC1B,CAAC;gBACD,MAAM,WAAW,GAAG,eAAe,CAAC,MAAM,CAAC;gBAC3C,IAAI,gBAAgB,EAAE,OAAO,EAAE,CAAC;oBAC9B,eAAe,CAAC,KAAK,EAAE,CAAC;gBAC1B,CAAC;qBAAM,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;oBAChC,gBAAgB,EAAE,gBAAgB,CAAC,OAAO,E
AAE,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7E,CAAC;gBAED,IAAI,CAAC;oBACH,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC;wBACrB,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,CAAC,CAAC;wBACnC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC;4BACxB,MAAM,KAAK,CAAC,uBAAuB,EAAE,EAAE,WAAW,EAAE,CAAC,CAAC;4BACtD,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,CAAC,CAAC;wBACrC,CAAC;oBACH,CAAC;gBACH,CAAC;wBAAS,CAAC;oBACT,gBAAgB,EAAE,mBAAmB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;gBAChE,CAAC;gBACD,IAAI,qBAAqB,EAAE,CAAC;oBAC1B,OAAO,MAAM,CAAC,SAAS,EAAa,CAAC;gBACvC,CAAC;qBAAM,CAAC;oBACN,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,WAAW;4BACd,OAAO,MAAM,CAAC,SAAS,EAAa,CAAC;wBACvC,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;wBACpB,KAAK,YAAY,CAAC;wBAClB,KAAK,SAAS;4BACZ,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAC;oBACvE,CAAC;gBACH,CAAC;YACH,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE;gBAChB,aAAa,GAAG,SAAS,CAAC;YAC5B,CAAC,CAAC,CAAC;YACL,KAAK,CAAC,IAAI,CAAC,WAA+C;gBACxD,IAAI,qBAAqB,EAAE,CAAC;oBAC1B,IAAI,MAAM,CAAC,MAAM,EAAE;wBAAE,OAAO;gBAC9B,CAAC;qBAAM,CAAC;oBACN,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,WAAW;4BACd,OAAO;wBACT,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;oBACtB,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,CAAC;oBAClB,IAAI;oBACJ,KAAK;oBACL,UAAU;oBACV,oBAAoB;oBACpB,gBAAgB;oBAChB,qBAAqB;oBACrB,kBAAkB;oBAClB,kBAAkB,EAAE,yBAAyB;oBAC7C,mBAAmB;oBACnB,aAAa;oBACb,QAAQ;oBACR,WAAW;oBACX,OAAO,EAAE,WAAW;oBACpB,QAAQ,EAAE,CAAC,gBAAgB,EAAE,EAAE;wBAC7B,uBAAuB,GAAG,gBAAgB,CAAC;oBAC7C,CAAC;oBACD,gBAAgB,EAAE,CAAC,qBAAqB;iBACzC,CAAC,CAAC;gBACH,MAAM,oBAAoB,EAAE,CAAC;gBAC7B,IAAI,CAAC,qBAAqB,EAAE,CAAC;oBAC3B,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;oBACtB,CAAC;gBACH,CAAC;YACH,CAAC;SACF,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n BuildCreatePollerOptions,\n CreatePollerOptions,\n Operation,\n OperationState,\n RestorableOperationState,\n SimplePollerLike,\n StateProxy,\n} from \"./models.js\";\nimport { deserializeState, initOperation, pollOperation } from \"./operation.js\";\nimport { POLL_INTERVAL_IN_MS } from \"./constants.js\";\nimport { delay } from \"@azure/core-util\";\n\nconst createStateProxy: >() => StateProxy<\n TState,\n TResult\n> = () => ({\n /**\n * The state at this point is created to be of type OperationState.\n * It will be updated later to be of type TState when the\n * customer-provided callback, `updateState`, is called during polling.\n */\n initState: (config) => ({ status: \"running\", config }) as any,\n setCanceled: (state) => (state.status = \"canceled\"),\n setError: (state, error) => (state.error = error),\n setResult: (state, result) => (state.result = result),\n setRunning: (state) => (state.status = \"running\"),\n setSucceeded: (state) => (state.status = \"succeeded\"),\n setFailed: (state) => (state.status = \"failed\"),\n\n getError: (state) => state.error,\n getResult: (state) => state.result,\n isCanceled: (state) => state.status === \"canceled\",\n isFailed: (state) => state.status === \"failed\",\n isRunning: (state) => state.status === \"running\",\n isSucceeded: (state) => state.status === \"succeeded\",\n});\n\n/**\n * Returns a poller factory.\n */\nexport function buildCreatePoller>(\n inputs: BuildCreatePollerOptions,\n): (\n lro: Operation,\n options?: 
CreatePollerOptions,\n) => Promise> {\n const {\n getOperationLocation,\n getStatusFromInitialResponse,\n getStatusFromPollResponse,\n isOperationError,\n getResourceLocation,\n getPollingInterval,\n getError,\n resolveOnUnsuccessful,\n } = inputs;\n return async (\n { init, poll }: Operation,\n options?: CreatePollerOptions,\n ) => {\n const {\n processResult,\n updateState,\n withOperationLocation: withOperationLocationCallback,\n intervalInMs = POLL_INTERVAL_IN_MS,\n restoreFrom,\n } = options || {};\n const stateProxy = createStateProxy();\n const withOperationLocation = withOperationLocationCallback\n ? (() => {\n let called = false;\n return (operationLocation: string, isUpdated: boolean) => {\n if (isUpdated) withOperationLocationCallback(operationLocation);\n else if (!called) withOperationLocationCallback(operationLocation);\n called = true;\n };\n })()\n : undefined;\n const state: RestorableOperationState = restoreFrom\n ? deserializeState(restoreFrom)\n : await initOperation({\n init,\n stateProxy,\n processResult,\n getOperationStatus: getStatusFromInitialResponse,\n withOperationLocation,\n setErrorAsResult: !resolveOnUnsuccessful,\n });\n let resultPromise: Promise | undefined;\n const abortController = new AbortController();\n // Progress handlers\n type Handler = (state: TState) => void;\n const handlers = new Map();\n const handleProgressEvents = async (): Promise => handlers.forEach((h) => h(state));\n const cancelErrMsg = \"Operation was canceled\";\n let currentPollIntervalInMs = intervalInMs;\n\n const poller: SimplePollerLike = {\n getOperationState: () => state,\n getResult: () => state.result,\n isDone: () => [\"succeeded\", \"failed\", \"canceled\"].includes(state.status),\n isStopped: () => resultPromise === undefined,\n stopPolling: () => {\n abortController.abort();\n },\n toString: () =>\n JSON.stringify({\n state,\n }),\n onProgress: (callback: (state: TState) => void) => {\n const s = Symbol();\n handlers.set(s, callback);\n return () => handlers.delete(s);\n },\n pollUntilDone: (pollOptions?: { abortSignal?: AbortSignalLike }) =>\n (resultPromise ??= (async () => {\n const { abortSignal: inputAbortSignal } = pollOptions || {};\n // In the future we can use AbortSignal.any() instead\n function abortListener(): void {\n abortController.abort();\n }\n const abortSignal = abortController.signal;\n if (inputAbortSignal?.aborted) {\n abortController.abort();\n } else if (!abortSignal.aborted) {\n inputAbortSignal?.addEventListener(\"abort\", abortListener, { once: true });\n }\n\n try {\n if (!poller.isDone()) {\n await poller.poll({ abortSignal });\n while (!poller.isDone()) {\n await delay(currentPollIntervalInMs, { abortSignal });\n await poller.poll({ abortSignal });\n }\n }\n } finally {\n inputAbortSignal?.removeEventListener(\"abort\", abortListener);\n }\n if (resolveOnUnsuccessful) {\n return poller.getResult() as TResult;\n } else {\n switch (state.status) {\n case \"succeeded\":\n return poller.getResult() as TResult;\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n case \"notStarted\":\n case \"running\":\n throw new Error(`Polling completed without succeeding or failing`);\n }\n }\n })().finally(() => {\n resultPromise = undefined;\n })),\n async poll(pollOptions?: { abortSignal?: AbortSignalLike }): Promise {\n if (resolveOnUnsuccessful) {\n if (poller.isDone()) return;\n } else {\n switch (state.status) {\n case \"succeeded\":\n return;\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case 
\"failed\":\n throw state.error;\n }\n }\n await pollOperation({\n poll,\n state,\n stateProxy,\n getOperationLocation,\n isOperationError,\n withOperationLocation,\n getPollingInterval,\n getOperationStatus: getStatusFromPollResponse,\n getResourceLocation,\n processResult,\n getError,\n updateState,\n options: pollOptions,\n setDelay: (pollIntervalInMs) => {\n currentPollIntervalInMs = pollIntervalInMs;\n },\n setErrorAsResult: !resolveOnUnsuccessful,\n });\n await handleProgressEvents();\n if (!resolveOnUnsuccessful) {\n switch (state.status) {\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n }\n }\n },\n };\n return poller;\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/models.d.ts b/node_modules/@azure/core-lro/dist/react-native/http/models.d.ts new file mode 100644 index 0000000..c6ebb32 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/models.d.ts @@ -0,0 +1,106 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { LroError } from "../poller/models.js"; +/** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ +export type LroResourceLocationConfig = "azure-async-operation" | "location" | "original-uri"; +/** + * The type of a LRO response body. This is just a convenience type for checking the status of the operation. + */ +export interface ResponseBody extends Record { + /** The status of the operation. */ + status?: unknown; + /** The state of the provisioning process */ + provisioningState?: unknown; + /** The properties of the provisioning process */ + properties?: { + provisioningState?: unknown; + } & Record; + /** The error if the operation failed */ + error?: Partial; + /** The location of the created resource */ + resourceLocation?: string; +} +/** + * Simple type of the raw response. + */ +export interface RawResponse { + /** The HTTP status code */ + statusCode: number; + /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */ + headers: { + [headerName: string]: string; + }; + /** The parsed response body */ + body?: unknown; +} +/** + * The type of the response of a LRO. + */ +export interface LroResponse { + /** The flattened response */ + flatResponse: T; + /** The raw response */ + rawResponse: RawResponse; +} +/** + * Description of a long running operation. + */ +export interface LongRunningOperation { + /** + * The request path. This should be set if the operation is a PUT and needs + * to poll from the same request path. + */ + requestPath?: string; + /** + * The HTTP request method. This should be set if the operation is a PUT or a + * DELETE. + */ + requestMethod?: string; + /** + * A function that can be used to send initial request to the service. + */ + sendInitialRequest: () => Promise>; + /** + * A function that can be used to poll for the current status of a long running operation. + */ + sendPollRequest: (path: string, options?: { + abortSignal?: AbortSignalLike; + }) => Promise>; +} +export type HttpOperationMode = "OperationLocation" | "ResourceLocation" | "Body"; +/** + * Options for `createPoller`. + */ +export interface CreateHttpPollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. 
+ */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + restoreFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + resourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, response: LroResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. + */ + withOperationLocation?: (operationLocation: string) => void; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful?: boolean; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/models.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/http/models.d.ts.map new file mode 100644 index 0000000..8c69ed8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/http/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAG/C;;GAEG;AACH,MAAM,MAAM,yBAAyB,GAAG,uBAAuB,GAAG,UAAU,GAAG,cAAc,CAAC;AAE9F;;GAEG;AAEH,MAAM,WAAW,YAAa,SAAQ,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAC3D,mCAAmC;IACnC,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,4CAA4C;IAC5C,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,iDAAiD;IACjD,UAAU,CAAC,EAAE;QAAE,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvE,wCAAwC;IACxC,KAAK,CAAC,EAAE,OAAO,CAAC,QAAQ,CAAC,CAAC;IAC1B,2CAA2C;IAC3C,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B,2BAA2B;IAC3B,UAAU,EAAE,MAAM,CAAC;IACnB,iJAAiJ;IACjJ,OAAO,EAAE;QACP,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAC;KAC9B,CAAC;IACF,+BAA+B;IAC/B,IAAI,CAAC,EAAE,OAAO,CAAC;CAChB;AAGD;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,CAAC,GAAG,OAAO;IACtC,6BAA6B;IAC7B,YAAY,EAAE,CAAC,CAAC;IAChB,uBAAuB;IACvB,WAAW,EAAE,WAAW,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB,CAAC,CAAC,GAAG,OAAO;IAC/C;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;OAEG;IACH,kBAAkB,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;IACxD;;OAEG;IACH,eAAe,EAAE,CACf,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,KACxC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;CAC9B;AAED,MAAM,MAAM,iBAAiB,GAAG,mBAAmB,GAAG,kBAAkB,GAAG,MAAM,CAAC;AAElF;;GAEG;AACH,MAAM,WAAW,uBAAuB,CAAC,OAAO,EAAE,MAAM;IACtD;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;IACnD;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,WAAW,KAAK,IAAI,CAAC;IAC7D;;;OAGG;IACH,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,KAAK,IAAI,CAAC;IAC5D;;OAEG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/models.js b/node_modules/@azure/core-lro/dist/react-native/http/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/models.js.map b/node_modules/@azure/core-lro/dist/react-native/http/models.js.map new file mode 100644 index 0000000..ccc0d92 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/http/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { LroError } from \"../poller/models.js\";\n\n// TODO: rename to ResourceLocationConfig\n/**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\nexport type LroResourceLocationConfig = \"azure-async-operation\" | \"location\" | \"original-uri\";\n\n/**\n * The type of a LRO response body. This is just a convenience type for checking the status of the operation.\n */\n\nexport interface ResponseBody extends Record {\n /** The status of the operation. */\n status?: unknown;\n /** The state of the provisioning process */\n provisioningState?: unknown;\n /** The properties of the provisioning process */\n properties?: { provisioningState?: unknown } & Record;\n /** The error if the operation failed */\n error?: Partial;\n /** The location of the created resource */\n resourceLocation?: string;\n}\n\n/**\n * Simple type of the raw response.\n */\nexport interface RawResponse {\n /** The HTTP status code */\n statusCode: number;\n /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */\n headers: {\n [headerName: string]: string;\n };\n /** The parsed response body */\n body?: unknown;\n}\n\n// TODO: rename to OperationResponse\n/**\n * The type of the response of a LRO.\n */\nexport interface LroResponse {\n /** The flattened response */\n flatResponse: T;\n /** The raw response */\n rawResponse: RawResponse;\n}\n\n/**\n * Description of a long running operation.\n */\nexport interface LongRunningOperation {\n /**\n * The request path. This should be set if the operation is a PUT and needs\n * to poll from the same request path.\n */\n requestPath?: string;\n /**\n * The HTTP request method. 
This should be set if the operation is a PUT or a\n * DELETE.\n */\n requestMethod?: string;\n /**\n * A function that can be used to send initial request to the service.\n */\n sendInitialRequest: () => Promise>;\n /**\n * A function that can be used to poll for the current status of a long running operation.\n */\n sendPollRequest: (\n path: string,\n options?: { abortSignal?: AbortSignalLike },\n ) => Promise>;\n}\n\nexport type HttpOperationMode = \"OperationLocation\" | \"ResourceLocation\" | \"Body\";\n\n/**\n * Options for `createPoller`.\n */\nexport interface CreateHttpPollerOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n restoreFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n resourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, response: LroResponse) => void;\n /**\n * A function to be called each time the operation location is updated by the\n * service.\n */\n withOperationLocation?: (operationLocation: string) => void;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/operation.d.ts b/node_modules/@azure/core-lro/dist/react-native/http/operation.d.ts new file mode 100644 index 0000000..c06358e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/operation.d.ts @@ -0,0 +1,47 @@ +import { HttpOperationMode, LongRunningOperation, LroResourceLocationConfig, LroResponse, RawResponse } from "./models.js"; +import { LroError, OperationConfig, OperationStatus, RestorableOperationState, StateProxy } from "../poller/models.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +export declare function inferLroMode(inputs: { + rawResponse: RawResponse; + requestPath?: string; + requestMethod?: string; + resourceLocationConfig?: LroResourceLocationConfig; +}): (OperationConfig & { + mode: HttpOperationMode; +}) | undefined; +export declare function parseRetryAfter({ rawResponse }: LroResponse): number | undefined; +export declare function getErrorFromResponse(response: LroResponse): LroError | undefined; +export declare function getStatusFromInitialResponse(inputs: { + response: LroResponse; + state: RestorableOperationState; + operationLocation?: string; +}): OperationStatus; +/** + * Initiates the long-running operation. 
+ */ +export declare function initHttpOperation(inputs: { + stateProxy: StateProxy; + resourceLocationConfig?: LroResourceLocationConfig; + processResult?: (result: unknown, state: TState) => TResult; + setErrorAsResult: boolean; + lro: LongRunningOperation; +}): Promise>; +export declare function getOperationLocation({ rawResponse }: LroResponse, state: RestorableOperationState): string | undefined; +export declare function getOperationStatus({ rawResponse }: LroResponse, state: RestorableOperationState): OperationStatus; +export declare function getResourceLocation(res: LroResponse, state: RestorableOperationState): string | undefined; +export declare function isOperationError(e: Error): boolean; +/** Polls the long-running operation. */ +export declare function pollHttpOperation(inputs: { + lro: LongRunningOperation; + stateProxy: StateProxy; + processResult?: (result: unknown, state: TState) => TResult; + updateState?: (state: TState, lastResponse: LroResponse) => void; + isDone?: (lastResponse: LroResponse, state: TState) => boolean; + setDelay: (intervalInMs: number) => void; + options?: { + abortSignal?: AbortSignalLike; + }; + state: RestorableOperationState; + setErrorAsResult: boolean; +}): Promise; +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/operation.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/http/operation.d.ts.map new file mode 100644 index 0000000..39d679a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../src/http/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,yBAAyB,EACzB,WAAW,EACX,WAAW,EAEZ,MAAM,aAAa,CAAC;AACrB,OAAO,EACL,QAAQ,EACR,eAAe,EACf,eAAe,EACf,wBAAwB,EACxB,UAAU,EACX,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AA6D1D,wBAAgB,YAAY,CAAC,MAAM,EAAE;IACnC,WAAW,EAAE,WAAW,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;CACpD,GAAG,CAAC,eAAe,GAAG;IAAE,IAAI,EAAE,iBAAiB,CAAA;CAAE,CAAC,GAAG,SAAS,CA+B9D;AAqDD,wBAAgB,eAAe,CAAC,CAAC,EAAE,EAAE,WAAW,EAAE,EAAE,WAAW,CAAC,CAAC,CAAC,GAAG,MAAM,GAAG,SAAS,CAUtF;AAED,wBAAgB,oBAAoB,CAAC,CAAC,EAAE,QAAQ,EAAE,WAAW,CAAC,CAAC,CAAC,GAAG,QAAQ,GAAG,SAAS,CAetF;AAWD,wBAAgB,4BAA4B,CAAC,MAAM,EAAE,MAAM,EAAE;IAC3D,QAAQ,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B,GAAG,eAAe,CAelB;AAED;;GAEG;AACH,wBAAsB,iBAAiB,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;IAC/D,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,sBAAsB,CAAC,EAAE,yBAAyB,CAAC;IACnD,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D,gBAAgB,EAAE,OAAO,CAAC;IAC1B,GAAG,EAAE,oBAAoB,CAAC;CAC3B,GAAG,OAAO,CAAC,wBAAwB,CAAC,MAAM,CAAC,CAAC,CAyB5C;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EACzC,EAAE,WAAW,EAAE,EAAE,WAAW,EAC5B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,MAAM,GAAG,SAAS,CAiBpB;AAED,wBAAgB,kBAAkB,CAAC,MAAM,EACvC,EAAE,WAAW,EAAE,EAAE,WAAW,EAC5B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,eAAe,CAejB;AASD,wBAAgB,mBAAmB,CAAC,MAAM,EACxC,GAAG,EAAE,WAAW,EAChB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,GACtC,MAAM,GAAG,SAAS,CAMpB;AAED,wBAAgB,gBAAgB,CAAC,CAAC,EAAE,KAAK,GAAG,OAAO,CAElD;AAED,wCAAwC;AACxC,wBAAsB,iBAAiB,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE;IAC/D,GAAG,EAAE,oBAAoB,CAAC;IAC1B,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D,WAAW,CAAC,EAAE,CAAC,KAAK,
EAAE,MAAM,EAAE,YAAY,EAAE,WAAW,KAAK,IAAI,CAAC;IACjE,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC/D,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM,KAAK,IAAI,CAAC;IACzC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,CAAC;IAC5C,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,gBAAgB,EAAE,OAAO,CAAC;CAC3B,GAAG,OAAO,CAAC,IAAI,CAAC,CAkChB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/operation.js b/node_modules/@azure/core-lro/dist/react-native/http/operation.js new file mode 100644 index 0000000..0383844 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/operation.js @@ -0,0 +1,280 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { initOperation, pollOperation } from "../poller/operation.js"; +import { logger } from "../logger.js"; +function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation ?? azureAsyncOperation; +} +function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(inputs) { + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return undefined; + } + case "PATCH": { + return getDefault() ?? requestPath; + } + default: { + return getDefault(); + } + } + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return undefined; + } + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; + } + } + } +} +export function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod?.toLocaleUpperCase(); + if (pollingUrl !== undefined) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig, + }), + }; + } + else if (location !== undefined) { + return { + mode: "ResourceLocation", + operationLocation: location, + }; + } + else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath, + }; + } + else { + return undefined; + } +} +function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== undefined) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. 
Check your Azure subscription or resource status for more information.`); + } + switch (status?.toLocaleLowerCase()) { + case undefined: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; + } + } +} +function getStatus(rawResponse) { + const { status } = rawResponse.body ?? {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function getProvisioningState(rawResponse) { + const { properties, provisioningState } = rawResponse.body ?? {}; + const status = properties?.provisioningState ?? provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } + else if (statusCode < 300) { + return "succeeded"; + } + else { + return "failed"; + } +} +export function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter)) + : retryAfterInSeconds * 1000; + } + return undefined; +} +export function getErrorFromResponse(response) { + const error = accessBodyProperty(response, "error"); + if (!error) { + logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; + } + if (!error.code || !error.message) { + logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; + } + return error; +} +function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return undefined; +} +export function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case undefined: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } + } + const status = helper(); + return status === "running" && operationLocation === undefined ? "succeeded" : status; +} +/** + * Initiates the long-running operation. + */ +export async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return initOperation({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return { + response, + operationLocation: config?.operationLocation, + resourceLocation: config?.resourceLocation, + ...(config?.mode ? { metadata: { mode: config.mode } } : {}), + }; + }, + stateProxy, + processResult: processResult + ? 
({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult, + }); +} +export function getOperationLocation({ rawResponse }, state) { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse), + }); + } + case "ResourceLocation": { + return getLocationHeader(rawResponse); + } + case "Body": + default: { + return undefined; + } + } +} +export function getOperationStatus({ rawResponse }, state) { + const mode = state.config.metadata?.["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + } +} +function accessBodyProperty({ flatResponse, rawResponse }, prop) { + return flatResponse?.[prop] ?? rawResponse.body?.[prop]; +} +export function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; + } + return state.config.resourceLocation; +} +export function isOperationError(e) { + return e.name === "RestError"; +} +/** Polls the long-running operation. */ +export async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; + return pollOperation({ + state, + stateProxy, + setDelay, + processResult: processResult + ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) + : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. 
+ */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult, + }); +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/operation.js.map b/node_modules/@azure/core-lro/dist/react-native/http/operation.js.map new file mode 100644 index 0000000..fa0dde7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/http/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAiBlC,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,wBAAwB,CAAC;AAEtE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,SAAS,8BAA8B,CAAC,MAGvC;IACC,MAAM,EAAE,mBAAmB,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IAC1D,OAAO,iBAAiB,IAAI,mBAAmB,CAAC;AAClD,CAAC;AAED,SAAS,iBAAiB,CAAC,WAAwB;IACjD,OAAO,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AACzC,CAAC;AAED,SAAS,0BAA0B,CAAC,WAAwB;IAC1D,OAAO,WAAW,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,4BAA4B,CAAC,WAAwB;IAC5D,OAAO,WAAW,CAAC,OAAO,CAAC,sBAAsB,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,oBAAoB,CAAC,MAK7B;IACC,MAAM,EAAE,QAAQ,EAAE,aAAa,EAAE,WAAW,EAAE,sBAAsB,EAAE,GAAG,MAAM,CAAC;IAChF,QAAQ,aAAa,EAAE,CAAC;QACtB,KAAK,KAAK,CAAC,CAAC,CAAC;YACX,OAAO,WAAW,CAAC;QACrB,CAAC;QACD,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,OAAO,SAAS,CAAC;QACnB,CAAC;QACD,KAAK,OAAO,CAAC,CAAC,CAAC;YACb,OAAO,UAAU,EAAE,IAAI,WAAW,CAAC;QACrC,CAAC;QACD,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,UAAU,EAAE,CAAC;QACtB,CAAC;IACH,CAAC;IAED,SAAS,UAAU;QACjB,QAAQ,sBAAsB,EAAE,CAAC;YAC/B,KAAK,uBAAuB,CAAC,CAAC,CAAC;gBAC7B,OAAO,SAAS,CAAC;YACnB,CAAC;YACD,KAAK,cAAc,CAAC,CAAC,CAAC;gBACpB,OAAO,WAAW,CAAC;YACrB,CAAC;YACD,KAAK,UAAU,CAAC;YAChB,OAAO,CAAC,CAAC,CAAC;gBACR,OAAO,QAAQ,CAAC;YAClB,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,MAK5B;IACC,MAAM,EAAE,WAAW,EAAE,aAAa,EAAE,WAAW,EAAE,sBAAsB,EAAE,GAAG,MAAM,CAAC;IACnF,MAAM,iBAAiB,GAAG,0BAA0B,CAAC,WAAW,CAAC,CAAC;IAClE,MAAM,mBAAmB,GAAG,4BAA4B,CAAC,WAAW,CAAC,CAAC;IACtE,MAAM,UAAU,GAAG,8BAA8B,CAAC,EAAE,iBAAiB,EAAE,mBAAmB,EAAE,CAAC,CAAC;IAC9F,MAAM,QAAQ,GAAG,iBAAiB,CAAC,WAAW,CAAC,CAAC;IAChD,MAAM,uBAAuB,GAAG,aAAa,EAAE,iBAAiB,EAAE,CAAC;IACnE,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;QAC7B,OAAO;YACL,IAAI,EAAE,mBAAmB;YACzB,iBAAiB,EAAE,UAAU;YAC7B,gBAAgB,EAAE,oBAAoB,CAAC;gBACrC,aAAa,EAAE,uBAAuB;gBACtC,QAAQ;gBACR,WAAW;gBACX,sBAAsB;aACvB,CAAC;SACH,CAAC;IACJ,CAAC;SAAM,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,kBAAkB;YACxB,iBAAiB,EAAE,QAAQ;SAC5B,CAAC;IACJ,CAAC;SAAM,IAAI,uBAAuB,KAAK,KAAK,IAAI,WAAW,EAAE,CAAC;QAC5D,OAAO;YACL,IAAI,EAAE,MAAM;YACZ,iBAAiB,EAAE,WAAW;SAC/B,CAAC;IACJ,CAAC;SAAM,CAAC;QACN,OAAO,SAAS,CAAC;IACnB,CAAC;AACH,CAAC;AAED,SAAS,eAAe,CAAC,MAA+C;IACtE,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,GAAG,MAAM,CAAC;IACtC,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACvD,MAAM,IAAI,KAAK,CACb,oGAAoG,MAAM,sIAAsI,CACjP,CAAC;IACJ,CAAC;IACD,QAAQ,MAAM,EAAE,iBAAiB,EAAE,EAAE,CAAC;QACpC,KAAK,SAAS;YACZ,OAAO,iBAAiB,CAAC,UAAU,CAAC,CAAC;QACvC,KAAK,WAAW;YACd,OAAO,WAAW,CAAC;QACrB,KAAK,QAAQ;YACX,OAAO,QAAQ,CAAC;QAClB,KAAK,SAAS,CAAC;QACf,KAAK,UAAU,CAAC;QAChB,KAAK,SAAS,CAAC;QACf,KAAK,WAAW,CAAC;QACjB,KAAK,YAAY;YACf,OAAO,SAAS,CAAC;QACnB,KAAK,UAAU,CAAC;QAChB,KAAK,WAAW;YACd,OAAO,UAAU,CAAC;QACpB,OAAO,CAAC,CAAC,CAAC;YACR,MAAM,CAAC,OAAO,CAAC,uCAAuC,MAAM,EAAE,CAAC,CAAC;YAChE,OAAO,MAAyB,CAAC;QACnC,CAAC;IACH,CAAC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,WAAwB;IACzC,MAAM,EAAE,MAAM,EAAE,GAAI,WAAW,CAAC,IAAqB,IAAI,EAAE,CAAC;IAC5D,OAAO,eAAe,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,SAAS,oBAAoB,CAAC,WAAwB;IACpD,MAAM,EA
AE,UAAU,EAAE,iBAAiB,EAAE,GAAI,WAAW,CAAC,IAAqB,IAAI,EAAE,CAAC;IACnF,MAAM,MAAM,GAAG,UAAU,EAAE,iBAAiB,IAAI,iBAAiB,CAAC;IAClE,OAAO,eAAe,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB;IAC3C,IAAI,UAAU,KAAK,GAAG,EAAE,CAAC;QACvB,OAAO,SAAS,CAAC;IACnB,CAAC;SAAM,IAAI,UAAU,GAAG,GAAG,EAAE,CAAC;QAC5B,OAAO,WAAW,CAAC;IACrB,CAAC;SAAM,CAAC;QACN,OAAO,QAAQ,CAAC;IAClB,CAAC;AACH,CAAC;AAED,MAAM,UAAU,eAAe,CAAI,EAAE,WAAW,EAAkB;IAChE,MAAM,UAAU,GAAuB,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;IAC1E,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;QAC7B,wEAAwE;QACxE,MAAM,mBAAmB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;QACjD,OAAO,KAAK,CAAC,mBAAmB,CAAC;YAC/B,CAAC,CAAC,gCAAgC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,CAAC;YACxD,CAAC,CAAC,mBAAmB,GAAG,IAAI,CAAC;IACjC,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAI,QAAwB;IAC9D,MAAM,KAAK,GAAG,kBAAkB,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACpD,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,MAAM,CAAC,OAAO,CACZ,yFAAyF,CAC1F,CAAC;QACF,OAAO;IACT,CAAC;IACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC;QAClC,MAAM,CAAC,OAAO,CACZ,iHAAiH,CAClH,CAAC;QACF,OAAO;IACT,CAAC;IACD,OAAO,KAAiB,CAAC;AAC3B,CAAC;AAED,SAAS,gCAAgC,CAAC,cAAoB;IAC5D,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,MAAM,cAAc,GAAG,cAAc,CAAC,OAAO,EAAE,CAAC;IAChD,IAAI,OAAO,GAAG,cAAc,EAAE,CAAC;QAC7B,OAAO,cAAc,GAAG,OAAO,CAAC;IAClC,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,4BAA4B,CAAS,MAIpD;IACC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IACtD,SAAS,MAAM;QACb,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;QAC7C,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,SAAS;gBACZ,OAAO,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC5D,KAAK,MAAM;gBACT,OAAO,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YAC7C;gBACE,OAAO,SAAS,CAAC;QACrB,CAAC;IACH,CAAC;IACD,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC;IACxB,OAAO,MAAM,KAAK,SAAS,IAAI,iBAAiB,KAAK,SAAS,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC;AACxF,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAkB,MAMxD;IACC,MAAM,EAAE,UAAU,EAAE,sBAAsB,EAAE,aAAa,EAAE,GAAG,EAAE,gBAAgB,EAAE,GAAG,MAAM,CAAC;IAC5F,OAAO,aAAa,CAAC;QACnB,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,kBAAkB,EAAE,CAAC;YAChD,MAAM,MAAM,GAAG,YAAY,CAAC;gBAC1B,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,aAAa,EAAE,GAAG,CAAC,aAAa;gBAChC,sBAAsB;aACvB,CAAC,CAAC;YACH,OAAO;gBACL,QAAQ;gBACR,iBAAiB,EAAE,MAAM,EAAE,iBAAiB;gBAC5C,gBAAgB,EAAE,MAAM,EAAE,gBAAgB;gBAC1C,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aAC7D,CAAC;QACJ,CAAC;QACD,UAAU;QACV,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC;YACjE,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;QACjD,kBAAkB,EAAE,4BAA4B;QAChD,gBAAgB;KACjB,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,oBAAoB,CAClC,EAAE,WAAW,EAAe,EAC5B,KAAuC;IAEvC,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7C,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,OAAO,8BAA8B,CAAC;gBACpC,iBAAiB,EAAE,0BAA0B,CAAC,WAAW,CAAC;gBAC1D,mBAAmB,EAAE,4BAA4B,CAAC,WAAW,CAAC;aAC/D,CAAC,CAAC;QACL,CAAC;QACD,KAAK,kBAAkB,CAAC,CAAC,CAAC;YACxB,OAAO,iBAAiB,CAAC,WAAW,CAAC,CAAC;QACxC,CAAC;QACD,KAAK,MAAM,CAAC;QACZ,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,SAAS,CAAC;QACnB,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,kBAAkB,CAChC,EAAE,WAAW,EAAe,EAC5B,KAAuC;IAEvC,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7C,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,OAAO,SAAS,CAAC,WAAW,CAAC,CAAC;QAChC,CAAC;QACD,KAAK,kBAAkB,CAAC,CAAC
,CAAC;YACxB,OAAO,iBAAiB,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;QACnD,CAAC;QACD,KAAK,MAAM,CAAC,CAAC,CAAC;YACZ,OAAO,oBAAoB,CAAC,WAAW,CAAC,CAAC;QAC3C,CAAC;QACD;YACE,MAAM,IAAI,KAAK,CAAC,8CAA8C,IAAI,EAAE,CAAC,CAAC;IAC1E,CAAC;AACH,CAAC;AAED,SAAS,kBAAkB,CACzB,EAAE,YAAY,EAAE,WAAW,EAAe,EAC1C,IAAO;IAEP,OAAQ,YAA6B,EAAE,CAAC,IAAI,CAAC,IAAK,WAAW,CAAC,IAAqB,EAAE,CAAC,IAAI,CAAC,CAAC;AAC9F,CAAC;AAED,MAAM,UAAU,mBAAmB,CACjC,GAAgB,EAChB,KAAuC;IAEvC,MAAM,GAAG,GAAG,kBAAkB,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC;IACxD,IAAI,GAAG,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE,CAAC;QACnC,KAAK,CAAC,MAAM,CAAC,gBAAgB,GAAG,GAAG,CAAC;IACtC,CAAC;IACD,OAAO,KAAK,CAAC,MAAM,CAAC,gBAAgB,CAAC;AACvC,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,CAAQ;IACvC,OAAO,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC;AAChC,CAAC;AAED,wCAAwC;AACxC,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAkB,MAUxD;IACC,MAAM,EACJ,GAAG,EACH,UAAU,EACV,OAAO,EACP,aAAa,EACb,WAAW,EACX,QAAQ,EACR,KAAK,EACL,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,OAAO,aAAa,CAAC;QACnB,KAAK;QACL,UAAU;QACV,QAAQ;QACR,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,UAAU,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,UAAU,CAAC;YAC3E,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;QACjD,QAAQ,EAAE,oBAAoB;QAC9B,WAAW;QACX,kBAAkB,EAAE,eAAe;QACnC,oBAAoB;QACpB,kBAAkB;QAClB,gBAAgB;QAChB,mBAAmB;QACnB,OAAO;QACP;;;WAGG;QACH,IAAI,EAAE,KAAK,EAAE,QAAgB,EAAE,YAAgD,EAAE,EAAE,CACjF,GAAG,CAAC,eAAe,CAAC,QAAQ,EAAE,YAAY,CAAC;QAC7C,gBAAgB;KACjB,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n HttpOperationMode,\n LongRunningOperation,\n LroResourceLocationConfig,\n LroResponse,\n RawResponse,\n ResponseBody,\n} from \"./models.js\";\nimport {\n LroError,\n OperationConfig,\n OperationStatus,\n RestorableOperationState,\n StateProxy,\n} from \"../poller/models.js\";\nimport { initOperation, pollOperation } from \"../poller/operation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { logger } from \"../logger.js\";\n\nfunction getOperationLocationPollingUrl(inputs: {\n operationLocation?: string;\n azureAsyncOperation?: string;\n}): string | undefined {\n const { azureAsyncOperation, operationLocation } = inputs;\n return operationLocation ?? azureAsyncOperation;\n}\n\nfunction getLocationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"location\"];\n}\n\nfunction getOperationLocationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"operation-location\"];\n}\n\nfunction getAzureAsyncOperationHeader(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"azure-asyncoperation\"];\n}\n\nfunction findResourceLocation(inputs: {\n requestMethod?: string;\n location?: string;\n requestPath?: string;\n resourceLocationConfig?: LroResourceLocationConfig;\n}): string | undefined {\n const { location, requestMethod, requestPath, resourceLocationConfig } = inputs;\n switch (requestMethod) {\n case \"PUT\": {\n return requestPath;\n }\n case \"DELETE\": {\n return undefined;\n }\n case \"PATCH\": {\n return getDefault() ?? 
requestPath;\n }\n default: {\n return getDefault();\n }\n }\n\n function getDefault() {\n switch (resourceLocationConfig) {\n case \"azure-async-operation\": {\n return undefined;\n }\n case \"original-uri\": {\n return requestPath;\n }\n case \"location\":\n default: {\n return location;\n }\n }\n }\n}\n\nexport function inferLroMode(inputs: {\n rawResponse: RawResponse;\n requestPath?: string;\n requestMethod?: string;\n resourceLocationConfig?: LroResourceLocationConfig;\n}): (OperationConfig & { mode: HttpOperationMode }) | undefined {\n const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs;\n const operationLocation = getOperationLocationHeader(rawResponse);\n const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse);\n const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation });\n const location = getLocationHeader(rawResponse);\n const normalizedRequestMethod = requestMethod?.toLocaleUpperCase();\n if (pollingUrl !== undefined) {\n return {\n mode: \"OperationLocation\",\n operationLocation: pollingUrl,\n resourceLocation: findResourceLocation({\n requestMethod: normalizedRequestMethod,\n location,\n requestPath,\n resourceLocationConfig,\n }),\n };\n } else if (location !== undefined) {\n return {\n mode: \"ResourceLocation\",\n operationLocation: location,\n };\n } else if (normalizedRequestMethod === \"PUT\" && requestPath) {\n return {\n mode: \"Body\",\n operationLocation: requestPath,\n };\n } else {\n return undefined;\n }\n}\n\nfunction transformStatus(inputs: { status: unknown; statusCode: number }): OperationStatus {\n const { status, statusCode } = inputs;\n if (typeof status !== \"string\" && status !== undefined) {\n throw new Error(\n `Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`,\n );\n }\n switch (status?.toLocaleLowerCase()) {\n case undefined:\n return toOperationStatus(statusCode);\n case \"succeeded\":\n return \"succeeded\";\n case \"failed\":\n return \"failed\";\n case \"running\":\n case \"accepted\":\n case \"started\":\n case \"canceling\":\n case \"cancelling\":\n return \"running\";\n case \"canceled\":\n case \"cancelled\":\n return \"canceled\";\n default: {\n logger.verbose(`LRO: unrecognized operation status: ${status}`);\n return status as OperationStatus;\n }\n }\n}\n\nfunction getStatus(rawResponse: RawResponse): OperationStatus {\n const { status } = (rawResponse.body as ResponseBody) ?? {};\n return transformStatus({ status, statusCode: rawResponse.statusCode });\n}\n\nfunction getProvisioningState(rawResponse: RawResponse): OperationStatus {\n const { properties, provisioningState } = (rawResponse.body as ResponseBody) ?? {};\n const status = properties?.provisioningState ?? 
provisioningState;\n return transformStatus({ status, statusCode: rawResponse.statusCode });\n}\n\nfunction toOperationStatus(statusCode: number): OperationStatus {\n if (statusCode === 202) {\n return \"running\";\n } else if (statusCode < 300) {\n return \"succeeded\";\n } else {\n return \"failed\";\n }\n}\n\nexport function parseRetryAfter({ rawResponse }: LroResponse): number | undefined {\n const retryAfter: string | undefined = rawResponse.headers[\"retry-after\"];\n if (retryAfter !== undefined) {\n // Retry-After header value is either in HTTP date format, or in seconds\n const retryAfterInSeconds = parseInt(retryAfter);\n return isNaN(retryAfterInSeconds)\n ? calculatePollingIntervalFromDate(new Date(retryAfter))\n : retryAfterInSeconds * 1000;\n }\n return undefined;\n}\n\nexport function getErrorFromResponse(response: LroResponse): LroError | undefined {\n const error = accessBodyProperty(response, \"error\");\n if (!error) {\n logger.warning(\n `The long-running operation failed but there is no error property in the response's body`,\n );\n return;\n }\n if (!error.code || !error.message) {\n logger.warning(\n `The long-running operation failed but the error property in the response's body doesn't contain code or message`,\n );\n return;\n }\n return error as LroError;\n}\n\nfunction calculatePollingIntervalFromDate(retryAfterDate: Date): number | undefined {\n const timeNow = Math.floor(new Date().getTime());\n const retryAfterTime = retryAfterDate.getTime();\n if (timeNow < retryAfterTime) {\n return retryAfterTime - timeNow;\n }\n return undefined;\n}\n\nexport function getStatusFromInitialResponse(inputs: {\n response: LroResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n}): OperationStatus {\n const { response, state, operationLocation } = inputs;\n function helper(): OperationStatus {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case undefined:\n return toOperationStatus(response.rawResponse.statusCode);\n case \"Body\":\n return getOperationStatus(response, state);\n default:\n return \"running\";\n }\n }\n const status = helper();\n return status === \"running\" && operationLocation === undefined ? \"succeeded\" : status;\n}\n\n/**\n * Initiates the long-running operation.\n */\nexport async function initHttpOperation(inputs: {\n stateProxy: StateProxy;\n resourceLocationConfig?: LroResourceLocationConfig;\n processResult?: (result: unknown, state: TState) => TResult;\n setErrorAsResult: boolean;\n lro: LongRunningOperation;\n}): Promise> {\n const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs;\n return initOperation({\n init: async () => {\n const response = await lro.sendInitialRequest();\n const config = inferLroMode({\n rawResponse: response.rawResponse,\n requestPath: lro.requestPath,\n requestMethod: lro.requestMethod,\n resourceLocationConfig,\n });\n return {\n response,\n operationLocation: config?.operationLocation,\n resourceLocation: config?.resourceLocation,\n ...(config?.mode ? { metadata: { mode: config.mode } } : {}),\n };\n },\n stateProxy,\n processResult: processResult\n ? 
({ flatResponse }, state) => processResult(flatResponse, state)\n : ({ flatResponse }) => flatResponse as TResult,\n getOperationStatus: getStatusFromInitialResponse,\n setErrorAsResult,\n });\n}\n\nexport function getOperationLocation(\n { rawResponse }: LroResponse,\n state: RestorableOperationState,\n): string | undefined {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case \"OperationLocation\": {\n return getOperationLocationPollingUrl({\n operationLocation: getOperationLocationHeader(rawResponse),\n azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse),\n });\n }\n case \"ResourceLocation\": {\n return getLocationHeader(rawResponse);\n }\n case \"Body\":\n default: {\n return undefined;\n }\n }\n}\n\nexport function getOperationStatus(\n { rawResponse }: LroResponse,\n state: RestorableOperationState,\n): OperationStatus {\n const mode = state.config.metadata?.[\"mode\"];\n switch (mode) {\n case \"OperationLocation\": {\n return getStatus(rawResponse);\n }\n case \"ResourceLocation\": {\n return toOperationStatus(rawResponse.statusCode);\n }\n case \"Body\": {\n return getProvisioningState(rawResponse);\n }\n default:\n throw new Error(`Internal error: Unexpected operation mode: ${mode}`);\n }\n}\n\nfunction accessBodyProperty

(\n { flatResponse, rawResponse }: LroResponse,\n prop: P,\n): ResponseBody[P] {\n return (flatResponse as ResponseBody)?.[prop] ?? (rawResponse.body as ResponseBody)?.[prop];\n}\n\nexport function getResourceLocation(\n res: LroResponse,\n state: RestorableOperationState,\n): string | undefined {\n const loc = accessBodyProperty(res, \"resourceLocation\");\n if (loc && typeof loc === \"string\") {\n state.config.resourceLocation = loc;\n }\n return state.config.resourceLocation;\n}\n\nexport function isOperationError(e: Error): boolean {\n return e.name === \"RestError\";\n}\n\n/** Polls the long-running operation. */\nexport async function pollHttpOperation(inputs: {\n lro: LongRunningOperation;\n stateProxy: StateProxy;\n processResult?: (result: unknown, state: TState) => TResult;\n updateState?: (state: TState, lastResponse: LroResponse) => void;\n isDone?: (lastResponse: LroResponse, state: TState) => boolean;\n setDelay: (intervalInMs: number) => void;\n options?: { abortSignal?: AbortSignalLike };\n state: RestorableOperationState;\n setErrorAsResult: boolean;\n}): Promise {\n const {\n lro,\n stateProxy,\n options,\n processResult,\n updateState,\n setDelay,\n state,\n setErrorAsResult,\n } = inputs;\n return pollOperation({\n state,\n stateProxy,\n setDelay,\n processResult: processResult\n ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState)\n : ({ flatResponse }) => flatResponse as TResult,\n getError: getErrorFromResponse,\n updateState,\n getPollingInterval: parseRetryAfter,\n getOperationLocation,\n getOperationStatus,\n isOperationError,\n getResourceLocation,\n options,\n /**\n * The expansion here is intentional because `lro` could be an object that\n * references an inner this, so we need to preserve a reference to it.\n */\n poll: async (location: string, inputOptions?: { abortSignal?: AbortSignalLike }) =>\n lro.sendPollRequest(location, inputOptions),\n setErrorAsResult,\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/poller.d.ts b/node_modules/@azure/core-lro/dist/react-native/http/poller.d.ts new file mode 100644 index 0000000..5199c5c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/poller.d.ts @@ -0,0 +1,11 @@ +import { LongRunningOperation } from "./models.js"; +import { OperationState, SimplePollerLike } from "../poller/models.js"; +import { CreateHttpPollerOptions } from "./models.js"; +/** + * Creates a poller that can be used to poll a long-running operation. 
+ * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +export declare function createHttpPoller>(lro: LongRunningOperation, options?: CreateHttpPollerOptions): Promise>; +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/poller.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/http/poller.d.ts.map new file mode 100644 index 0000000..5f6cfe9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/poller.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/http/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAe,MAAM,aAAa,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAWvE,OAAO,EAAE,uBAAuB,EAAE,MAAM,aAAa,CAAC;AAGtD;;;;;GAKG;AACH,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EACpF,GAAG,EAAE,oBAAoB,EACzB,OAAO,CAAC,EAAE,uBAAuB,CAAC,OAAO,EAAE,MAAM,CAAC,GACjD,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAgD5C"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/poller.js b/node_modules/@azure/core-lro/dist/react-native/http/poller.js new file mode 100644 index 0000000..84379fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/poller.js @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getErrorFromResponse, getOperationLocation, getOperationStatus, getResourceLocation, getStatusFromInitialResponse, inferLroMode, isOperationError, parseRetryAfter, } from "./operation.js"; +import { buildCreatePoller } from "../poller/poller.js"; +/** + * Creates a poller that can be used to poll a long-running operation. + * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +export async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; + return buildCreatePoller({ + getStatusFromInitialResponse, + getStatusFromPollResponse: getOperationStatus, + isOperationError, + getOperationLocation, + getResourceLocation, + getPollingInterval: parseRetryAfter, + getError: getErrorFromResponse, + resolveOnUnsuccessful, + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return { + response, + operationLocation: config?.operationLocation, + resourceLocation: config?.resourceLocation, + ...(config?.mode ? { metadata: { mode: config.mode } } : {}), + }; + }, + poll: lro.sendPollRequest, + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult + ? 
({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + }); +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/http/poller.js.map b/node_modules/@azure/core-lro/dist/react-native/http/poller.js.map new file mode 100644 index 0000000..6a95868 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/http/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/http/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EACL,oBAAoB,EACpB,oBAAoB,EACpB,kBAAkB,EAClB,mBAAmB,EACnB,4BAA4B,EAC5B,YAAY,EACZ,gBAAgB,EAChB,eAAe,GAChB,MAAM,gBAAgB,CAAC;AAExB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACpC,GAAyB,EACzB,OAAkD;IAElD,MAAM,EACJ,sBAAsB,EACtB,YAAY,EACZ,aAAa,EACb,WAAW,EACX,WAAW,EACX,qBAAqB,EACrB,qBAAqB,GAAG,KAAK,GAC9B,GAAG,OAAO,IAAI,EAAE,CAAC;IAClB,OAAO,iBAAiB,CAA+B;QACrD,4BAA4B;QAC5B,yBAAyB,EAAE,kBAAkB;QAC7C,gBAAgB;QAChB,oBAAoB;QACpB,mBAAmB;QACnB,kBAAkB,EAAE,eAAe;QACnC,QAAQ,EAAE,oBAAoB;QAC9B,qBAAqB;KACtB,CAAC,CACA;QACE,IAAI,EAAE,KAAK,IAAI,EAAE;YACf,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,kBAAkB,EAAE,CAAC;YAChD,MAAM,MAAM,GAAG,YAAY,CAAC;gBAC1B,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,aAAa,EAAE,GAAG,CAAC,aAAa;gBAChC,sBAAsB;aACvB,CAAC,CAAC;YACH,OAAO;gBACL,QAAQ;gBACR,iBAAiB,EAAE,MAAM,EAAE,iBAAiB;gBAC5C,gBAAgB,EAAE,MAAM,EAAE,gBAAgB;gBAC1C,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aAC7D,CAAC;QACJ,CAAC;QACD,IAAI,EAAE,GAAG,CAAC,eAAe;KAC1B,EACD;QACE,YAAY;QACZ,qBAAqB;QACrB,WAAW;QACX,WAAW;QACX,aAAa,EAAE,aAAa;YAC1B,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC;YACjE,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,YAAuB;KAClD,CACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LongRunningOperation, LroResponse } from \"./models.js\";\nimport { OperationState, SimplePollerLike } from \"../poller/models.js\";\nimport {\n getErrorFromResponse,\n getOperationLocation,\n getOperationStatus,\n getResourceLocation,\n getStatusFromInitialResponse,\n inferLroMode,\n isOperationError,\n parseRetryAfter,\n} from \"./operation.js\";\nimport { CreateHttpPollerOptions } from \"./models.js\";\nimport { buildCreatePoller } from \"../poller/poller.js\";\n\n/**\n * Creates a poller that can be used to poll a long-running operation.\n * @param lro - Description of the long-running operation\n * @param options - options to configure the poller\n * @returns an initialized poller\n */\nexport async function createHttpPoller>(\n lro: LongRunningOperation,\n options?: CreateHttpPollerOptions,\n): Promise> {\n const {\n resourceLocationConfig,\n intervalInMs,\n processResult,\n restoreFrom,\n updateState,\n withOperationLocation,\n resolveOnUnsuccessful = false,\n } = options || {};\n return buildCreatePoller({\n getStatusFromInitialResponse,\n getStatusFromPollResponse: getOperationStatus,\n isOperationError,\n getOperationLocation,\n getResourceLocation,\n getPollingInterval: parseRetryAfter,\n getError: getErrorFromResponse,\n resolveOnUnsuccessful,\n })(\n {\n init: async () => {\n const response = await lro.sendInitialRequest();\n const config = inferLroMode({\n rawResponse: response.rawResponse,\n requestPath: lro.requestPath,\n requestMethod: lro.requestMethod,\n resourceLocationConfig,\n 
});\n return {\n response,\n operationLocation: config?.operationLocation,\n resourceLocation: config?.resourceLocation,\n ...(config?.mode ? { metadata: { mode: config.mode } } : {}),\n };\n },\n poll: lro.sendPollRequest,\n },\n {\n intervalInMs,\n withOperationLocation,\n restoreFrom,\n updateState,\n processResult: processResult\n ? ({ flatResponse }, state) => processResult(flatResponse, state)\n : ({ flatResponse }) => flatResponse as TResult,\n },\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/index.d.ts b/node_modules/@azure/core-lro/dist/react-native/index.d.ts new file mode 100644 index 0000000..9783948 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/index.d.ts @@ -0,0 +1,13 @@ +export { createHttpPoller } from "./http/poller.js"; +export { CancelOnProgress, OperationState, OperationStatus, SimplePollerLike, } from "./poller/models.js"; +export { CreateHttpPollerOptions } from "./http/models.js"; +export { LroResourceLocationConfig, LongRunningOperation, LroResponse, RawResponse, } from "./http/models.js"; +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +/** legacy */ +export * from "./legacy/lroEngine/index.js"; +export * from "./legacy/poller.js"; +export * from "./legacy/pollOperation.js"; +export { PollerLike } from "./legacy/models.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/index.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/index.d.ts.map new file mode 100644 index 0000000..c096dc1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AACpD,OAAO,EACL,gBAAgB,EAChB,cAAc,EACd,eAAe,EACf,gBAAgB,GACjB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,uBAAuB,EAAE,MAAM,kBAAkB,CAAC;AAC3D,OAAO,EACL,yBAAyB,EACzB,oBAAoB,EACpB,WAAW,EACX,WAAW,GACZ,MAAM,kBAAkB,CAAC;AAE1B;;GAEG;AAUH,aAAa;AACb,cAAc,6BAA6B,CAAC;AAC5C,cAAc,oBAAoB,CAAC;AACnC,cAAc,2BAA2B,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/index.js b/node_modules/@azure/core-lro/dist/react-native/index.js new file mode 100644 index 0000000..3952c2a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/index.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { createHttpPoller } from "./http/poller.js"; +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +// export { +// BuildCreatePollerOptions, +// Operation, +// CreatePollerOptions, +// OperationConfig, +// RestorableOperationState, +// } from "./poller/models"; +// export { buildCreatePoller } from "./poller/poller"; +/** legacy */ +export * from "./legacy/lroEngine/index.js"; +export * from "./legacy/poller.js"; +export * from "./legacy/pollOperation.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/index.js.map b/node_modules/@azure/core-lro/dist/react-native/index.js.map new file mode 100644 index 0000000..689d59e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AAepD;;GAEG;AACH,WAAW;AACX,8BAA8B;AAC9B,eAAe;AACf,yBAAyB;AACzB,qBAAqB;AACrB,8BAA8B;AAC9B,4BAA4B;AAC5B,uDAAuD;AAEvD,aAAa;AACb,cAAc,6BAA6B,CAAC;AAC5C,cAAc,oBAAoB,CAAC;AACnC,cAAc,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { createHttpPoller } from \"./http/poller.js\";\nexport {\n CancelOnProgress,\n OperationState,\n OperationStatus,\n SimplePollerLike,\n} from \"./poller/models.js\";\nexport { CreateHttpPollerOptions } from \"./http/models.js\";\nexport {\n LroResourceLocationConfig,\n LongRunningOperation,\n LroResponse,\n RawResponse,\n} from \"./http/models.js\";\n\n/**\n * This can be uncommented to expose the protocol-agnostic poller\n */\n// export {\n// BuildCreatePollerOptions,\n// Operation,\n// CreatePollerOptions,\n// OperationConfig,\n// RestorableOperationState,\n// } from \"./poller/models\";\n// export { buildCreatePoller } from \"./poller/poller\";\n\n/** legacy */\nexport * from \"./legacy/lroEngine/index.js\";\nexport * from \"./legacy/poller.js\";\nexport * from \"./legacy/pollOperation.js\";\nexport { PollerLike } from \"./legacy/models.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.d.ts b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.d.ts new file mode 100644 index 0000000..b0d84c8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.d.ts @@ -0,0 +1,3 @@ +export { LroEngine } from "./lroEngine.js"; +export { LroEngineOptions } from "./models.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.d.ts.map new file mode 100644 index 0000000..ebf1159 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAC3C,OAAO,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.js b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.js new file mode 100644 index 0000000..ec17805 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.js @@ -0,0 +1,4 
@@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { LroEngine } from "./lroEngine.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.js.map b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.js.map new file mode 100644 index 0000000..0c4dc7a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { LroEngine } from \"./lroEngine.js\";\nexport { LroEngineOptions } from \"./models.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.d.ts b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.d.ts new file mode 100644 index 0000000..937101c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.d.ts @@ -0,0 +1,16 @@ +import { LroEngineOptions } from "./models.js"; +import { LongRunningOperation } from "../../http/models.js"; +import { PollOperationState } from "../pollOperation.js"; +import { Poller } from "../poller.js"; +/** + * The LRO Engine, a class that performs polling. + */ +export declare class LroEngine> extends Poller { + private config; + constructor(lro: LongRunningOperation, options?: LroEngineOptions); + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay(): Promise; +} +//# sourceMappingURL=lroEngine.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.d.ts.map new file mode 100644 index 0000000..640c27e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAgB,MAAM,aAAa,CAAC;AAE7D,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAE5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAItC;;GAEG;AACH,qBAAa,SAAS,CAAC,OAAO,EAAE,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,CAAE,SAAQ,MAAM,CACxF,MAAM,EACN,OAAO,CACR;IACC,OAAO,CAAC,MAAM,CAAe;gBAEjB,GAAG,EAAE,oBAAoB,CAAC,OAAO,CAAC,EAAE,OAAO,CAAC,EAAE,gBAAgB,CAAC,OAAO,EAAE,MAAM,CAAC;IA6B3F;;OAEG;IACH,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAGvB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js new file mode 100644 index 0000000..351691a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { GenericPollOperation } from "./operation.js"; +import { POLL_INTERVAL_IN_MS } from "../../poller/constants.js"; +import { Poller } from "../poller.js"; +import { deserializeState } from "../../poller/operation.js"; +/** + * The LRO Engine, a class that performs polling. 
+ */ +export class LroEngine extends Poller { + config; + constructor(lro, options) { + const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; + const state = resumeFrom + ? deserializeState(resumeFrom) + : {}; + const operation = new GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} +//# sourceMappingURL=lroEngine.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js.map b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js.map new file mode 100644 index 0000000..d5a5ef2 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAEtD,OAAO,EAAE,mBAAmB,EAAE,MAAM,2BAA2B,CAAC;AAEhE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,OAAO,EAAE,gBAAgB,EAAE,MAAM,2BAA2B,CAAC;AAE7D;;GAEG;AACH,MAAM,OAAO,SAA+D,SAAQ,MAGnF;IACS,MAAM,CAAe;IAE7B,YAAY,GAAkC,EAAE,OAA2C;QACzF,MAAM,EACJ,YAAY,GAAG,mBAAmB,EAClC,UAAU,EACV,qBAAqB,GAAG,KAAK,EAC7B,MAAM,EACN,yBAAyB,EACzB,aAAa,EACb,WAAW,GACZ,GAAG,OAAO,IAAI,EAAE,CAAC;QAClB,MAAM,KAAK,GAAqC,UAAU;YACxD,CAAC,CAAC,gBAAgB,CAAC,UAAU,CAAC;YAC9B,CAAC,CAAE,EAAuC,CAAC;QAC7C,MAAM,SAAS,GAAG,IAAI,oBAAoB,CACxC,KAAK,EACL,GAAG,EACH,CAAC,qBAAqB,EACtB,yBAAyB,EACzB,aAAa,EACb,WAAW,EACX,MAAM,CACP,CAAC;QACF,KAAK,CAAC,SAAS,CAAC,CAAC;QACjB,IAAI,CAAC,qBAAqB,GAAG,qBAAqB,CAAC;QAEnD,IAAI,CAAC,MAAM,GAAG,EAAE,YAAY,EAAE,YAAY,EAAE,CAAC;QAC7C,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACzC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;IACzF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroEngineOptions, PollerConfig } from \"./models.js\";\nimport { GenericPollOperation } from \"./operation.js\";\nimport { LongRunningOperation } from \"../../http/models.js\";\nimport { POLL_INTERVAL_IN_MS } from \"../../poller/constants.js\";\nimport { PollOperationState } from \"../pollOperation.js\";\nimport { Poller } from \"../poller.js\";\nimport { RestorableOperationState } from \"../../poller/models.js\";\nimport { deserializeState } from \"../../poller/operation.js\";\n\n/**\n * The LRO Engine, a class that performs polling.\n */\nexport class LroEngine> extends Poller<\n TState,\n TResult\n> {\n private config: PollerConfig;\n\n constructor(lro: LongRunningOperation, options?: LroEngineOptions) {\n const {\n intervalInMs = POLL_INTERVAL_IN_MS,\n resumeFrom,\n resolveOnUnsuccessful = false,\n isDone,\n lroResourceLocationConfig,\n processResult,\n updateState,\n } = options || {};\n const state: RestorableOperationState = resumeFrom\n ? 
deserializeState(resumeFrom)\n : ({} as RestorableOperationState);\n const operation = new GenericPollOperation(\n state,\n lro,\n !resolveOnUnsuccessful,\n lroResourceLocationConfig,\n processResult,\n updateState,\n isDone,\n );\n super(operation);\n this.resolveOnUnsuccessful = resolveOnUnsuccessful;\n\n this.config = { intervalInMs: intervalInMs };\n operation.setPollerConfig(this.config);\n }\n\n /**\n * The method used by the poller to wait before attempting to update its operation.\n */\n delay(): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.d.ts b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.d.ts new file mode 100644 index 0000000..bf26d04 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.d.ts @@ -0,0 +1,38 @@ +import { LroResourceLocationConfig, RawResponse } from "../../http/models.js"; +/** + * Options for the LRO poller. + */ +export interface LroEngineOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + resumeFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + lroResourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: RawResponse) => void; + /** + * A predicate to determine whether the LRO finished processing. + */ + isDone?: (lastResponse: unknown, state: TState) => boolean; + /** + * Control whether to throw an exception if the operation failed or was canceled. 
+ */ + resolveOnUnsuccessful?: boolean; +} +export interface PollerConfig { + intervalInMs: number; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.d.ts.map new file mode 100644 index 0000000..c880365 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,yBAAyB,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAE9E;;GAEG;AACH,MAAM,WAAW,gBAAgB,CAAC,OAAO,EAAE,MAAM;IAC/C;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,yBAAyB,CAAC,EAAE,yBAAyB,CAAC;IACtD;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,WAAW,KAAK,IAAI,CAAC;IACjE;;OAEG;IACH,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC3D;;OAEG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,WAAW,YAAY;IAC3B,YAAY,EAAE,MAAM,CAAC;CACtB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.js b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.js.map b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.js.map new file mode 100644 index 0000000..bfc01b5 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroResourceLocationConfig, RawResponse } from \"../../http/models.js\";\n\n/**\n * Options for the LRO poller.\n */\nexport interface LroEngineOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n resumeFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n lroResourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: RawResponse) => void;\n /**\n * A predicate to determine whether the LRO finished processing.\n */\n isDone?: (lastResponse: unknown, state: TState) => boolean;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful?: boolean;\n}\n\nexport interface PollerConfig {\n intervalInMs: number;\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.d.ts b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.d.ts new file mode 100644 index 0000000..d1257d1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.d.ts @@ -0,0 +1,27 @@ +import { LongRunningOperation, LroResourceLocationConfig, RawResponse } from "../../http/models.js"; +import { PollOperation, PollOperationState } from "../pollOperation.js"; +import { RestorableOperationState } from "../../poller/models.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { PollerConfig } from "./models.js"; +export declare class GenericPollOperation> implements PollOperation { + state: RestorableOperationState; + private lro; + private setErrorAsResult; + private lroResourceLocationConfig?; + private processResult?; + private updateState?; + private isDone?; + private pollerConfig?; + constructor(state: RestorableOperationState, lro: LongRunningOperation, setErrorAsResult: boolean, lroResourceLocationConfig?: LroResourceLocationConfig | undefined, processResult?: ((result: unknown, state: TState) => TResult) | undefined, updateState?: ((state: TState, lastResponse: RawResponse) => void) | undefined, isDone?: ((lastResponse: TResult, state: TState) => boolean) | undefined); + setPollerConfig(pollerConfig: PollerConfig): void; + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + cancel(): Promise>; + /** + * Serializes the Poller operation. + */ + toString(): string; +} +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.d.ts.map new file mode 100644 index 0000000..9b66455 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,yBAAyB,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACpG,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACxE,OAAO,EAAE,wBAAwB,EAAc,MAAM,wBAAwB,CAAC;AAE9E,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAyB3C,qBAAa,oBAAoB,CAAC,OAAO,EAAE,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,CACnF,YAAW,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;IAKhC,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC;IAC9C,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,gBAAgB;IACxB,OAAO,CAAC,yBAAyB,CAAC;IAClC,OAAO,CAAC,aAAa,CAAC;IACtB,OAAO,CAAC,WAAW,CAAC;IACpB,OAAO,CAAC,MAAM,CAAC;IATjB,OAAO,CAAC,YAAY,CAAC,CAAe;gBAG3B,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,EACtC,GAAG,EAAE,oBAAoB,EACzB,gBAAgB,EAAE,OAAO,EACzB,yBAAyB,CAAC,uCAA2B,EACrD,aAAa,CAAC,YAAW,OAAO,SAAS,MAAM,KAAK,OAAO,aAAA,EAC3D,WAAW,CAAC,WAAU,MAAM,gBAAgB,WAAW,KAAK,IAAI,aAAA,EAChE,MAAM,CAAC,kBAAiB,OAAO,SAAS,MAAM,KAAK,OAAO,aAAA;IAG7D,eAAe,CAAC,YAAY,EAAE,YAAY,GAAG,IAAI;IAIlD,MAAM,CAAC,OAAO,CAAC,EAAE;QACrB,WAAW,CAAC,EAAE,eAAe,CAAC;QAC9B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;KACxC,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAwCrC,MAAM,IAAI,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAKvD;;OAEG;IACI,QAAQ,IAAI,MAAM;CAK1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js new file mode 100644 index 
0000000..4051ad8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { initHttpOperation, pollHttpOperation } from "../../http/operation.js"; +import { logger } from "../../logger.js"; +const createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => (state.isCancelled = true), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.isStarted = true), + setSucceeded: (state) => (state.isCompleted = true), + setFailed: () => { + /** empty body */ + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), +}); +export class GenericPollOperation { + state; + lro; + setErrorAsResult; + lroResourceLocationConfig; + processResult; + updateState; + isDone; + pollerConfig; + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + async update(options) { + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = { + ...this.state, + ...(await initHttpOperation({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + })), + }; + } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === undefined) { + await pollHttpOperation({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState + ? (state, { rawResponse }) => updateState(state, rawResponse) + : undefined, + isDone: isDone + ? ({ flatResponse }, state) => isDone(flatResponse, state) + : undefined, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult, + }); + } + options?.fireProgress?.(this.state); + return this; + } + async cancel() { + logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; + } + /** + * Serializes the Poller operation. 
+ */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js.map b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js.map new file mode 100644 index 0000000..de88f0a --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../../src/legacy/lroEngine/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAG/E,OAAO,EAAE,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAEzC,MAAM,gBAAgB,GAGlB,GAAG,EAAE,CAAC,CAAC;IACT,SAAS,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,CAAQ;IAC3D,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;IACjD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;IACrD,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;IAC/C,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IACnD,SAAS,EAAE,GAAG,EAAE;QACd,iBAAiB;IACnB,CAAC;IAED,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK;IAChC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM;IAClC,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW;IAC1C,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;IAClC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS;IACvC,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC;CACzF,CAAC,CAAC;AAEH,MAAM,OAAO,oBAAoB;IAMtB;IACC;IACA;IACA;IACA;IACA;IACA;IATF,YAAY,CAAgB;IAEpC,YACS,KAAuC,EACtC,GAAyB,EACzB,gBAAyB,EACzB,yBAAqD,EACrD,aAA2D,EAC3D,WAAgE,EAChE,MAA0D;QAN3D,UAAK,GAAL,KAAK,CAAkC;QACtC,QAAG,GAAH,GAAG,CAAsB;QACzB,qBAAgB,GAAhB,gBAAgB,CAAS;QACzB,8BAAyB,GAAzB,yBAAyB,CAA4B;QACrD,kBAAa,GAAb,aAAa,CAA8C;QAC3D,gBAAW,GAAX,WAAW,CAAqD;QAChE,WAAM,GAAN,MAAM,CAAoD;IACjE,CAAC;IAEG,eAAe,CAAC,YAA0B;QAC/C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,OAGZ;QACC,MAAM,UAAU,GAAG,gBAAgB,EAAmB,CAAC;QACvD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,GAAG;gBACX,GAAG,IAAI,CAAC,KAAK;gBACb,GAAG,CAAC,MAAM,iBAAiB,CAAC;oBAC1B,GAAG,EAAE,IAAI,CAAC,GAAG;oBACb,UAAU;oBACV,sBAAsB,EAAE,IAAI,CAAC,yBAAyB;oBACtD,aAAa,EAAE,IAAI,CAAC,aAAa;oBACjC,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;iBACxC,CAAC,CAAC;aACJ,CAAC;QACJ,CAAC;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC;QACrC,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QAE3B,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,KAAK,SAAS,EAAE,CAAC;YAC9D,MAAM,iBAAiB,CAAC;gBACtB,GAAG,EAAE,IAAI,CAAC,GAAG;gBACb,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,UAAU;gBACV,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,WAAW,EAAE,WAAW;oBACtB,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,WAAW,EAAE,EAAE,EAAE,CAAC,WAAW,CAAC,KAAK,EAAE,WAAW,CAAC;oBAC7D,CAAC,CAAC,SAAS;gBACb,MAAM,EAAE,MAAM;oBACZ,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,CAAC,MAAM,CAAC,YAAuB,EAAE,KAAK,CAAC;oBACrE,CAAC,CAAC,SAAS;gBACb,OAAO;gBACP,QAAQ,EAAE,CAAC,YAAY,EAAE,EAAE;oBACzB,IAAI,CAAC,YAAa,CAAC,YAAY,GAAG,YAAY,CAAC;gBACjD,CAAC;gBACD,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;aACxC,CAAC,CAAC;QACL,CAAC;QACD,OAAO,EAAE,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,MAAM;QACV,MAAM,CAAC,KAAK,CAAC,+DAA+D,CAAC,CAAC;QAC9E,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAA
C,SAAS,CAAC;YACpB,KAAK,EAAE,IAAI,CAAC,KAAK;SAClB,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LongRunningOperation, LroResourceLocationConfig, RawResponse } from \"../../http/models.js\";\nimport { PollOperation, PollOperationState } from \"../pollOperation.js\";\nimport { RestorableOperationState, StateProxy } from \"../../poller/models.js\";\nimport { initHttpOperation, pollHttpOperation } from \"../../http/operation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { PollerConfig } from \"./models.js\";\nimport { logger } from \"../../logger.js\";\n\nconst createStateProxy: >() => StateProxy<\n TState,\n TResult\n> = () => ({\n initState: (config) => ({ config, isStarted: true }) as any,\n setCanceled: (state) => (state.isCancelled = true),\n setError: (state, error) => (state.error = error),\n setResult: (state, result) => (state.result = result),\n setRunning: (state) => (state.isStarted = true),\n setSucceeded: (state) => (state.isCompleted = true),\n setFailed: () => {\n /** empty body */\n },\n\n getError: (state) => state.error,\n getResult: (state) => state.result,\n isCanceled: (state) => !!state.isCancelled,\n isFailed: (state) => !!state.error,\n isRunning: (state) => !!state.isStarted,\n isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error),\n});\n\nexport class GenericPollOperation>\n implements PollOperation\n{\n private pollerConfig?: PollerConfig;\n\n constructor(\n public state: RestorableOperationState,\n private lro: LongRunningOperation,\n private setErrorAsResult: boolean,\n private lroResourceLocationConfig?: LroResourceLocationConfig,\n private processResult?: (result: unknown, state: TState) => TResult,\n private updateState?: (state: TState, lastResponse: RawResponse) => void,\n private isDone?: (lastResponse: TResult, state: TState) => boolean,\n ) {}\n\n public setPollerConfig(pollerConfig: PollerConfig): void {\n this.pollerConfig = pollerConfig;\n }\n\n async update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise> {\n const stateProxy = createStateProxy();\n if (!this.state.isStarted) {\n this.state = {\n ...this.state,\n ...(await initHttpOperation({\n lro: this.lro,\n stateProxy,\n resourceLocationConfig: this.lroResourceLocationConfig,\n processResult: this.processResult,\n setErrorAsResult: this.setErrorAsResult,\n })),\n };\n }\n const updateState = this.updateState;\n const isDone = this.isDone;\n\n if (!this.state.isCompleted && this.state.error === undefined) {\n await pollHttpOperation({\n lro: this.lro,\n state: this.state,\n stateProxy,\n processResult: this.processResult,\n updateState: updateState\n ? (state, { rawResponse }) => updateState(state, rawResponse)\n : undefined,\n isDone: isDone\n ? 
({ flatResponse }, state) => isDone(flatResponse as TResult, state)\n : undefined,\n options,\n setDelay: (intervalInMs) => {\n this.pollerConfig!.intervalInMs = intervalInMs;\n },\n setErrorAsResult: this.setErrorAsResult,\n });\n }\n options?.fireProgress?.(this.state);\n return this;\n }\n\n async cancel(): Promise> {\n logger.error(\"`cancelOperation` is deprecated because it wasn't implemented\");\n return this;\n }\n\n /**\n * Serializes the Poller operation.\n */\n public toString(): string {\n return JSON.stringify({\n state: this.state,\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/models.d.ts b/node_modules/@azure/core-lro/dist/react-native/legacy/models.d.ts new file mode 100644 index 0000000..6ecd744 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/models.d.ts @@ -0,0 +1,66 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress } from "../poller/models.js"; +import { PollOperationState } from "./pollOperation.js"; +/** + * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with. + */ +export interface PollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * @deprecated `cancelOperation` has been deprecated because it was not implemented. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * The TState defined in PollerLike can be a subset of the TState defined in + * the Poller implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString(): string; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/models.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/legacy/models.d.ts.map new file mode 100644 index 0000000..f8e2c78 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/legacy/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAExD;;GAEG;AAEH,MAAM,WAAW,UAAU,CAAC,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,EAAE,OAAO;IAC7E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE;;OAEG;IACH,aAAa,CAAC,WAAW,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACjF;;;;;OAKG;IACH,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB,CAAC;IAChE;;OAEG;IACH,MAAM,IAAI,OAAO,CAAC;IAClB;;OAEG;IACH,WAAW,IAAI,IAAI,CAAC;IACpB;;OAEG;IACH,SAAS,IAAI,OAAO,CAAC;IACrB;;;OAGG;IACH,eAAe,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5E;;;;OAIG;IACH,iBAAiB,IAAI,MAAM,CAAC;IAC5B;;;;;OAKG;IACH,SAAS,IAAI,OAAO,GAAG,SAAS,CAAC;IACjC;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/models.js b/node_modules/@azure/core-lro/dist/react-native/legacy/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/models.js.map b/node_modules/@azure/core-lro/dist/react-native/legacy/models.js.map new file mode 100644 index 0000000..4f2a390 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/legacy/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress } from \"../poller/models.js\";\nimport { PollOperationState } from \"./pollOperation.js\";\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\n// eslint-disable-next-line no-use-before-define\nexport interface PollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n 
*/\n isDone(): boolean;\n /**\n * Stops the poller. After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n * @deprecated `cancelOperation` has been deprecated because it was not implemented.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.d.ts b/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.d.ts new file mode 100644 index 0000000..9d1b341 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.d.ts @@ -0,0 +1,81 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * PollOperationState contains an opinionated list of the smallest set of properties needed + * to define any long running operation poller. + * + * While the Poller class works as the local control mechanism to start triggering, wait for, + * and potentially cancel a long running operation, the PollOperationState documents the status + * of the remote long running operation. + * + * It should be updated at least when the operation starts, when it's finished, and when it's cancelled. + * Though, implementations can have any other number of properties that can be updated by other reasons. + */ +export interface PollOperationState { + /** + * True if the operation has started. + */ + isStarted?: boolean; + /** + * True if the operation has been completed. + */ + isCompleted?: boolean; + /** + * True if the operation has been cancelled. + */ + isCancelled?: boolean; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation concluded in a result of an expected type. + */ + result?: TResult; +} +/** + * PollOperation is an interface that defines how to update the local reference of the state of the remote + * long running operation, just as well as how to request the cancellation of the same operation. + * + * It also has a method to serialize the operation so that it can be stored and resumed at any time. + */ +export interface PollOperation { + /** + * The state of the operation. + * It will be used to store the basic properties of PollOperationState, + * plus any custom property that the implementation may require. + */ + state: TState; + /** + * Defines how to request the remote service for updates on the status of the long running operation. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Also optionally receives a "fireProgress" function, which, if called, is responsible for triggering the + * poller's onProgress callbacks. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * It returns a promise that should be resolved with an updated version of the poller's operation. + * + * @param options - Optional properties passed to the operation's update method. + * + * @deprecated `cancel` has been deprecated because it was not implemented. + */ + cancel(options?: { + abortSignal?: AbortSignalLike; + }): Promise>; + /** + * Serializes the operation. + * Useful when wanting to create a poller that monitors an existing operation. + */ + toString(): string; +} +//# sourceMappingURL=pollOperation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.d.ts.map new file mode 100644 index 0000000..257ba63 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.d.ts","sourceRoot":"","sources":["../../../src/legacy/pollOperation.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE1D;;;;;;;;;;GAUG;AACH,MAAM,WAAW,kBAAkB,CAAC,OAAO;IACzC;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd;;OAEG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED;;;;;GAKG;AACH,MAAM,WAAW,aAAa,CAAC,MAAM,EAAE,OAAO;IAC5C;;;;OAIG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;;;;;;OAQG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QACf,WAAW,CAAC,EAAE,eAAe,CAAC;QAC9B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;KACxC,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAE5C;;;;;;;;;;OAUG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAE7F;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.js b/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.js new file mode 100644 index 0000000..f8c1767 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=pollOperation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.js.map b/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.js.map new file mode 100644 index 0000000..a1437f2 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/pollOperation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.js","sourceRoot":"","sources":["../../../src/legacy/pollOperation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * PollOperationState contains an opinionated list of the smallest set of properties needed\n * to define any long running operation poller.\n *\n * While the Poller class works as the local control mechanism to start triggering, wait for,\n * and potentially cancel a long running operation, the PollOperationState documents the status\n * of the remote long running operation.\n *\n * It should be updated at least when the operation starts, when it's finished, and when it's cancelled.\n * Though, implementations can have any other number of properties that can be updated by other reasons.\n */\nexport interface PollOperationState {\n /**\n * True if the operation has started.\n */\n isStarted?: boolean;\n /**\n * True if the operation has been completed.\n */\n isCompleted?: boolean;\n /**\n * True if the operation has been cancelled.\n */\n isCancelled?: boolean;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation concluded in a result of an expected type.\n */\n result?: TResult;\n}\n\n/**\n * PollOperation is an interface that defines how to update the local reference of the state of the remote\n * long running operation, just as well as how to request the cancellation of the same operation.\n *\n * It also has a method to serialize the operation so that it can be stored and resumed at any time.\n */\nexport interface PollOperation {\n /**\n * The state of the operation.\n * It will be used to store the basic properties of PollOperationState,\n * plus any custom property that the implementation may require.\n */\n state: TState;\n\n /**\n * Defines how to request the remote service for updates on the status of the long running operation.\n *\n * It optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n * Also optionally receives a \"fireProgress\" function, which, if called, is responsible for triggering the\n * poller's onProgress callbacks.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise>;\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * It returns a promise that should be resolved with an updated version of the poller's operation.\n *\n * @param options - Optional properties passed to the operation's update method.\n *\n * @deprecated `cancel` has been deprecated because it was not implemented.\n */\n cancel(options?: { abortSignal?: AbortSignalLike }): Promise>;\n\n /**\n * Serializes the operation.\n * Useful when wanting to create a poller that monitors an 
existing operation.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/poller.d.ts b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.d.ts new file mode 100644 index 0000000..e748266 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.d.ts @@ -0,0 +1,327 @@ +import { PollOperation, PollOperationState } from "./pollOperation.js"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress } from "../poller/models.js"; +import { PollerLike } from "./models.js"; +/** + * PollProgressCallback is the type of the callback functions sent to onProgress. + * These functions will receive a TState that is defined by your implementation of + * the Poller class. + */ +export type PollProgressCallback = (state: TState) => void; +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export declare class PollerStoppedError extends Error { + constructor(message: string); +} +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +export declare class PollerCancelledError extends Error { + constructor(message: string); +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. 
+ * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +export declare abstract class Poller, TResult> implements PollerLike { + /** controls whether to throw an error if the operation failed or was canceled. */ + protected resolveOnUnsuccessful: boolean; + private stopped; + private resolve?; + private reject?; + private pollOncePromise?; + private cancelPromise?; + private promise; + private pollProgressCallbacks; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + protected operation: PollOperation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation: PollOperation); + /** + * Defines how much to wait between each poll request. + * This has to be implemented by your custom poller. + * + * \@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified. + * This can be used as follows: + * + * ```ts + * import { delay } from "@azure/core-util"; + * + * export class MyPoller extends Poller { + * // The other necessary definitions. 
+ * + * async delay(): Promise { + * const milliseconds = 1000; + * return delay(milliseconds); + * } + * } + * ``` + * + */ + protected abstract delay(): Promise; + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + private startPolling; + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + private pollOnce; + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + private fireProgress; + /** + * Invokes the underlying operation's cancel method. + */ + private cancelOnce; + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + private processUpdatedState; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller from continuing to poll. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. 
+ * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/poller.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.d.ts.map new file mode 100644 index 0000000..8ae1b13 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.d.ts.map @@ -0,0 +1 @@ 
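For illustration only, a minimal sketch of how a client-returned poller exposing the legacy `PollerLike` surface declared above (`poll`, `onProgress`, `pollUntilDone`, `getResult`) might be consumed. The `CopyResult` type and the notion of a "copy" operation are hypothetical, and the import assumes the package root re-exports the legacy types, as `@azure/core-lro` v2 does:

```ts
import type { PollerLike, PollOperationState } from "@azure/core-lro";

// Hypothetical result/state types for an imaginary long-running "copy" operation.
interface CopyResult {
  copyId: string;
}
type CopyState = PollOperationState<CopyResult>;

async function waitForCopy(poller: PollerLike<CopyState, CopyResult>): Promise<CopyResult> {
  // onProgress returns a CancelOnProgress function that unregisters the callback.
  const stopListening = poller.onProgress((state) => {
    console.log(`completed=${state.isCompleted} cancelled=${state.isCancelled}`);
  });
  try {
    // pollUntilDone keeps polling until the operation reaches a terminal state.
    return await poller.pollUntilDone();
  } finally {
    stopListening();
  }
}
```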
+{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/legacy/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAEzC;;;;GAIG;AACH,MAAM,MAAM,oBAAoB,CAAC,MAAM,IAAI,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;AAEnE;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,KAAK;gBAC/B,OAAO,EAAE,MAAM;CAK5B;AAED;;;GAGG;AACH,qBAAa,oBAAqB,SAAQ,KAAK;gBACjC,OAAO,EAAE,MAAM;CAK5B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AAEH,8BAAsB,MAAM,CAAC,MAAM,SAAS,kBAAkB,CAAC,OAAO,CAAC,EAAE,OAAO,CAC9E,YAAW,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC;IAEtC,kFAAkF;IAClF,SAAS,CAAC,qBAAqB,EAAE,OAAO,CAAS;IACjD,OAAO,CAAC,OAAO,CAAiB;IAChC,OAAO,CAAC,OAAO,CAAC,CAA2B;IAC3C,OAAO,CAAC,MAAM,CAAC,CAAqE;IACpF,OAAO,CAAC,eAAe,CAAC,CAAgB;IACxC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,OAAO,CAAmB;IAClC,OAAO,CAAC,qBAAqB,CAAsC;IAEnE;;;OAGG;IACH,SAAS,CAAC,SAAS,EAAE,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAEpD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;gBACS,SAAS,EAAE,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;IAmBrD;;;;;;;;;;;;;;;;;;;;OAoBG;IACH,SAAS,CAAC,QAAQ,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAEzC;;;OAGG;YACW,YAAY;IAU1B;;;;;;;OAOG;YACW,QAAQ;IAUtB;;;;;;;OAOG;IACH,OAAO,CAAC,YAAY;IAMpB;;OAEG;YACW,UAAU;IAIxB;;;;;;;OAOG;IACI,IAAI,CAAC,OAAO,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAW3E,OAAO,CAAC,mBAAmB;IA0B3B;;OAEG;IACU,aAAa,CACxB,WAAW,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAClD,OAAO,CAAC,OAAO,CAAC;IAUnB;;;;;OAKG;IACI,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB;IAOtE;;OAEG;IACI,MAAM,IAAI,OAAO;IAKxB;;OAEG;IACI,WAAW,IAAI,IAAI;IAS1B;;OAEG;IACI,SAAS,IAAI,OAAO;IAI3B;;;;;;;;OAQG;IACI,eAAe,CAAC,OAAO,GAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAStF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB,IAAI,MAAM;IAIlC;;;;;OAKG;IACI,SAAS,IAAI,OAAO,GAAG,SAAS;IAKvC;;;OAGG;IACI,QAAQ,IAAI,MAAM;CAG1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js new file mode 100644 index 0000000..de6f912 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js @@ -0,0 +1,407 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +export class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. 
+ * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +export class Poller { + /** controls whether to throw an error if the operation failed or was canceled. */ + resolveOnUnsuccessful = false; + stopped = true; + resolve; + reject; + pollOncePromise; + cancelPromise; + promise; + pollProgressCallbacks = []; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + operation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. 
+ * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + } + this.processUpdatedState(); + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error = new PollerCancelledError("Operation was canceled"); + this.reject(error); + throw error; + } + } + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.getResult()); + } + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + // This is needed because the state could have been updated by + // `cancelOperation`, e.g. the operation is canceled or an error occurred. + this.processUpdatedState(); + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. 
+ * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString() { + return this.operation.toString(); + } +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js.map b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js.map new file mode 100644 index 0000000..d3bec33 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/legacy/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAclC;;;GAGG;AACH,MAAM,OAAO,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAC;QACjC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC;IAC5D,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;QACnC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,SAAS,CAAC,CAAC;IAC9D,CAAC;CACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AACH,gDAAgD;AAChD,MAAM,OAAgB,MAAM;IAG1B,kFAAkF;IACxE,qBAAqB,GAAY,KAAK,CAAC;IACzC,OAAO,GAAY,IAAI,CAAC;IACxB,OAAO,CAA4B;IACnC,MAAM,CAAsE;IAC5E,eAAe,CAAiB;IAChC,aAAa,CAAiB;IAC9B,OAAO,CAAmB;IAC1B,qBAAqB,GAAmC,EAAE,CAAC;IAEnE;;;OAGG;IACO,SAAS,CAAiC;IAEpD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;IACH,YAAY,SAAyC;QACnD,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CACxB,CACE,OAAkC,EAClC,MAA0E,EAC1E,EAAE;YACF,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACvB,CAAC,CACF,CAAC;QACF,mFAAmF;QACnF,sFAAsF;QACtF,mFAAmF;QACnF,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,EAAE;YACtB,yBAAyB;QAC3B,CAAC,CAAC,CAAC;IACL,CAAC;IAyBD;;;OAGG;IACK,KAAK,CAAC,YAAY,CAAC,cAAiD,EAAE;QAC5E,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;QACvB,CAAC;QACD,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YAC3C,MAAM,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;YAC7B,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;QACrB,CAAC;IACH,CAAC;IAED;;;;;;;OAOG;IACK,KAAK,CAAC,QAAQ,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YACnB,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;gBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,YAAY,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;aAC3C,CAAC,CAAC;QACL,CAAC;QACD,IAAI,CAAC,mBAAmB,EAAE,CAAC;IAC7B,CAAC;IAED;;;;;;;OAOG;IACK,YAAY,CAAC,KAAa;QAChC,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,qBAAqB,EAAE,CAAC;YAClD,QAAQ,CAAC,KAAK,CAAC,CAAC;QAClB,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,UAAU,CAAC,UAA6C,EAAE;QACtE,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IACxD,CAAC;IAED;;;;;;;OAOG;IACI,IAAI,CAAC,UAA6C,EAAE;QACzD,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,CAAC;YAC1B,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,oBAAoB,GAAG,GAAS,EAAE;gBACtC,IAAI,CAAC,eAAe,GAAG,SAAS,CAAC;YACnC,CAAC,CAAC;YACF,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,oBAAoB,EAAE,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3F,CAAC;QACD,OAAO,IAAI,CAAC,eAAe,CAAC;IAC9B,CAAC;IAEO,mBAAmB;QACzB,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;YAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC;gBAChC,IAAI,CAAC,MAAO,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;gBACzC,MAAM,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC;YACnC,CAAC;QACH,CAAC;QACD,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC;YACrC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC;gBAChC,MAAM,KAAK,GAAG,IAAI,oBAAoB,CAAC,wBAAwB,CAAC,CAAC;gBACjE,IAAI,CAAC,MAAO,CAAC,KAAK,CAAC,CAAC;gBACpB,MAAM,KAAK,CAAC;YACd,CAAC
;QACH,CAAC;QACD,IAAI,IAAI,CAAC,MAAM,EAAE,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YAClC,uEAAuE;YACvE,uEAAuE;YACvE,oEAAoE;YACpE,uEAAuE;YACvE,cAAc;YACd,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,EAAa,CAAC,CAAC;QAC5C,CAAC;IACH,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,aAAa,CACxB,cAAiD,EAAE;QAEnD,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,IAAI,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACpD,CAAC;QACD,8DAA8D;QAC9D,0EAA0E;QAC1E,IAAI,CAAC,mBAAmB,EAAE,CAAC;QAC3B,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;OAKG;IACI,UAAU,CAAC,QAAiC;QACjD,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC1C,OAAO,GAAS,EAAE;YAChB,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC;QACxF,CAAC,CAAC;IACJ,CAAC;IAED;;OAEG;IACI,MAAM;QACX,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;IACxE,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;YAClB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;gBAChB,IAAI,CAAC,MAAM,CAAC,IAAI,kBAAkB,CAAC,gCAAgC,CAAC,CAAC,CAAC;YACxE,CAAC;QACH,CAAC;IACH,CAAC;IAED;;OAEG;IACI,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC;YACxB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QAChD,CAAC;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;QAC3D,CAAC;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB;QACtB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC9B,CAAC;IAED;;;;;OAKG;IACI,SAAS;QACd,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,KAAK,CAAC,MAAM,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperation, PollOperationState } from \"./pollOperation.js\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress } from \"../poller/models.js\";\nimport { PollerLike } from \"./models.js\";\n\n/**\n * PollProgressCallback is the type of the callback functions sent to onProgress.\n * These functions will receive a TState that is defined by your implementation of\n * the Poller class.\n */\nexport type PollProgressCallback = (state: TState) => void;\n\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nexport class PollerStoppedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(this, PollerStoppedError.prototype);\n }\n}\n\n/**\n * When the operation is cancelled, the poller will be rejected with an instance\n * of the PollerCancelledError.\n */\nexport class PollerCancelledError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(this, PollerCancelledError.prototype);\n }\n}\n\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can 
also request the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nexport abstract class Poller, TResult>\n implements PollerLike\n{\n /** controls whether to throw an error if the operation failed or was canceled. */\n protected resolveOnUnsuccessful: boolean = false;\n private stopped: boolean = true;\n private resolve?: (value: TResult) => void;\n private reject?: (error: PollerStoppedError | PollerCancelledError | Error) => void;\n private pollOncePromise?: Promise;\n private cancelPromise?: Promise;\n private promise: Promise;\n private pollProgressCallbacks: PollProgressCallback[] = [];\n\n /**\n * The poller's operation is available in full to any of the methods of the Poller class\n * and any class extending the Poller class.\n */\n protected operation: PollOperation;\n\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. 
The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n constructor(operation: PollOperation) {\n this.operation = operation;\n this.promise = new Promise(\n (\n resolve: (result: TResult) => void,\n reject: (error: PollerStoppedError | PollerCancelledError | Error) => void,\n ) => {\n this.resolve = resolve;\n this.reject = reject;\n },\n );\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(() => {\n /* intentionally blank */\n });\n }\n\n /**\n * Defines how much to wait between each poll request.\n * This has to be implemented by your custom poller.\n *\n * \\@azure/core-util has a simple implementation of a delay function that waits as many milliseconds as specified.\n * This can be used as follows:\n *\n * ```ts\n * import { delay } from \"@azure/core-util\";\n *\n * export class MyPoller extends Poller {\n * // The other necessary definitions.\n *\n * async delay(): Promise {\n * const milliseconds = 1000;\n * return delay(milliseconds);\n * }\n * }\n * ```\n *\n */\n protected abstract delay(): Promise;\n\n /**\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n private async startPolling(pollOptions: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (this.stopped) {\n this.stopped = false;\n }\n while (!this.isStopped() && !this.isDone()) {\n await this.poll(pollOptions);\n await this.delay();\n }\n }\n\n /**\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n private async pollOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise 
{\n if (!this.isDone()) {\n this.operation = await this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this),\n });\n }\n this.processUpdatedState();\n }\n\n /**\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n private fireProgress(state: TState): void {\n for (const callback of this.pollProgressCallbacks) {\n callback(state);\n }\n }\n\n /**\n * Invokes the underlying operation's cancel method.\n */\n private async cancelOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n this.operation = await this.operation.cancel(options);\n }\n\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public poll(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n const clearPollOncePromise = (): void => {\n this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n }\n\n private processUpdatedState(): void {\n if (this.operation.state.error) {\n this.stopped = true;\n if (!this.resolveOnUnsuccessful) {\n this.reject!(this.operation.state.error);\n throw this.operation.state.error;\n }\n }\n if (this.operation.state.isCancelled) {\n this.stopped = true;\n if (!this.resolveOnUnsuccessful) {\n const error = new PollerCancelledError(\"Operation was canceled\");\n this.reject!(error);\n throw error;\n }\n }\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.getResult() as TResult);\n }\n }\n\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n public async pollUntilDone(\n pollOptions: { abortSignal?: AbortSignalLike } = {},\n ): Promise {\n if (this.stopped) {\n this.startPolling(pollOptions).catch(this.reject);\n }\n // This is needed because the state could have been updated by\n // `cancelOperation`, e.g. 
the operation is canceled or an error occurred.\n this.processUpdatedState();\n return this.promise;\n }\n\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n public onProgress(callback: (state: TState) => void): CancelOnProgress {\n this.pollProgressCallbacks.push(callback);\n return (): void => {\n this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);\n };\n }\n\n /**\n * Returns true if the poller has finished polling.\n */\n public isDone(): boolean {\n const state: PollOperationState = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n }\n\n /**\n * Stops the poller from continuing to poll.\n */\n public stopPolling(): void {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n }\n\n /**\n * Returns true if the poller is stopped.\n */\n public isStopped(): boolean {\n return this.stopped;\n }\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public cancelOperation(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n } else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n }\n\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... 
More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n public getOperationState(): TState {\n return this.operation.state;\n }\n\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n public getResult(): TResult | undefined {\n const state: PollOperationState = this.operation.state;\n return state.result;\n }\n\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n public toString(): string {\n return this.operation.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/logger.d.ts b/node_modules/@azure/core-lro/dist/react-native/logger.d.ts new file mode 100644 index 0000000..52138fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/logger.d.ts @@ -0,0 +1,6 @@ +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=logger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/logger.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/logger.d.ts.map new file mode 100644 index 0000000..3763b49 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/logger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,eAAO,MAAM,MAAM,qCAAiC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/logger.js b/node_modules/@azure/core-lro/dist/react-native/logger.js new file mode 100644 index 0000000..094bfe4 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/logger.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +/** + * The `@azure/logger` configuration for this package. 
+ * @internal + */ +export const logger = createClientLogger("core-lro"); +//# sourceMappingURL=logger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/logger.js.map b/node_modules/@azure/core-lro/dist/react-native/logger.js.map new file mode 100644 index 0000000..8f21af4 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/logger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.js","sourceRoot":"","sources":["../../src/logger.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;;GAGG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n * @internal\n */\nexport const logger = createClientLogger(\"core-lro\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/package.json b/node_modules/@azure/core-lro/dist/react-native/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/constants.d.ts b/node_modules/@azure/core-lro/dist/react-native/poller/constants.d.ts new file mode 100644 index 0000000..a4e88ee --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/constants.d.ts @@ -0,0 +1,9 @@ +/** + * The default time interval to wait before sending the next polling request. + */ +export declare const POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. + */ +export declare const terminalStates: string[]; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/constants.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/poller/constants.d.ts.map new file mode 100644 index 0000000..206a7fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../src/poller/constants.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,mBAAmB,OAAO,CAAC;AACxC;;GAEG;AACH,eAAO,MAAM,cAAc,UAAsC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/constants.js b/node_modules/@azure/core-lro/dist/react-native/poller/constants.js new file mode 100644 index 0000000..e35aad7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/constants.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The default time interval to wait before sending the next polling request. + */ +export const POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. 
+ */ +export const terminalStates = ["succeeded", "canceled", "failed"]; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/constants.js.map b/node_modules/@azure/core-lro/dist/react-native/poller/constants.js.map new file mode 100644 index 0000000..539a956 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../src/poller/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,mBAAmB,GAAG,IAAI,CAAC;AACxC;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,WAAW,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The default time interval to wait before sending the next polling request.\n */\nexport const POLL_INTERVAL_IN_MS = 2000;\n/**\n * The closed set of terminal states.\n */\nexport const terminalStates = [\"succeeded\", \"canceled\", \"failed\"];\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/models.d.ts b/node_modules/@azure/core-lro/dist/react-native/poller/models.d.ts new file mode 100644 index 0000000..80845ac --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/models.d.ts @@ -0,0 +1,221 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * Configurations for how to poll the operation and to check whether it has + * terminated. + */ +export interface OperationConfig { + /** The operation location */ + operationLocation?: string; + /** The resource location */ + resourceLocation?: string; + /** metadata about the operation */ + metadata?: Record; +} +/** + * The description of an operation. + */ +export interface Operation { + /** + * Sends the initiation request and returns, in addition to the response, the + * operation location, the potential resource location, and a set of metadata. + */ + init: () => Promise; + /** + * Sends the polling request. + */ + poll: (location: string, options?: TOptions) => Promise; +} +/** + * Type of a restorable long-running operation. + */ +export type RestorableOperationState = T & { + /** The operation configuration */ + config: OperationConfig; +}; +/** + * Options for `createPoller`. + */ +export interface CreatePollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + restoreFrom?: string; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: TResponse, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: TResponse) => void; + /** + * A function to be called each time the operation location is updated by the + * service. + */ + withOperationLocation?: (operationLocation: string) => void; +} +export interface LroError { + code: string; + innererror?: InnerError; + message: string; +} +export interface InnerError { + code: string; + message: string; + innererror?: InnerError; +} +/** + * Options for `buildCreatePoller`. + */ +export interface BuildCreatePollerOptions { + /** + * Gets the status of the operation from the response received when the + * operation was initialized. 
Note that the operation could be already in + * a terminal state at this time. + */ + getStatusFromInitialResponse: (inputs: { + response: TResponse; + state: RestorableOperationState; + operationLocation?: string; + }) => OperationStatus; + /** + * Gets the status of the operation from a response received when the + * operation was polled. + */ + getStatusFromPollResponse: (response: TResponse, state: RestorableOperationState) => OperationStatus; + /** + * Determines if the input error is an operation error. + */ + isOperationError: (error: Error) => boolean; + /** + * Gets the updated operation location from polling responses. + */ + getOperationLocation?: (response: TResponse, state: RestorableOperationState) => string | undefined; + /** + * Gets the resource location from a response. + */ + getResourceLocation: (response: TResponse, state: RestorableOperationState) => string | undefined; + /** + * Gets from the response the time interval the service suggests the client to + * wait before sending the next polling request. + */ + getPollingInterval?: (response: TResponse) => number | undefined; + /** + * Extracts an error model from a response. + */ + getError?: (response: TResponse) => LroError | undefined; + /** + * Control whether to throw an exception if the operation failed or was canceled. + */ + resolveOnUnsuccessful: boolean; +} +/** + * The set of possible states an operation can be in at any given time. + */ +export type OperationStatus = "notStarted" | "running" | "succeeded" | "canceled" | "failed"; +/** + * While the poller works as the local control mechanism to start triggering and + * wait for a long-running operation, OperationState documents the status of + * the remote long-running operation. It gets updated after each poll. + */ +export interface OperationState { + /** + * The current status of the operation. + */ + status: OperationStatus; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation produced a result of the expected type. + */ + result?: TResult; +} +/** + * CancelOnProgress is used as the return value of a Poller's onProgress method. + * When a user invokes onProgress, they're required to pass in a function that will be + * called as a callback with the new data received each time the poll operation is updated. + * onProgress returns a function that will prevent any further update to reach the original callback. + */ +export type CancelOnProgress = () => void; +/** + * A simple poller interface. + */ +export interface SimplePollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. 
+ */ + isStopped(): boolean; + /** + * Returns the state of the operation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +/** + * A state proxy that allows poller implementation to abstract away the operation + * state. This is useful to implement `lroEngine` and `createPoller` in a modular + * way. + */ +export interface StateProxy { + initState: (config: OperationConfig) => RestorableOperationState; + setRunning: (state: TState) => void; + setCanceled: (state: TState) => void; + setResult: (state: TState, result: TResult) => void; + setError: (state: TState, error: Error) => void; + setFailed: (state: TState) => void; + setSucceeded: (state: TState) => void; + isRunning: (state: TState) => boolean; + isCanceled: (state: TState) => boolean; + getResult: (state: TState) => TResult | undefined; + getError: (state: TState) => Error | undefined; + isFailed: (state: TState) => boolean; + isSucceeded: (state: TState) => boolean; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/models.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/poller/models.d.ts.map new file mode 100644 index 0000000..6e85a3f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../../src/poller/models.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE1D;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,6BAA6B;IAC7B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,4BAA4B;IAC5B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,mCAAmC;IACnC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACnC;AAED;;GAEG;AACH,MAAM,WAAW,SAAS,CAAC,SAAS,EAAE,QAAQ;IAC5C;;;OAGG;IACH,IAAI,EAAE,MAAM,OAAO,CACjB,eAAe,GAAG;QAChB,QAAQ,EAAE,SAAS,CAAC;KACrB,CACF,CAAC;IACF;;OAEG;IACH,IAAI,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,QAAQ,KAAK,OAAO,CAAC,SAAS,CAAC,CAAC;CACpE;AAED;;GAEG;AACH,MAAM,MAAM,wBAAwB,CAAC,CAAC,IAAI,CAAC,GAAG;IAC5C,kCAAkC;IAClC,MAAM,EAAE,eAAe,CAAC;CACzB,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,mBAAmB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM;IAC7D;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D;;OAEG;IACH,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,KAAK,IAAI,CAAC;IAC/D;;;OAGG;IACH,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,KAAK,IAAI,CAAC;CAC7D;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB,CAAC,SAAS,EAAE,MAAM;IACzD;;;;OAIG;IACH,4BAA4B,EAAE,CAAC,MAAM,EAAE;QACrC,QAAQ,EAAE,SAAS,CAAC;QACpB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;QACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,KAAK,eAAe,CAAC;IACtB;;;OAGG;IACH,yBAAyB,EAAE,CACzB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,eAAe,CAAC;IACrB;;OAEG;IACH,gBAAgB,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IAC5C;;OAEG;IACH,oBAAoB,CAAC,EAAE,CACrB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB;;OAEG;I
ACH,mBAAmB,EAAE,CACnB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,MAAM,GAAG,SAAS,CAAC;IACjE;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,QAAQ,GAAG,SAAS,CAAC;IACzD;;OAEG;IACH,qBAAqB,EAAE,OAAO,CAAC;CAChC;AAED;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG,YAAY,GAAG,SAAS,GAAG,WAAW,GAAG,UAAU,GAAG,QAAQ,CAAC;AAE7F;;;;GAIG;AACH,MAAM,WAAW,cAAc,CAAC,OAAO;IACrC;;OAEG;IACH,MAAM,EAAE,eAAe,CAAC;IACxB;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd;;OAEG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED;;;;;GAKG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC;AAE1C;;GAEG;AACH,MAAM,WAAW,gBAAgB,CAAC,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EAAE,OAAO;IAC/E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE;;OAEG;IACH,aAAa,CAAC,WAAW,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,eAAe,CAAA;KAAE,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACjF;;;;;OAKG;IACH,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,gBAAgB,CAAC;IAChE;;OAEG;IACH,MAAM,IAAI,OAAO,CAAC;IAClB;;OAEG;IACH,WAAW,IAAI,IAAI,CAAC;IACpB;;OAEG;IACH,SAAS,IAAI,OAAO,CAAC;IACrB;;OAEG;IACH,iBAAiB,IAAI,MAAM,CAAC;IAC5B;;;;;OAKG;IACH,SAAS,IAAI,OAAO,GAAG,SAAS,CAAC;IACjC;;;OAGG;IACH,QAAQ,IAAI,MAAM,CAAC;CACpB;AAED;;;;GAIG;AACH,MAAM,WAAW,UAAU,CAAC,MAAM,EAAE,OAAO;IACzC,SAAS,EAAE,CAAC,MAAM,EAAE,eAAe,KAAK,wBAAwB,CAAC,MAAM,CAAC,CAAC;IAEzE,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,KAAK,IAAI,CAAC;IACpD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IAChD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACnC,YAAY,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAEtC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACtC,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACvC,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,GAAG,SAAS,CAAC;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,KAAK,GAAG,SAAS,CAAC;IAC/C,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IACrC,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;CACzC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/models.js b/node_modules/@azure/core-lro/dist/react-native/poller/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/models.js.map b/node_modules/@azure/core-lro/dist/react-native/poller/models.js.map new file mode 100644 index 0000000..554eb0c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/poller/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Configurations for how to poll the operation and to check whether it has\n * terminated.\n */\nexport interface OperationConfig {\n /** The operation location */\n operationLocation?: string;\n /** The resource location */\n resourceLocation?: string;\n /** metadata about the operation */\n metadata?: Record;\n}\n\n/**\n * The description of an operation.\n */\nexport interface Operation {\n /**\n * Sends the initiation request and returns, in addition to the response, the\n * operation location, the potential resource location, and a set of metadata.\n */\n init: () => Promise<\n OperationConfig & {\n response: TResponse;\n }\n >;\n /**\n * Sends the polling request.\n */\n poll: (location: string, options?: TOptions) => Promise;\n}\n\n/**\n * Type of a restorable long-running operation.\n */\nexport type RestorableOperationState = T & {\n /** The operation configuration */\n config: OperationConfig;\n};\n\n/**\n * Options for `createPoller`.\n */\nexport interface CreatePollerOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n restoreFrom?: string;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: TResponse, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: TResponse) => void;\n /**\n * A function to be called each time the operation location is updated by the\n * service.\n */\n withOperationLocation?: (operationLocation: string) => void;\n}\n\nexport interface LroError {\n code: string;\n innererror?: InnerError;\n message: string;\n}\n\nexport interface InnerError {\n code: string;\n message: string;\n innererror?: InnerError;\n}\n\n/**\n * Options for `buildCreatePoller`.\n */\nexport interface BuildCreatePollerOptions {\n /**\n * Gets the status of the operation from the response received when the\n * operation was initialized. 
Note that the operation could be already in\n * a terminal state at this time.\n */\n getStatusFromInitialResponse: (inputs: {\n response: TResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n }) => OperationStatus;\n /**\n * Gets the status of the operation from a response received when the\n * operation was polled.\n */\n getStatusFromPollResponse: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n /**\n * Determines if the input error is an operation error.\n */\n isOperationError: (error: Error) => boolean;\n /**\n * Gets the updated operation location from polling responses.\n */\n getOperationLocation?: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n /**\n * Gets the resource location from a response.\n */\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n /**\n * Gets from the response the time interval the service suggests the client to\n * wait before sending the next polling request.\n */\n getPollingInterval?: (response: TResponse) => number | undefined;\n /**\n * Extracts an error model from a response.\n */\n getError?: (response: TResponse) => LroError | undefined;\n /**\n * Control whether to throw an exception if the operation failed or was canceled.\n */\n resolveOnUnsuccessful: boolean;\n}\n\n/**\n * The set of possible states an operation can be in at any given time.\n */\nexport type OperationStatus = \"notStarted\" | \"running\" | \"succeeded\" | \"canceled\" | \"failed\";\n\n/**\n * While the poller works as the local control mechanism to start triggering and\n * wait for a long-running operation, OperationState documents the status of\n * the remote long-running operation. It gets updated after each poll.\n */\nexport interface OperationState {\n /**\n * The current status of the operation.\n */\n status: OperationStatus;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation produced a result of the expected type.\n */\n result?: TResult;\n}\n\n/**\n * CancelOnProgress is used as the return value of a Poller's onProgress method.\n * When a user invokes onProgress, they're required to pass in a function that will be\n * called as a callback with the new data received each time the poll operation is updated.\n * onProgress returns a function that will prevent any further update to reach the original callback.\n */\nexport type CancelOnProgress = () => void;\n\n/**\n * A simple poller interface.\n */\nexport interface SimplePollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. 
After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Returns the state of the operation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n\n/**\n * A state proxy that allows poller implementation to abstract away the operation\n * state. This is useful to implement `lroEngine` and `createPoller` in a modular\n * way.\n */\nexport interface StateProxy {\n initState: (config: OperationConfig) => RestorableOperationState;\n\n setRunning: (state: TState) => void;\n setCanceled: (state: TState) => void;\n setResult: (state: TState, result: TResult) => void;\n setError: (state: TState, error: Error) => void;\n setFailed: (state: TState) => void;\n setSucceeded: (state: TState) => void;\n\n isRunning: (state: TState) => boolean;\n isCanceled: (state: TState) => boolean;\n getResult: (state: TState) => TResult | undefined;\n getError: (state: TState) => Error | undefined;\n isFailed: (state: TState) => boolean;\n isSucceeded: (state: TState) => boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/operation.d.ts b/node_modules/@azure/core-lro/dist/react-native/poller/operation.d.ts new file mode 100644 index 0000000..a23a90f --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/operation.d.ts @@ -0,0 +1,40 @@ +import { LroError, Operation, OperationStatus, RestorableOperationState, StateProxy } from "./models.js"; +/** + * Deserializes the state + */ +export declare function deserializeState(serializedState: string): RestorableOperationState; +/** + * Initiates the long-running operation. + */ +export declare function initOperation(inputs: { + init: Operation["init"]; + stateProxy: StateProxy; + getOperationStatus: (inputs: { + response: TResponse; + state: RestorableOperationState; + operationLocation?: string; + }) => OperationStatus; + processResult?: (result: TResponse, state: TState) => TResult; + withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void; + setErrorAsResult: boolean; +}): Promise>; +/** Polls the long-running operation. 
*/ +export declare function pollOperation(inputs: { + poll: Operation["poll"]; + stateProxy: StateProxy; + state: RestorableOperationState; + getOperationStatus: (response: TResponse, state: RestorableOperationState) => OperationStatus; + getResourceLocation: (response: TResponse, state: RestorableOperationState) => string | undefined; + isOperationError: (error: Error) => boolean; + getPollingInterval?: (response: TResponse) => number | undefined; + setDelay: (intervalInMs: number) => void; + getOperationLocation?: (response: TResponse, state: RestorableOperationState) => string | undefined; + withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void; + processResult?: (result: TResponse, state: TState) => TResult; + getError?: (response: TResponse) => LroError | undefined; + updateState?: (state: TState, lastResponse: TResponse) => void; + isDone?: (lastResponse: TResponse, state: TState) => boolean; + setErrorAsResult: boolean; + options?: TOptions; +}): Promise; +//# sourceMappingURL=operation.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/operation.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/poller/operation.d.ts.map new file mode 100644 index 0000000..d8a62db --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/operation.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.d.ts","sourceRoot":"","sources":["../../../src/poller/operation.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,QAAQ,EAER,SAAS,EACT,eAAe,EACf,wBAAwB,EACxB,UAAU,EACX,MAAM,aAAa,CAAC;AAIrB;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,MAAM,EACrC,eAAe,EAAE,MAAM,GACtB,wBAAwB,CAAC,MAAM,CAAC,CAMlC;AAuGD;;GAEG;AACH,wBAAsB,aAAa,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;IACtE,IAAI,EAAE,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC;IAC5C,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,kBAAkB,EAAE,CAAC,MAAM,EAAE;QAC3B,QAAQ,EAAE,SAAS,CAAC;QACpB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;QACxC,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,KAAK,eAAe,CAAC;IACtB,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,KAAK,IAAI,CAAC;IAChF,gBAAgB,EAAE,OAAO,CAAC;CAC3B,GAAG,OAAO,CAAC,wBAAwB,CAAC,MAAM,CAAC,CAAC,CAqB5C;AA4DD,wCAAwC;AACxC,wBAAsB,aAAa,CAAC,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;IAChF,IAAI,EAAE,SAAS,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC;IAC7C,UAAU,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACxC,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,CAAC;IACxC,kBAAkB,EAAE,CAClB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,eAAe,CAAC;IACrB,mBAAmB,EAAE,CACnB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB,gBAAgB,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IAC5C,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,MAAM,GAAG,SAAS,CAAC;IACjE,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM,KAAK,IAAI,CAAC;IACzC,oBAAoB,CAAC,EAAE,CACrB,QAAQ,EAAE,SAAS,EACnB,KAAK,EAAE,wBAAwB,CAAC,MAAM,CAAC,KACpC,MAAM,GAAG,SAAS,CAAC;IACxB,qBAAqB,CAAC,EAAE,CAAC,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,KAAK,IAAI,CAAC;IAChF,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC9D,QAAQ,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,KAAK,QAAQ,GAAG,SAAS,CAAC;IACzD,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,KAAK,IAAI,CAAC;IAC/D,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC;IAC7D,gBAAgB,EAAE,OAAO,CAAC;IAC1B,OAAO,CAAC,EAAE,QAAQ,CAAC;CACpB,GAAG,OAAO,CAAC,IAAI,CAAC,CAsDhB"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist/react-native/poller/operation.js b/node_modules/@azure/core-lro/dist/react-native/poller/operation.js new file mode 100644 index 0000000..869eaa5 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/operation.js @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { logger } from "../logger.js"; +import { terminalStates } from "./constants.js"; +/** + * Deserializes the state + */ +export function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); + } +} +function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error) => { + if (isOperationError(error)) { + stateProxy.setError(state, error); + stateProxy.setFailed(state); + } + throw error; + }; +} +function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; + } + return message + " " + innerMessage; +} +function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); + } + return { + code, + message, + }; +} +function processOperationStatus(result) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError?.(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } + } + if (isDone?.(response, state) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); + } +} +function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; +} +/** + * Initiates the long-running operation. 
+ */ +export async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation?.(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation, + }; + logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; +} +async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError, + })); + const status = getOperationStatus(response, state); + logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status, + }; + } + } + return { response, status }; +} +/** Polls the long-running operation. */ +export async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== undefined) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options, + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult, + }); + if (!terminalStates.includes(status)) { + const intervalInMs = getPollingInterval?.(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation?.(response, state); + if (location !== undefined) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation?.(location, isUpdated); + } + else + withOperationLocation?.(operationLocation, false); + } + updateState?.(state, response); + } +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/operation.js.map b/node_modules/@azure/core-lro/dist/react-native/poller/operation.js.map new file mode 100644 index 0000000..849e973 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/operation.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/poller/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAUlC,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAEhD;;GAEG;AACH,MAAM,UAAU,gBAAgB,CAC9B,eAAuB;IAEvB,IAAI,CAAC;QACH,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,KAAK,CAAC;IAC3C,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,IAAI,KAAK,CAAC,sCAAsC,eAAe,EAAE,CAAC,CAAC;IAC3E,CAAC;AACH,CAAC;AAED,SAAS,aAAa,CAAkB,MAIvC;IACC,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,GAAG,MAAM,CAAC;IACvD,OAAO,CAAC,KAAY,EAAE,EAAE;QACtB,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE,CAAC;YAC5B,UAAU,CAAC,QAAQ,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;YAClC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;QAC9B,CAAC;QACD,MAAM,KAAK,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,0BAA0B,CAAC,cAAsB,EAAE,YAAoB;IAC9E,IAAI,OAAO,GAAG,cAAc,CAAC;IAC7B,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;QAC9B,OAAO,GAAG,OAAO,GAAG,GAAG,CAAC;IAC1B,CAAC;IACD,OAAO,OAAO,GAAG,GAAG,GAAG,YAAY,CAAC;AACtC,CAAC;AAED,SAAS,aAAa,CAAC,GAAa;IAIlC,IAAI,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC;IAC1B,IAAI,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;IACpB,IAAI,MAAM,GAAG,GAAiB,CAAC;IAC/B,OAAO,MAAM,CAAC,UAAU,EAAE,CAAC;QACzB,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC;QAC3B,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;QACnB,OAAO,GAAG,0BAA0B,CAAC,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAChE,CAAC;IACD,OAAO;QACL,IAAI;QACJ,OAAO;KACR,CAAC;AACJ,CAAC;AAED,SAAS,sBAAsB,CAA6B,MAS3D;IACC,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,QAAQ,EAAE,gBAAgB,EAAE,GAC9F,MAAM,CAAC;IACT,QAAQ,MAAM,EAAE,CAAC;QACf,KAAK,WAAW,CAAC,CAAC,CAAC;YACjB,UAAU,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;YAC/B,MAAM;QACR,CAAC;QACD,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,MAAM,GAAG,GAAG,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC;YACjC,IAAI,OAAO,GAAG,EAAE,CAAC;YACjB,IAAI,GAAG,EAAE,CAAC;gBACR,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;gBAC7C,OAAO,GAAG,KAAK,IAAI,KAAK,OAAO,EAAE,CAAC;YACpC,CAAC;YACD,MAAM,MAAM,GAAG,wCAAwC,OAAO,EAAE,CAAC;YACjE,UAAU,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;YAC9C,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YAC5B,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACvB,MAAM;QACR,CAAC;QACD,KAAK,UAAU,CAAC,CAAC,CAAC;YAChB,UAAU,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;YAC9B,MAAM;QACR,CAAC;IACH,CAAC;IACD,IACE,MAAM,EAAE,CAAC,QAAQ,EAAE,KAAK,CAAC;QACzB,CAAC,MAAM,KAAK,SAAS;YACnB,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,EACxF,CAAC;QACD,UAAU,CAAC,SAAS,CAClB,KAAK,EACL,WAAW,CAAC;YACV,QAAQ;YACR,KAAK;YACL,aAAa;SACd,CAAC,CACH,CAAC;IACJ,CAAC;AACH,CAAC;AAED,SAAS,WAAW,CAA6B,MAIhD;IACC,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;IAClD,OAAO,aAAa,CAAC,CAAC,CAAC,aAAa,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAE,QAA+B,CAAC;AAC3F,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAA6B,MAW/D;IACC,MAAM,EACJ,IAAI,EACJ,UAAU,EACV,aAAa,EACb,kBAAkB,EAClB,qBAAqB,EACrB,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,MAAM,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC;IACjF,IAAI,iBAAiB;QAAE,qBAAqB,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;IACzE,MAAM,MAAM,GAAG;QACb,QAAQ;QACR,iBAAiB;QACjB,gBAAgB;KACjB,CAAC;IACF,MAAM,CAAC,OAAO,CAAC,6BAA6B,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,KAAK,GAAG,UAAU,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC3C,MAAM,MAAM,GAAG,kBAAkB,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAC1E,sBAAsB,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,gBAAgB,EAAE,aAAa,EAAE,CAAC,CAAC;IACjG,OAAO,KAAK,CAAC;AACf,CAAC;AAED,KAAK,UAAU,mBAAmB,CAAuC,MAexE;IAIC,MAAM,EACJ,IAAI,EACJ,KAAK,EACL,UAAU,EACV,iBAAiB,EACjB,kBAAkB,EAClB,mBAAmB,EACnB,gBAAgB,EAChB,OAAO,GAC
R,GAAG,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC,KAAK,CAC3D,aAAa,CAAC;QACZ,KAAK;QACL,UAAU;QACV,gBAAgB;KACjB,CAAC,CACH,CAAC;IACF,MAAM,MAAM,GAAG,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IACnD,MAAM,CAAC,OAAO,CACZ,iCACE,KAAK,CAAC,MAAM,CAAC,iBACf,yBAAyB,MAAM,uBAC7B,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAChD,EAAE,CACH,CAAC;IACF,IAAI,MAAM,KAAK,WAAW,EAAE,CAAC;QAC3B,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QAC9D,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,OAAO;gBACL,QAAQ,EAAE,MAAM,IAAI,CAAC,gBAAgB,CAAC,CAAC,KAAK,CAC1C,aAAa,CAAC,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,CAAC,CACvD;gBACD,MAAM;aACP,CAAC;QACJ,CAAC;IACH,CAAC;IACD,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;AAC9B,CAAC;AAED,wCAAwC;AACxC,MAAM,CAAC,KAAK,UAAU,aAAa,CAAuC,MA0BzE;IACC,MAAM,EACJ,IAAI,EACJ,KAAK,EACL,UAAU,EACV,OAAO,EACP,kBAAkB,EAClB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,EAChB,qBAAqB,EACrB,kBAAkB,EAClB,aAAa,EACb,QAAQ,EACR,WAAW,EACX,QAAQ,EACR,MAAM,EACN,gBAAgB,GACjB,GAAG,MAAM,CAAC;IACX,MAAM,EAAE,iBAAiB,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC;IAC3C,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;QACpC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,mBAAmB,CAAC;YACrD,IAAI;YACJ,kBAAkB;YAClB,KAAK;YACL,UAAU;YACV,iBAAiB;YACjB,mBAAmB;YACnB,gBAAgB;YAChB,OAAO;SACR,CAAC,CAAC;QACH,sBAAsB,CAAC;YACrB,MAAM;YACN,QAAQ;YACR,KAAK;YACL,UAAU;YACV,MAAM;YACN,aAAa;YACb,QAAQ;YACR,gBAAgB;SACjB,CAAC,CAAC;QAEH,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;YACrC,MAAM,YAAY,GAAG,kBAAkB,EAAE,CAAC,QAAQ,CAAC,CAAC;YACpD,IAAI,YAAY;gBAAE,QAAQ,CAAC,YAAY,CAAC,CAAC;YACzC,MAAM,QAAQ,GAAG,oBAAoB,EAAE,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YACzD,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;gBAC3B,MAAM,SAAS,GAAG,iBAAiB,KAAK,QAAQ,CAAC;gBACjD,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,QAAQ,CAAC;gBAC1C,qBAAqB,EAAE,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;YAC/C,CAAC;;gBAAM,qBAAqB,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC3D,CAAC;QACD,WAAW,EAAE,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IACjC,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LroError,\n InnerError,\n Operation,\n OperationStatus,\n RestorableOperationState,\n StateProxy,\n} from \"./models.js\";\nimport { logger } from \"../logger.js\";\nimport { terminalStates } from \"./constants.js\";\n\n/**\n * Deserializes the state\n */\nexport function deserializeState(\n serializedState: string,\n): RestorableOperationState {\n try {\n return JSON.parse(serializedState).state;\n } catch (e) {\n throw new Error(`Unable to deserialize input state: ${serializedState}`);\n }\n}\n\nfunction setStateError(inputs: {\n state: TState;\n stateProxy: StateProxy;\n isOperationError: (error: Error) => boolean;\n}): (error: Error) => never {\n const { state, stateProxy, isOperationError } = inputs;\n return (error: Error) => {\n if (isOperationError(error)) {\n stateProxy.setError(state, error);\n stateProxy.setFailed(state);\n }\n throw error;\n };\n}\n\nfunction appendReadableErrorMessage(currentMessage: string, innerMessage: string): string {\n let message = currentMessage;\n if (message.slice(-1) !== \".\") {\n message = message + \".\";\n }\n return message + \" \" + innerMessage;\n}\n\nfunction simplifyError(err: LroError): {\n code: string;\n message: string;\n} {\n let message = err.message;\n let code = err.code;\n let curErr = err as InnerError;\n while (curErr.innererror) {\n curErr = curErr.innererror;\n code = curErr.code;\n message = appendReadableErrorMessage(message, curErr.message);\n }\n return {\n code,\n message,\n };\n}\n\nfunction processOperationStatus(result: {\n status: 
OperationStatus;\n response: TResponse;\n state: RestorableOperationState;\n stateProxy: StateProxy;\n processResult?: (result: TResponse, state: TState) => TResult;\n getError?: (response: TResponse) => LroError | undefined;\n isDone?: (lastResponse: TResponse, state: TState) => boolean;\n setErrorAsResult: boolean;\n}): void {\n const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } =\n result;\n switch (status) {\n case \"succeeded\": {\n stateProxy.setSucceeded(state);\n break;\n }\n case \"failed\": {\n const err = getError?.(response);\n let postfix = \"\";\n if (err) {\n const { code, message } = simplifyError(err);\n postfix = `. ${code}. ${message}`;\n }\n const errStr = `The long-running operation has failed${postfix}`;\n stateProxy.setError(state, new Error(errStr));\n stateProxy.setFailed(state);\n logger.warning(errStr);\n break;\n }\n case \"canceled\": {\n stateProxy.setCanceled(state);\n break;\n }\n }\n if (\n isDone?.(response, state) ||\n (isDone === undefined &&\n [\"succeeded\", \"canceled\"].concat(setErrorAsResult ? [] : [\"failed\"]).includes(status))\n ) {\n stateProxy.setResult(\n state,\n buildResult({\n response,\n state,\n processResult,\n }),\n );\n }\n}\n\nfunction buildResult(inputs: {\n response: TResponse;\n state: TState;\n processResult?: (result: TResponse, state: TState) => TResult;\n}): TResult {\n const { processResult, response, state } = inputs;\n return processResult ? processResult(response, state) : (response as unknown as TResult);\n}\n\n/**\n * Initiates the long-running operation.\n */\nexport async function initOperation(inputs: {\n init: Operation[\"init\"];\n stateProxy: StateProxy;\n getOperationStatus: (inputs: {\n response: TResponse;\n state: RestorableOperationState;\n operationLocation?: string;\n }) => OperationStatus;\n processResult?: (result: TResponse, state: TState) => TResult;\n withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void;\n setErrorAsResult: boolean;\n}): Promise> {\n const {\n init,\n stateProxy,\n processResult,\n getOperationStatus,\n withOperationLocation,\n setErrorAsResult,\n } = inputs;\n const { operationLocation, resourceLocation, metadata, response } = await init();\n if (operationLocation) withOperationLocation?.(operationLocation, false);\n const config = {\n metadata,\n operationLocation,\n resourceLocation,\n };\n logger.verbose(`LRO: Operation description:`, config);\n const state = stateProxy.initState(config);\n const status = getOperationStatus({ response, state, operationLocation });\n processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult });\n return state;\n}\n\nasync function pollOperationHelper(inputs: {\n poll: Operation[\"poll\"];\n stateProxy: StateProxy;\n state: RestorableOperationState;\n operationLocation: string;\n getOperationStatus: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n isOperationError: (error: Error) => boolean;\n options?: TOptions;\n}): Promise<{\n status: OperationStatus;\n response: TResponse;\n}> {\n const {\n poll,\n state,\n stateProxy,\n operationLocation,\n getOperationStatus,\n getResourceLocation,\n isOperationError,\n options,\n } = inputs;\n const response = await poll(operationLocation, options).catch(\n setStateError({\n state,\n stateProxy,\n isOperationError,\n }),\n );\n const status = 
getOperationStatus(response, state);\n logger.verbose(\n `LRO: Status:\\n\\tPolling from: ${\n state.config.operationLocation\n }\\n\\tOperation status: ${status}\\n\\tPolling status: ${\n terminalStates.includes(status) ? \"Stopped\" : \"Running\"\n }`,\n );\n if (status === \"succeeded\") {\n const resourceLocation = getResourceLocation(response, state);\n if (resourceLocation !== undefined) {\n return {\n response: await poll(resourceLocation).catch(\n setStateError({ state, stateProxy, isOperationError }),\n ),\n status,\n };\n }\n }\n return { response, status };\n}\n\n/** Polls the long-running operation. */\nexport async function pollOperation(inputs: {\n poll: Operation[\"poll\"];\n stateProxy: StateProxy;\n state: RestorableOperationState;\n getOperationStatus: (\n response: TResponse,\n state: RestorableOperationState,\n ) => OperationStatus;\n getResourceLocation: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n isOperationError: (error: Error) => boolean;\n getPollingInterval?: (response: TResponse) => number | undefined;\n setDelay: (intervalInMs: number) => void;\n getOperationLocation?: (\n response: TResponse,\n state: RestorableOperationState,\n ) => string | undefined;\n withOperationLocation?: (operationLocation: string, isUpdated: boolean) => void;\n processResult?: (result: TResponse, state: TState) => TResult;\n getError?: (response: TResponse) => LroError | undefined;\n updateState?: (state: TState, lastResponse: TResponse) => void;\n isDone?: (lastResponse: TResponse, state: TState) => boolean;\n setErrorAsResult: boolean;\n options?: TOptions;\n}): Promise {\n const {\n poll,\n state,\n stateProxy,\n options,\n getOperationStatus,\n getResourceLocation,\n getOperationLocation,\n isOperationError,\n withOperationLocation,\n getPollingInterval,\n processResult,\n getError,\n updateState,\n setDelay,\n isDone,\n setErrorAsResult,\n } = inputs;\n const { operationLocation } = state.config;\n if (operationLocation !== undefined) {\n const { response, status } = await pollOperationHelper({\n poll,\n getOperationStatus,\n state,\n stateProxy,\n operationLocation,\n getResourceLocation,\n isOperationError,\n options,\n });\n processOperationStatus({\n status,\n response,\n state,\n stateProxy,\n isDone,\n processResult,\n getError,\n setErrorAsResult,\n });\n\n if (!terminalStates.includes(status)) {\n const intervalInMs = getPollingInterval?.(response);\n if (intervalInMs) setDelay(intervalInMs);\n const location = getOperationLocation?.(response, state);\n if (location !== undefined) {\n const isUpdated = operationLocation !== location;\n state.config.operationLocation = location;\n withOperationLocation?.(location, isUpdated);\n } else withOperationLocation?.(operationLocation, false);\n }\n updateState?.(state, response);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/poller.d.ts b/node_modules/@azure/core-lro/dist/react-native/poller/poller.d.ts new file mode 100644 index 0000000..dc9e8fa --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/poller.d.ts @@ -0,0 +1,9 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +import { BuildCreatePollerOptions, CreatePollerOptions, Operation, OperationState, SimplePollerLike } from "./models.js"; +/** + * Returns a poller factory. 
+ */ +export declare function buildCreatePoller>(inputs: BuildCreatePollerOptions): (lro: Operation, options?: CreatePollerOptions) => Promise>; +//# sourceMappingURL=poller.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/poller.d.ts.map b/node_modules/@azure/core-lro/dist/react-native/poller/poller.d.ts.map new file mode 100644 index 0000000..b300b2e --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/poller.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.d.ts","sourceRoot":"","sources":["../../../src/poller/poller.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EACL,wBAAwB,EACxB,mBAAmB,EACnB,SAAS,EACT,cAAc,EAEd,gBAAgB,EAEjB,MAAM,aAAa,CAAC;AA8BrB;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,SAAS,cAAc,CAAC,OAAO,CAAC,EAC1F,MAAM,EAAE,wBAAwB,CAAC,SAAS,EAAE,MAAM,CAAC,GAClD,CACD,GAAG,EAAE,SAAS,CAAC,SAAS,EAAE;IAAE,WAAW,CAAC,EAAE,eAAe,CAAA;CAAE,CAAC,EAC5D,OAAO,CAAC,EAAE,mBAAmB,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,CAAC,KACtD,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CA6J9C"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/poller.js b/node_modules/@azure/core-lro/dist/react-native/poller/poller.js new file mode 100644 index 0000000..5fc9b09 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/poller.js @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { deserializeState, initOperation, pollOperation } from "./operation.js"; +import { POLL_INTERVAL_IN_MS } from "./constants.js"; +import { delay } from "@azure/core-util"; +const createStateProxy = () => ({ + /** + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. + */ + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => (state.status = "canceled"), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.status = "running"), + setSucceeded: (state) => (state.status = "succeeded"), + setFailed: (state) => (state.status = "failed"), + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded", +}); +/** + * Returns a poller factory. + */ +export function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback + ? (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() + : undefined; + const state = restoreFrom + ? 
deserializeState(restoreFrom) + : await initOperation({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful, + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === undefined, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state, + }), + onProgress: (callback) => { + const s = Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => (resultPromise ??= (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + // In the future we can use AbortSignal.any() instead + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal?.aborted) { + abortController.abort(); + } + else if (!abortSignal.aborted) { + inputAbortSignal?.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await delay(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } + } + } + finally { + inputAbortSignal?.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } + else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); + } + } + })().finally(() => { + resultPromise = undefined; + })), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } + else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + await pollOperation({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful, + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + }, + }; + return poller; + }; +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/poller.js.map b/node_modules/@azure/core-lro/dist/react-native/poller/poller.js.map new file mode 100644 index 0000000..34902cf --- /dev/null +++ b/node_modules/@azure/core-lro/dist/react-native/poller/poller.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../../src/poller/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAYlC,OAAO,EAAE,gBAAgB,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAChF,OAAO,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AACrD,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC,MAAM,gBAAgB,GAGlB,GAAG,EAAE,CAAC,CAAC;IACT;;;;OAIG;IACH,SAAS,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,CAAQ;IAC7D,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,UAAU,CAAC;IACnD,QAAQ,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;IACjD,SAAS,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;IACrD,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,SAAS,CAAC;IACjD,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,WAAW,CAAC;IACrD,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,QAAQ,CAAC;IAE/C,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK;IAChC,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM;IAClC,UAAU,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,UAAU;IAClD,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,QAAQ;IAC9C,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,SAAS;IAChD,WAAW,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK,WAAW;CACrD,CAAC,CAAC;AAEH;;GAEG;AACH,MAAM,UAAU,iBAAiB,CAC/B,MAAmD;IAKnD,MAAM,EACJ,oBAAoB,EACpB,4BAA4B,EAC5B,yBAAyB,EACzB,gBAAgB,EAChB,mBAAmB,EACnB,kBAAkB,EAClB,QAAQ,EACR,qBAAqB,GACtB,GAAG,MAAM,CAAC;IACX,OAAO,KAAK,EACV,EAAE,IAAI,EAAE,IAAI,EAA2D,EACvE,OAAyD,EACzD,EAAE;QACF,MAAM,EACJ,aAAa,EACb,WAAW,EACX,qBAAqB,EAAE,6BAA6B,EACpD,YAAY,GAAG,mBAAmB,EAClC,WAAW,GACZ,GAAG,OAAO,IAAI,EAAE,CAAC;QAClB,MAAM,UAAU,GAAG,gBAAgB,EAAmB,CAAC;QACvD,MAAM,qBAAqB,GAAG,6BAA6B;YACzD,CAAC,CAAC,CAAC,GAAG,EAAE;gBACJ,IAAI,MAAM,GAAG,KAAK,CAAC;gBACnB,OAAO,CAAC,iBAAyB,EAAE,SAAkB,EAAE,EAAE;oBACvD,IAAI,SAAS;wBAAE,6BAA6B,CAAC,iBAAiB,CAAC,CAAC;yBAC3D,IAAI,CAAC,MAAM;wBAAE,6BAA6B,CAAC,iBAAiB,CAAC,CAAC;oBACnE,MAAM,GAAG,IAAI,CAAC;gBAChB,CAAC,CAAC;YACJ,CAAC,CAAC,EAAE;YACN,CAAC,CAAC,SAAS,CAAC;QACd,MAAM,KAAK,GAAqC,WAAW;YACzD,CAAC,CAAC,gBAAgB,CAAC,WAAW,CAAC;YAC/B,CAAC,CAAC,MAAM,aAAa,CAAC;gBAClB,IAAI;gBACJ,UAAU;gBACV,aAAa;gBACb,kBAAkB,EAAE,4BAA4B;gBAChD,qBAAqB;gBACrB,gBAAgB,EAAE,CAAC,qBAAqB;aACzC,CAAC,CAAC;QACP,IAAI,aAA2C,CAAC;QAChD,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAG9C,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAmB,CAAC;QAC5C,MAAM,oBAAoB,GAAG,KAAK,IAAmB,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;QAC1F,MAAM,YAAY,GAAG,wBAAwB,CAAC;QAC9C,IAAI,uBAAuB,GAAG,YAAY,CAAC;QAE3C,MAAM,MAAM,GAAsC;YAChD,iBAAiB,EAAE,GAAG,EAAE,CAAC,KAAK;YAC9B,SAAS,EAAE,GAAG,EAAE,CAAC,KAAK,CAAC,MAAM;YAC7B,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,WAAW,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC;YACxE,SAAS,EAAE,GAAG,EAAE,CAAC,aAAa,KAAK,SAAS;YAC5C,WAAW,EAAE,GAAG,EAAE;gBAChB,eAAe,CAAC,KAAK,EAAE,CAAC;YAC1B,CAAC;YACD,QAAQ,EAAE,GAAG,EAAE,CACb,IAAI,CAAC,SAAS,CAAC;gBACb,KAAK;aACN,CAAC;YACJ,UAAU,EAAE,CAAC,QAAiC,EAAE,EAAE;gBAChD,MAAM,CAAC,GAAG,MAAM,EAAE,CAAC;gBACnB,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;gBAC1B,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;YACD,aAAa,EAAE,CAAC,WAA+C,EAAE,EAAE,CACjE,CAAC,aAAa,KAAK,CAAC,KAAK,IAAI,EAAE;gBAC7B,MAAM,EAAE,WAAW,EAAE,gBAAgB,EAAE,GAAG,WAAW,IAAI,EAAE,CAAC;gBAC5D,qDAAqD;gBACrD,SAAS,aAAa;oBACpB,eAAe,CAAC,KAAK,EAAE,CAAC;gBAC1B,CAAC;gBACD,MAAM,WAAW,GAAG,eAAe,CAAC,MAAM,CAAC;gBAC3C,IAAI,gBAAgB,EAAE,OAAO,EAAE,CAAC;oBAC9B,eAAe,CAAC,KAAK,EAAE,CAAC;gBAC1B,CAAC;qBAAM,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;oBAChC,gBAAgB,EAAE,gBAAgB,CAAC,OAAO,E
AAE,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7E,CAAC;gBAED,IAAI,CAAC;oBACH,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC;wBACrB,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,CAAC,CAAC;wBACnC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC;4BACxB,MAAM,KAAK,CAAC,uBAAuB,EAAE,EAAE,WAAW,EAAE,CAAC,CAAC;4BACtD,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,CAAC,CAAC;wBACrC,CAAC;oBACH,CAAC;gBACH,CAAC;wBAAS,CAAC;oBACT,gBAAgB,EAAE,mBAAmB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;gBAChE,CAAC;gBACD,IAAI,qBAAqB,EAAE,CAAC;oBAC1B,OAAO,MAAM,CAAC,SAAS,EAAa,CAAC;gBACvC,CAAC;qBAAM,CAAC;oBACN,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,WAAW;4BACd,OAAO,MAAM,CAAC,SAAS,EAAa,CAAC;wBACvC,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;wBACpB,KAAK,YAAY,CAAC;wBAClB,KAAK,SAAS;4BACZ,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAC;oBACvE,CAAC;gBACH,CAAC;YACH,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE;gBAChB,aAAa,GAAG,SAAS,CAAC;YAC5B,CAAC,CAAC,CAAC;YACL,KAAK,CAAC,IAAI,CAAC,WAA+C;gBACxD,IAAI,qBAAqB,EAAE,CAAC;oBAC1B,IAAI,MAAM,CAAC,MAAM,EAAE;wBAAE,OAAO;gBAC9B,CAAC;qBAAM,CAAC;oBACN,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,WAAW;4BACd,OAAO;wBACT,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;oBACtB,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,CAAC;oBAClB,IAAI;oBACJ,KAAK;oBACL,UAAU;oBACV,oBAAoB;oBACpB,gBAAgB;oBAChB,qBAAqB;oBACrB,kBAAkB;oBAClB,kBAAkB,EAAE,yBAAyB;oBAC7C,mBAAmB;oBACnB,aAAa;oBACb,QAAQ;oBACR,WAAW;oBACX,OAAO,EAAE,WAAW;oBACpB,QAAQ,EAAE,CAAC,gBAAgB,EAAE,EAAE;wBAC7B,uBAAuB,GAAG,gBAAgB,CAAC;oBAC7C,CAAC;oBACD,gBAAgB,EAAE,CAAC,qBAAqB;iBACzC,CAAC,CAAC;gBACH,MAAM,oBAAoB,EAAE,CAAC;gBAC7B,IAAI,CAAC,qBAAqB,EAAE,CAAC;oBAC3B,QAAQ,KAAK,CAAC,MAAM,EAAE,CAAC;wBACrB,KAAK,UAAU;4BACb,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;wBAChC,KAAK,QAAQ;4BACX,MAAM,KAAK,CAAC,KAAK,CAAC;oBACtB,CAAC;gBACH,CAAC;YACH,CAAC;SACF,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n BuildCreatePollerOptions,\n CreatePollerOptions,\n Operation,\n OperationState,\n RestorableOperationState,\n SimplePollerLike,\n StateProxy,\n} from \"./models.js\";\nimport { deserializeState, initOperation, pollOperation } from \"./operation.js\";\nimport { POLL_INTERVAL_IN_MS } from \"./constants.js\";\nimport { delay } from \"@azure/core-util\";\n\nconst createStateProxy: >() => StateProxy<\n TState,\n TResult\n> = () => ({\n /**\n * The state at this point is created to be of type OperationState.\n * It will be updated later to be of type TState when the\n * customer-provided callback, `updateState`, is called during polling.\n */\n initState: (config) => ({ status: \"running\", config }) as any,\n setCanceled: (state) => (state.status = \"canceled\"),\n setError: (state, error) => (state.error = error),\n setResult: (state, result) => (state.result = result),\n setRunning: (state) => (state.status = \"running\"),\n setSucceeded: (state) => (state.status = \"succeeded\"),\n setFailed: (state) => (state.status = \"failed\"),\n\n getError: (state) => state.error,\n getResult: (state) => state.result,\n isCanceled: (state) => state.status === \"canceled\",\n isFailed: (state) => state.status === \"failed\",\n isRunning: (state) => state.status === \"running\",\n isSucceeded: (state) => state.status === \"succeeded\",\n});\n\n/**\n * Returns a poller factory.\n */\nexport function buildCreatePoller>(\n inputs: BuildCreatePollerOptions,\n): (\n lro: Operation,\n options?: 
CreatePollerOptions,\n) => Promise> {\n const {\n getOperationLocation,\n getStatusFromInitialResponse,\n getStatusFromPollResponse,\n isOperationError,\n getResourceLocation,\n getPollingInterval,\n getError,\n resolveOnUnsuccessful,\n } = inputs;\n return async (\n { init, poll }: Operation,\n options?: CreatePollerOptions,\n ) => {\n const {\n processResult,\n updateState,\n withOperationLocation: withOperationLocationCallback,\n intervalInMs = POLL_INTERVAL_IN_MS,\n restoreFrom,\n } = options || {};\n const stateProxy = createStateProxy();\n const withOperationLocation = withOperationLocationCallback\n ? (() => {\n let called = false;\n return (operationLocation: string, isUpdated: boolean) => {\n if (isUpdated) withOperationLocationCallback(operationLocation);\n else if (!called) withOperationLocationCallback(operationLocation);\n called = true;\n };\n })()\n : undefined;\n const state: RestorableOperationState = restoreFrom\n ? deserializeState(restoreFrom)\n : await initOperation({\n init,\n stateProxy,\n processResult,\n getOperationStatus: getStatusFromInitialResponse,\n withOperationLocation,\n setErrorAsResult: !resolveOnUnsuccessful,\n });\n let resultPromise: Promise | undefined;\n const abortController = new AbortController();\n // Progress handlers\n type Handler = (state: TState) => void;\n const handlers = new Map();\n const handleProgressEvents = async (): Promise => handlers.forEach((h) => h(state));\n const cancelErrMsg = \"Operation was canceled\";\n let currentPollIntervalInMs = intervalInMs;\n\n const poller: SimplePollerLike = {\n getOperationState: () => state,\n getResult: () => state.result,\n isDone: () => [\"succeeded\", \"failed\", \"canceled\"].includes(state.status),\n isStopped: () => resultPromise === undefined,\n stopPolling: () => {\n abortController.abort();\n },\n toString: () =>\n JSON.stringify({\n state,\n }),\n onProgress: (callback: (state: TState) => void) => {\n const s = Symbol();\n handlers.set(s, callback);\n return () => handlers.delete(s);\n },\n pollUntilDone: (pollOptions?: { abortSignal?: AbortSignalLike }) =>\n (resultPromise ??= (async () => {\n const { abortSignal: inputAbortSignal } = pollOptions || {};\n // In the future we can use AbortSignal.any() instead\n function abortListener(): void {\n abortController.abort();\n }\n const abortSignal = abortController.signal;\n if (inputAbortSignal?.aborted) {\n abortController.abort();\n } else if (!abortSignal.aborted) {\n inputAbortSignal?.addEventListener(\"abort\", abortListener, { once: true });\n }\n\n try {\n if (!poller.isDone()) {\n await poller.poll({ abortSignal });\n while (!poller.isDone()) {\n await delay(currentPollIntervalInMs, { abortSignal });\n await poller.poll({ abortSignal });\n }\n }\n } finally {\n inputAbortSignal?.removeEventListener(\"abort\", abortListener);\n }\n if (resolveOnUnsuccessful) {\n return poller.getResult() as TResult;\n } else {\n switch (state.status) {\n case \"succeeded\":\n return poller.getResult() as TResult;\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n case \"notStarted\":\n case \"running\":\n throw new Error(`Polling completed without succeeding or failing`);\n }\n }\n })().finally(() => {\n resultPromise = undefined;\n })),\n async poll(pollOptions?: { abortSignal?: AbortSignalLike }): Promise {\n if (resolveOnUnsuccessful) {\n if (poller.isDone()) return;\n } else {\n switch (state.status) {\n case \"succeeded\":\n return;\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case 
\"failed\":\n throw state.error;\n }\n }\n await pollOperation({\n poll,\n state,\n stateProxy,\n getOperationLocation,\n isOperationError,\n withOperationLocation,\n getPollingInterval,\n getOperationStatus: getStatusFromPollResponse,\n getResourceLocation,\n processResult,\n getError,\n updateState,\n options: pollOptions,\n setDelay: (pollIntervalInMs) => {\n currentPollIntervalInMs = pollIntervalInMs;\n },\n setErrorAsResult: !resolveOnUnsuccessful,\n });\n await handleProgressEvents();\n if (!resolveOnUnsuccessful) {\n switch (state.status) {\n case \"canceled\":\n throw new Error(cancelErrMsg);\n case \"failed\":\n throw state.error;\n }\n }\n },\n };\n return poller;\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/LICENSE b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/README.md b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/README.md new file mode 100644 index 0000000..2c8a197 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/README.md @@ -0,0 +1,69 @@ +# Azure Abort Controller client library for JavaScript + +The `@azure/abort-controller` package provides `AbortSignalLike` interface and +`AbortError` classes to make it easier to work with the +[AbortController](https://developer.mozilla.org/docs/Web/API/AbortController) + and the `AbortSignal` used by +[fetch](https://developer.mozilla.org/docs/Web/API/Fetch_API) built into modern JavaScript platforms. + +Customers of Azure SDK for JavaScript in general do not need to use this library. Instead they +use `AbortController` and `AbortSignal` provided by their platforms and pass the abort signals to Azure SDK operations. 
+ +Key links: + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller) +- [Package (npm)](https://www.npmjs.com/package/@azure/abort-controller) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/overview/azure/abort-controller-readme) + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/abort-controller +``` + +## Key Concepts + +Use `AbortController` to create an `AbortSignal` which can then be passed to Azure SDK operations to cancel +pending work. The `AbortSignal` can be accessed via the `signal` property on an instantiated `AbortController`. +An `AbortSignal` can also be returned directly from a static method, e.g. `AbortSignal.timeout(100)`. +that is cancelled after 100 milliseconds. + +## Examples + +The below examples assume that `doAsyncWork` is a function that takes a bag of properties, one of which is +of the abort signal. + +### Example 1 - basic usage + +```js +const controller = new AbortController(); +doAsyncWork({ abortSignal: controller.signal }); + +// at some point later +controller.abort(); +``` + +### Example 2 - Aborting with timeout + +```js +const signal = AbortSignal.timeout(1000); +doAsyncWork({ abortSignal: signal }); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fabort-controller%2FREADME.png) diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/abort-controller.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/abort-controller.d.ts new file mode 100644 index 0000000..902fd4d --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/abort-controller.d.ts @@ -0,0 +1,42 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} + +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export declare interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} + +export { } diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from 
"./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/package.json b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js new file mode 100644 index 0000000..c7a0749 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js @@ -0,0 +1,31 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map new file mode 100644 index 0000000..0514343 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAa,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF;AALD,gCAKC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js new file mode 100644 index 0000000..aa54840 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map new file mode 100644 index 0000000..5fea0c4 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} 
+export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.js new file mode 100644 index 0000000..b589ef0 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AbortError = void 0; +var AbortError_js_1 = require("./AbortError.js"); +Object.defineProperty(exports, "AbortError", { enumerable: true, get: function () { return AbortError_js_1.AbortError; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.js.map new file mode 100644 index 0000000..48ef08c --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAMlC,iDAA6C;AAApC,2GAAA,UAAU,OAAA","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/package.json b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 0000000..22735db --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. 
+{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.42.3" + } + ] +} diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# 
sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/package.json b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts new file mode 
100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.d.ts b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.js b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.js.map b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/package.json b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/package.json b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/package.json new file mode 100644 index 0000000..d8375e5 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/@azure/abort-controller/package.json @@ -0,0 +1,116 @@ +{ + "name": "@azure/abort-controller", + "sdk-type": "client", + "version": "2.1.1", + "description": "Microsoft Azure SDK for JavaScript - Aborter", + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "sideEffects": false, + "type": "module", + "main": "./dist/commonjs/index.js", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=18.0.0" + }, + "keywords": [ + "azure", + "aborter", + "abortsignal", + "cancellation", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,mjs,cjs,json}\"", + "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && 
api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,mjs,cjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.11.1", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-lro/package.json b/node_modules/@azure/core-lro/package.json new file mode 100644 index 0000000..ac10fd5 --- /dev/null +++ b/node_modules/@azure/core-lro/package.json @@ -0,0 +1,129 @@ +{ + "name": "@azure/core-lro", + "author": "Microsoft Corporation", + "sdk-type": "client", + "type": "module", + "version": "2.7.1", + "description": "Isomorphic client library for supporting long-running operations in node.js and browser.", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist/", + "LICENSE", + "README.md" + ], + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "lro", + "polling" + ], + "keywords": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "lro", + "polling", + "azure", + "cloud" + ], + "engines": { + "node": ">=18.0.0" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/README.md", + "repository": 
"github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "sideEffects": false, + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist dist-* types *.log browser statistics.html coverage src/**/*.js test/**/*.js", + "execute:samples": "echo skipped", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run build:test && npm run unit-test:node && npm run integration-test:node", + "test": "npm run build:test && npm run unit-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/core-rest-pipeline": "^1.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/dev-tool": "^1.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.11.1", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-paging/LICENSE b/node_modules/@azure/core-paging/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-paging/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies 
or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-paging/README.md b/node_modules/@azure/core-paging/README.md new file mode 100644 index 0000000..5fb4977 --- /dev/null +++ b/node_modules/@azure/core-paging/README.md @@ -0,0 +1,61 @@ +# Azure Core Paging client library for JavaScript + +This library provides core types for paging async iterable iterators. + +## Getting started + +### Installation + +If using this as part of another project in the [azure-sdk-for-js](https://github.com/Azure/azure-sdk-for-js) repo, +then run `rush install` after cloning the repo. + +Otherwise, use npm to install this package in your application as follows + +```javascript +npm install @azure/core-paging +``` + +## Key concepts + +You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/ms-rest-js/blob/master/docs/architectureOverview.md). + +## Examples + +Example of building with the types: + +```typescript + public listSecrets( + options: ListSecretsOptions = {} + ): PagedAsyncIterableIterator { + const iter = this.listSecretsAll(options); + return { + async next() { return iter.next(); }, + [Symbol.asyncIterator]() { return this; }, + byPage: (settings: PageSettings = {}) => this.listSecretsPage(settings, options), + }; + } +``` + +And using the types: + +``` + for await (let page of client.listSecrets().byPage({ maxPageSize: 2 })) { + for (const secret of page) { + console.log("secret: ", secret); + } + } +``` + +## Next steps + +Try out this package in your application when dealing with async iterable iterators and provide feedback! + +## Troubleshooting + +Log an issue at https://github.com/Azure/azure-sdk-for-js/issues + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-paging%2FREADME.png) diff --git a/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.d.ts b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.d.ts new file mode 100644 index 0000000..deb559b --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.d.ts @@ -0,0 +1,10 @@ +import { PageSettings, PagedAsyncIterableIterator, PagedResult } from "./models.js"; +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. 
+ */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; +//# sourceMappingURL=getPagedAsyncIterator.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.d.ts.map b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.d.ts.map new file mode 100644 index 0000000..5e493e9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.d.ts","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,YAAY,EAAE,0BAA0B,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAEpF;;;;;;GAMG;AAEH,wBAAgB,qBAAqB,CACnC,QAAQ,EACR,KAAK,GAAG,QAAQ,EAAE,EAClB,aAAa,GAAG,YAAY,EAC5B,KAAK,GAAG,MAAM,EAEd,WAAW,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,EAAE,KAAK,CAAC,GACpD,0BAA0B,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,CAmB5D"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js new file mode 100644 index 0000000..d9db4b9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export function getPagedAsyncIterator(pagedResult) { + const iter = getItemAsyncIterator(pagedResult); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: pagedResult?.byPage ?? + ((settings) => { + const { continuationToken, maxPageSize } = settings ?? {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), + }; +} +async function* getItemAsyncIterator(pagedResult) { + const pages = getPageAsyncIterator(pagedResult); + const firstVal = await pages.next(); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield* toElements(firstVal.value); + for await (const page of pages) { + yield* toElements(page); + } + } + else { + yield firstVal.value; + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield* pages; + } + } + else { + yield* firstVal.value; + for await (const page of pages) { + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield* page; + } + } +} +async function* getPageAsyncIterator(pagedResult, options = {}) { + const { pageLink, maxPageSize } = options; + let response = await pagedResult.getPage(pageLink ?? 
pagedResult.firstPageLink, maxPageSize); + if (!response) { + return; + } + yield response.page; + while (response.nextPageLink) { + response = await pagedResult.getPage(response.nextPageLink, maxPageSize); + if (!response) { + return; + } + yield response.page; + } +} +//# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js.map b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js.map new file mode 100644 index 0000000..d40e079 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.js","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;;;;GAMG;AAEH,MAAM,UAAU,qBAAqB,CAMnC,WAAqD;IAErD,MAAM,IAAI,GAAG,oBAAoB,CAAwC,WAAW,CAAC,CAAC;IACtF,OAAO;QACL,IAAI;YACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;QACrB,CAAC;QACD,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;QACD,MAAM,EACJ,WAAW,EAAE,MAAM;YAClB,CAAC,CAAC,QAAuB,EAAE,EAAE;gBAC5B,MAAM,EAAE,iBAAiB,EAAE,WAAW,EAAE,GAAG,QAAQ,IAAI,EAAE,CAAC;gBAC1D,OAAO,oBAAoB,CAAC,WAAW,EAAE;oBACvC,QAAQ,EAAE,iBAAiD;oBAC3D,WAAW;iBACZ,CAAC,CAAC;YACL,CAAC,CAA2E;KAC/E,CAAC;AACJ,CAAC;AAED,KAAK,SAAS,CAAC,CAAC,oBAAoB,CAClC,WAAqD;IAErD,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC;IACpC,6FAA6F;IAC7F,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QACnC,sCAAsC;QACtC,MAAM,EAAE,UAAU,EAAE,GAAG,WAAW,CAAC;QACnC,IAAI,UAAU,EAAE,CAAC;YACf,KAAK,CAAC,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAe,CAAC;YAChD,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBAC/B,KAAK,CAAC,CAAC,UAAU,CAAC,IAAI,CAAe,CAAC;YACxC,CAAC;QACH,CAAC;aAAM,CAAC;YACN,MAAM,QAAQ,CAAC,KAAK,CAAC;YACrB,sFAAsF;YACtF,KAAK,CAAC,CAAC,KAAmD,CAAC;QAC7D,CAAC;IACH,CAAC;SAAM,CAAC;QACN,KAAK,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC;QACtB,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YAC/B,gGAAgG;YAChG,gDAAgD;YAChD,KAAK,CAAC,CAAC,IAA6B,CAAC;QACvC,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,SAAS,CAAC,CAAC,oBAAoB,CAClC,WAAqD,EACrD,UAGI,EAAE;IAEN,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;IAC1C,IAAI,QAAQ,GAAG,MAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,IAAI,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;IAC7F,IAAI,CAAC,QAAQ,EAAE,CAAC;QACd,OAAO;IACT,CAAC;IACD,MAAM,QAAQ,CAAC,IAAI,CAAC;IACpB,OAAO,QAAQ,CAAC,YAAY,EAAE,CAAC;QAC7B,QAAQ,GAAG,MAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;QACzE,IAAI,CAAC,QAAQ,EAAE,CAAC;YACd,OAAO;QACT,CAAC;QACD,MAAM,QAAQ,CAAC,IAAI,CAAC;IACtB,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PageSettings, PagedAsyncIterableIterator, PagedResult } from \"./models.js\";\n\n/**\n * returns an async iterator that iterates over results. It also has a `byPage`\n * method that returns pages of items at once.\n *\n * @param pagedResult - an object that specifies how to get pages.\n * @returns a paged async iterator that iterates over results.\n */\n\nexport function getPagedAsyncIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n TLink = string,\n>(\n pagedResult: PagedResult,\n): PagedAsyncIterableIterator {\n const iter = getItemAsyncIterator(pagedResult);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage:\n pagedResult?.byPage ??\n (((settings?: PageSettings) => {\n const { continuationToken, maxPageSize } = settings ?? 
{};\n return getPageAsyncIterator(pagedResult, {\n pageLink: continuationToken as unknown as TLink | undefined,\n maxPageSize,\n });\n }) as unknown as (settings?: TPageSettings) => AsyncIterableIterator),\n };\n}\n\nasync function* getItemAsyncIterator(\n pagedResult: PagedResult,\n): AsyncIterableIterator {\n const pages = getPageAsyncIterator(pagedResult);\n const firstVal = await pages.next();\n // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is\n if (!Array.isArray(firstVal.value)) {\n // can extract elements from this page\n const { toElements } = pagedResult;\n if (toElements) {\n yield* toElements(firstVal.value) as TElement[];\n for await (const page of pages) {\n yield* toElements(page) as TElement[];\n }\n } else {\n yield firstVal.value;\n // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case\n yield* pages as unknown as AsyncIterableIterator;\n }\n } else {\n yield* firstVal.value;\n for await (const page of pages) {\n // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch,\n // it must be the case that `TPage = TElement[]`\n yield* page as unknown as TElement[];\n }\n }\n}\n\nasync function* getPageAsyncIterator(\n pagedResult: PagedResult,\n options: {\n maxPageSize?: number;\n pageLink?: TLink;\n } = {},\n): AsyncIterableIterator {\n const { pageLink, maxPageSize } = options;\n let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize);\n if (!response) {\n return;\n }\n yield response.page;\n while (response.nextPageLink) {\n response = await pagedResult.getPage(response.nextPageLink, maxPageSize);\n if (!response) {\n return;\n }\n yield response.page;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/index.d.ts b/node_modules/@azure/core-paging/dist/browser/index.d.ts new file mode 100644 index 0000000..5712e6d --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/index.d.ts @@ -0,0 +1,3 @@ +export * from "./models.js"; +export * from "./getPagedAsyncIterator.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/index.d.ts.map b/node_modules/@azure/core-paging/dist/browser/index.d.ts.map new file mode 100644 index 0000000..66a03ce --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,cAAc,aAAa,CAAC;AAC5B,cAAc,4BAA4B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/index.js b/node_modules/@azure/core-paging/dist/browser/index.js new file mode 100644 index 0000000..4dca636 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export * from "./models.js"; +export * from "./getPagedAsyncIterator.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/index.js.map b/node_modules/@azure/core-paging/dist/browser/index.js.map new file mode 100644 index 0000000..800f434 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,aAAa,CAAC;AAC5B,cAAc,4BAA4B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./models.js\";\nexport * from \"./getPagedAsyncIterator.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/models.d.ts b/node_modules/@azure/core-paging/dist/browser/models.d.ts new file mode 100644 index 0000000..5cc85f2 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/models.d.ts @@ -0,0 +1,69 @@ +/** + * An interface that tracks the settings for paged iteration + */ +export interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: TPageSettings) => AsyncIterableIterator; +} +/** + * An interface that describes how to communicate with the service. + */ +export interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + } | undefined>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; + /** + * A function to extract elements from a page. 
+ */ + toElements?: (page: TPage) => unknown[]; +} +/** + * Paged collection of T items + */ +export type Paged = { + /** + * The T items on this page + */ + value: T[]; + /** + * The link to the next page of items + */ + nextLink?: string; +}; +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/models.d.ts.map b/node_modules/@azure/core-paging/dist/browser/models.d.ts.map new file mode 100644 index 0000000..efa9bce --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AACD;;GAEG;AACH,MAAM,WAAW,0BAA0B,CACzC,QAAQ,EACR,KAAK,GAAG,QAAQ,EAAE,EAClB,aAAa,GAAG,YAAY;IAE5B;;OAEG;IACH,IAAI,IAAI,OAAO,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC1C;;OAEG;IACH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,0BAA0B,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,CAAC;IACrF;;OAEG;IACH,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,aAAa,KAAK,qBAAqB,CAAC,KAAK,CAAC,CAAC;CACpE;AAED;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,KAAK,EAAE,aAAa,GAAG,YAAY,EAAE,KAAK,GAAG,MAAM;IAC9E;;OAEG;IACH,aAAa,EAAE,KAAK,CAAC;IACrB;;OAEG;IACH,OAAO,EAAE,CACP,QAAQ,EAAE,KAAK,EACf,WAAW,CAAC,EAAE,MAAM,KACjB,OAAO,CAAC;QAAE,IAAI,EAAE,KAAK,CAAC;QAAC,YAAY,CAAC,EAAE,KAAK,CAAA;KAAE,GAAG,SAAS,CAAC,CAAC;IAChE;;;OAGG;IACH,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,aAAa,KAAK,qBAAqB,CAAC,KAAK,CAAC,CAAC;IAEpE;;OAEG;IACH,UAAU,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,EAAE,CAAC;CACzC;AAED;;GAEG;AACH,MAAM,MAAM,KAAK,CAAC,CAAC,IAAI;IACrB;;OAEG;IACH,KAAK,EAAE,CAAC,EAAE,CAAC;IACX;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/models.js b/node_modules/@azure/core-paging/dist/browser/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/models.js.map b/node_modules/@azure/core-paging/dist/browser/models.js.map new file mode 100644 index 0000000..a64e673 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * An interface that tracks the settings for paged iteration\n */\nexport interface PageSettings {\n /**\n * The token that keeps track of where to continue the iterator\n */\n continuationToken?: string;\n /**\n * The size of the page during paged iteration\n */\n maxPageSize?: number;\n}\n/**\n * An interface that allows async iterable iteration both to completion and by page.\n */\nexport interface PagedAsyncIterableIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n> {\n /**\n * The next method, part of the iteration protocol\n */\n next(): Promise>;\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator](): PagedAsyncIterableIterator;\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings?: TPageSettings) => AsyncIterableIterator;\n}\n\n/**\n * An interface that describes how to communicate with the service.\n */\nexport interface PagedResult {\n /**\n * Link to the first page of results.\n */\n firstPageLink: TLink;\n /**\n * A method that returns a page of results.\n */\n getPage: (\n pageLink: TLink,\n maxPageSize?: number,\n ) => Promise<{ page: TPage; nextPageLink?: TLink } | undefined>;\n /**\n * a function to implement the `byPage` method on the paged async iterator. The default is\n * one that sets the `maxPageSizeParam` from `settings.maxPageSize`.\n */\n byPage?: (settings?: TPageSettings) => AsyncIterableIterator;\n\n /**\n * A function to extract elements from a page.\n */\n toElements?: (page: TPage) => unknown[];\n}\n\n/**\n * Paged collection of T items\n */\nexport type Paged = {\n /**\n * The T items on this page\n */\n value: T[];\n /**\n * The link to the next page of items\n */\n nextLink?: string;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/browser/package.json b/node_modules/@azure/core-paging/dist/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-paging/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.d.ts b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.d.ts new file mode 100644 index 0000000..deb559b --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.d.ts @@ -0,0 +1,10 @@ +import { PageSettings, PagedAsyncIterableIterator, PagedResult } from "./models.js"; +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. 
+ */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; +//# sourceMappingURL=getPagedAsyncIterator.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.d.ts.map b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.d.ts.map new file mode 100644 index 0000000..5e493e9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.d.ts","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,YAAY,EAAE,0BAA0B,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAEpF;;;;;;GAMG;AAEH,wBAAgB,qBAAqB,CACnC,QAAQ,EACR,KAAK,GAAG,QAAQ,EAAE,EAClB,aAAa,GAAG,YAAY,EAC5B,KAAK,GAAG,MAAM,EAEd,WAAW,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,EAAE,KAAK,CAAC,GACpD,0BAA0B,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,CAmB5D"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js new file mode 100644 index 0000000..6e3c897 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js @@ -0,0 +1,76 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getPagedAsyncIterator = void 0; +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +function getPagedAsyncIterator(pagedResult) { + const iter = getItemAsyncIterator(pagedResult); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: pagedResult?.byPage ?? + ((settings) => { + const { continuationToken, maxPageSize } = settings ?? {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), + }; +} +exports.getPagedAsyncIterator = getPagedAsyncIterator; +async function* getItemAsyncIterator(pagedResult) { + const pages = getPageAsyncIterator(pagedResult); + const firstVal = await pages.next(); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield* toElements(firstVal.value); + for await (const page of pages) { + yield* toElements(page); + } + } + else { + yield firstVal.value; + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield* pages; + } + } + else { + yield* firstVal.value; + for await (const page of pages) { + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield* page; + } + } +} +async function* getPageAsyncIterator(pagedResult, options = {}) { + const { pageLink, maxPageSize } = options; + let response = await pagedResult.getPage(pageLink ?? 
pagedResult.firstPageLink, maxPageSize); + if (!response) { + return; + } + yield response.page; + while (response.nextPageLink) { + response = await pagedResult.getPage(response.nextPageLink, maxPageSize); + if (!response) { + return; + } + yield response.page; + } +} +//# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js.map b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js.map new file mode 100644 index 0000000..f0b8013 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.js","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAIlC;;;;;;GAMG;AAEH,SAAgB,qBAAqB,CAMnC,WAAqD;IAErD,MAAM,IAAI,GAAG,oBAAoB,CAAwC,WAAW,CAAC,CAAC;IACtF,OAAO;QACL,IAAI;YACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;QACrB,CAAC;QACD,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;QACD,MAAM,EACJ,WAAW,EAAE,MAAM;YAClB,CAAC,CAAC,QAAuB,EAAE,EAAE;gBAC5B,MAAM,EAAE,iBAAiB,EAAE,WAAW,EAAE,GAAG,QAAQ,IAAI,EAAE,CAAC;gBAC1D,OAAO,oBAAoB,CAAC,WAAW,EAAE;oBACvC,QAAQ,EAAE,iBAAiD;oBAC3D,WAAW;iBACZ,CAAC,CAAC;YACL,CAAC,CAA2E;KAC/E,CAAC;AACJ,CAAC;AA1BD,sDA0BC;AAED,KAAK,SAAS,CAAC,CAAC,oBAAoB,CAClC,WAAqD;IAErD,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC;IACpC,6FAA6F;IAC7F,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QACnC,sCAAsC;QACtC,MAAM,EAAE,UAAU,EAAE,GAAG,WAAW,CAAC;QACnC,IAAI,UAAU,EAAE,CAAC;YACf,KAAK,CAAC,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAe,CAAC;YAChD,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBAC/B,KAAK,CAAC,CAAC,UAAU,CAAC,IAAI,CAAe,CAAC;YACxC,CAAC;QACH,CAAC;aAAM,CAAC;YACN,MAAM,QAAQ,CAAC,KAAK,CAAC;YACrB,sFAAsF;YACtF,KAAK,CAAC,CAAC,KAAmD,CAAC;QAC7D,CAAC;IACH,CAAC;SAAM,CAAC;QACN,KAAK,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC;QACtB,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YAC/B,gGAAgG;YAChG,gDAAgD;YAChD,KAAK,CAAC,CAAC,IAA6B,CAAC;QACvC,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,SAAS,CAAC,CAAC,oBAAoB,CAClC,WAAqD,EACrD,UAGI,EAAE;IAEN,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;IAC1C,IAAI,QAAQ,GAAG,MAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,IAAI,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;IAC7F,IAAI,CAAC,QAAQ,EAAE,CAAC;QACd,OAAO;IACT,CAAC;IACD,MAAM,QAAQ,CAAC,IAAI,CAAC;IACpB,OAAO,QAAQ,CAAC,YAAY,EAAE,CAAC;QAC7B,QAAQ,GAAG,MAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;QACzE,IAAI,CAAC,QAAQ,EAAE,CAAC;YACd,OAAO;QACT,CAAC;QACD,MAAM,QAAQ,CAAC,IAAI,CAAC;IACtB,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PageSettings, PagedAsyncIterableIterator, PagedResult } from \"./models.js\";\n\n/**\n * returns an async iterator that iterates over results. It also has a `byPage`\n * method that returns pages of items at once.\n *\n * @param pagedResult - an object that specifies how to get pages.\n * @returns a paged async iterator that iterates over results.\n */\n\nexport function getPagedAsyncIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n TLink = string,\n>(\n pagedResult: PagedResult,\n): PagedAsyncIterableIterator {\n const iter = getItemAsyncIterator(pagedResult);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage:\n pagedResult?.byPage ??\n (((settings?: PageSettings) => {\n const { continuationToken, maxPageSize } = settings ?? 
{};\n return getPageAsyncIterator(pagedResult, {\n pageLink: continuationToken as unknown as TLink | undefined,\n maxPageSize,\n });\n }) as unknown as (settings?: TPageSettings) => AsyncIterableIterator),\n };\n}\n\nasync function* getItemAsyncIterator(\n pagedResult: PagedResult,\n): AsyncIterableIterator {\n const pages = getPageAsyncIterator(pagedResult);\n const firstVal = await pages.next();\n // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is\n if (!Array.isArray(firstVal.value)) {\n // can extract elements from this page\n const { toElements } = pagedResult;\n if (toElements) {\n yield* toElements(firstVal.value) as TElement[];\n for await (const page of pages) {\n yield* toElements(page) as TElement[];\n }\n } else {\n yield firstVal.value;\n // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case\n yield* pages as unknown as AsyncIterableIterator;\n }\n } else {\n yield* firstVal.value;\n for await (const page of pages) {\n // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch,\n // it must be the case that `TPage = TElement[]`\n yield* page as unknown as TElement[];\n }\n }\n}\n\nasync function* getPageAsyncIterator(\n pagedResult: PagedResult,\n options: {\n maxPageSize?: number;\n pageLink?: TLink;\n } = {},\n): AsyncIterableIterator {\n const { pageLink, maxPageSize } = options;\n let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize);\n if (!response) {\n return;\n }\n yield response.page;\n while (response.nextPageLink) {\n response = await pagedResult.getPage(response.nextPageLink, maxPageSize);\n if (!response) {\n return;\n }\n yield response.page;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/index.d.ts b/node_modules/@azure/core-paging/dist/commonjs/index.d.ts new file mode 100644 index 0000000..5712e6d --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/index.d.ts @@ -0,0 +1,3 @@ +export * from "./models.js"; +export * from "./getPagedAsyncIterator.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/index.d.ts.map b/node_modules/@azure/core-paging/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000..66a03ce --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,cAAc,aAAa,CAAC;AAC5B,cAAc,4BAA4B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/index.js b/node_modules/@azure/core-paging/dist/commonjs/index.js new file mode 100644 index 0000000..1b40023 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/index.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +const tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./models.js"), exports); +tslib_1.__exportStar(require("./getPagedAsyncIterator.js"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/index.js.map b/node_modules/@azure/core-paging/dist/commonjs/index.js.map new file mode 100644 index 0000000..64edba8 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,sDAA4B;AAC5B,qEAA2C","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./models.js\";\nexport * from \"./getPagedAsyncIterator.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/models.d.ts b/node_modules/@azure/core-paging/dist/commonjs/models.d.ts new file mode 100644 index 0000000..5cc85f2 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/models.d.ts @@ -0,0 +1,69 @@ +/** + * An interface that tracks the settings for paged iteration + */ +export interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: TPageSettings) => AsyncIterableIterator; +} +/** + * An interface that describes how to communicate with the service. + */ +export interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + } | undefined>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; + /** + * A function to extract elements from a page. 
+ */ + toElements?: (page: TPage) => unknown[]; +} +/** + * Paged collection of T items + */ +export type Paged = { + /** + * The T items on this page + */ + value: T[]; + /** + * The link to the next page of items + */ + nextLink?: string; +}; +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/models.d.ts.map b/node_modules/@azure/core-paging/dist/commonjs/models.d.ts.map new file mode 100644 index 0000000..efa9bce --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AACD;;GAEG;AACH,MAAM,WAAW,0BAA0B,CACzC,QAAQ,EACR,KAAK,GAAG,QAAQ,EAAE,EAClB,aAAa,GAAG,YAAY;IAE5B;;OAEG;IACH,IAAI,IAAI,OAAO,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC1C;;OAEG;IACH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,0BAA0B,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,CAAC;IACrF;;OAEG;IACH,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,aAAa,KAAK,qBAAqB,CAAC,KAAK,CAAC,CAAC;CACpE;AAED;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,KAAK,EAAE,aAAa,GAAG,YAAY,EAAE,KAAK,GAAG,MAAM;IAC9E;;OAEG;IACH,aAAa,EAAE,KAAK,CAAC;IACrB;;OAEG;IACH,OAAO,EAAE,CACP,QAAQ,EAAE,KAAK,EACf,WAAW,CAAC,EAAE,MAAM,KACjB,OAAO,CAAC;QAAE,IAAI,EAAE,KAAK,CAAC;QAAC,YAAY,CAAC,EAAE,KAAK,CAAA;KAAE,GAAG,SAAS,CAAC,CAAC;IAChE;;;OAGG;IACH,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,aAAa,KAAK,qBAAqB,CAAC,KAAK,CAAC,CAAC;IAEpE;;OAEG;IACH,UAAU,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,EAAE,CAAC;CACzC;AAED;;GAEG;AACH,MAAM,MAAM,KAAK,CAAC,CAAC,IAAI;IACrB;;OAEG;IACH,KAAK,EAAE,CAAC,EAAE,CAAC;IACX;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/models.js b/node_modules/@azure/core-paging/dist/commonjs/models.js new file mode 100644 index 0000000..783931f --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/models.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/models.js.map b/node_modules/@azure/core-paging/dist/commonjs/models.js.map new file mode 100644 index 0000000..33a8eca --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * An interface that tracks the settings for paged iteration\n */\nexport interface PageSettings {\n /**\n * The token that keeps track of where to continue the iterator\n */\n continuationToken?: string;\n /**\n * The size of the page during paged iteration\n */\n maxPageSize?: number;\n}\n/**\n * An interface that allows async iterable iteration both to completion and by page.\n */\nexport interface PagedAsyncIterableIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n> {\n /**\n * The next method, part of the iteration protocol\n */\n next(): Promise>;\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator](): PagedAsyncIterableIterator;\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings?: TPageSettings) => AsyncIterableIterator;\n}\n\n/**\n * An interface that describes how to communicate with the service.\n */\nexport interface PagedResult {\n /**\n * Link to the first page of results.\n */\n firstPageLink: TLink;\n /**\n * A method that returns a page of results.\n */\n getPage: (\n pageLink: TLink,\n maxPageSize?: number,\n ) => Promise<{ page: TPage; nextPageLink?: TLink } | undefined>;\n /**\n * a function to implement the `byPage` method on the paged async iterator. The default is\n * one that sets the `maxPageSizeParam` from `settings.maxPageSize`.\n */\n byPage?: (settings?: TPageSettings) => AsyncIterableIterator;\n\n /**\n * A function to extract elements from a page.\n */\n toElements?: (page: TPage) => unknown[];\n}\n\n/**\n * Paged collection of T items\n */\nexport type Paged = {\n /**\n * The T items on this page\n */\n value: T[];\n /**\n * The link to the next page of items\n */\n nextLink?: string;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/package.json b/node_modules/@azure/core-paging/dist/commonjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-paging/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-paging/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 0000000..22735db --- /dev/null +++ b/node_modules/@azure/core-paging/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. 
+{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.42.3" + } + ] +} diff --git a/node_modules/@azure/core-paging/dist/core-paging.d.ts b/node_modules/@azure/core-paging/dist/core-paging.d.ts new file mode 100644 index 0000000..a0ab819 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/core-paging.d.ts @@ -0,0 +1,82 @@ +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; + +/** + * Paged collection of T items + */ +export declare type Paged = { + /** + * The T items on this page + */ + value: T[]; + /** + * The link to the next page of items + */ + nextLink?: string; +}; + +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export declare interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: TPageSettings) => AsyncIterableIterator; +} + +/** + * An interface that describes how to communicate with the service. + */ +export declare interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + } | undefined>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; + /** + * A function to extract elements from a page. + */ + toElements?: (page: TPage) => unknown[]; +} + +/** + * An interface that tracks the settings for paged iteration + */ +export declare interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} + +export { } diff --git a/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.d.ts b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.d.ts new file mode 100644 index 0000000..deb559b --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.d.ts @@ -0,0 +1,10 @@ +import { PageSettings, PagedAsyncIterableIterator, PagedResult } from "./models.js"; +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. 
+ */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; +//# sourceMappingURL=getPagedAsyncIterator.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.d.ts.map b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.d.ts.map new file mode 100644 index 0000000..5e493e9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.d.ts","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,YAAY,EAAE,0BAA0B,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAEpF;;;;;;GAMG;AAEH,wBAAgB,qBAAqB,CACnC,QAAQ,EACR,KAAK,GAAG,QAAQ,EAAE,EAClB,aAAa,GAAG,YAAY,EAC5B,KAAK,GAAG,MAAM,EAEd,WAAW,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,EAAE,KAAK,CAAC,GACpD,0BAA0B,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,CAmB5D"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js new file mode 100644 index 0000000..d9db4b9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export function getPagedAsyncIterator(pagedResult) { + const iter = getItemAsyncIterator(pagedResult); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: pagedResult?.byPage ?? + ((settings) => { + const { continuationToken, maxPageSize } = settings ?? {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), + }; +} +async function* getItemAsyncIterator(pagedResult) { + const pages = getPageAsyncIterator(pagedResult); + const firstVal = await pages.next(); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield* toElements(firstVal.value); + for await (const page of pages) { + yield* toElements(page); + } + } + else { + yield firstVal.value; + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield* pages; + } + } + else { + yield* firstVal.value; + for await (const page of pages) { + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield* page; + } + } +} +async function* getPageAsyncIterator(pagedResult, options = {}) { + const { pageLink, maxPageSize } = options; + let response = await pagedResult.getPage(pageLink ?? 
pagedResult.firstPageLink, maxPageSize); + if (!response) { + return; + } + yield response.page; + while (response.nextPageLink) { + response = await pagedResult.getPage(response.nextPageLink, maxPageSize); + if (!response) { + return; + } + yield response.page; + } +} +//# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js.map b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js.map new file mode 100644 index 0000000..d40e079 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.js","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;;;;GAMG;AAEH,MAAM,UAAU,qBAAqB,CAMnC,WAAqD;IAErD,MAAM,IAAI,GAAG,oBAAoB,CAAwC,WAAW,CAAC,CAAC;IACtF,OAAO;QACL,IAAI;YACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;QACrB,CAAC;QACD,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;QACD,MAAM,EACJ,WAAW,EAAE,MAAM;YAClB,CAAC,CAAC,QAAuB,EAAE,EAAE;gBAC5B,MAAM,EAAE,iBAAiB,EAAE,WAAW,EAAE,GAAG,QAAQ,IAAI,EAAE,CAAC;gBAC1D,OAAO,oBAAoB,CAAC,WAAW,EAAE;oBACvC,QAAQ,EAAE,iBAAiD;oBAC3D,WAAW;iBACZ,CAAC,CAAC;YACL,CAAC,CAA2E;KAC/E,CAAC;AACJ,CAAC;AAED,KAAK,SAAS,CAAC,CAAC,oBAAoB,CAClC,WAAqD;IAErD,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC;IACpC,6FAA6F;IAC7F,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QACnC,sCAAsC;QACtC,MAAM,EAAE,UAAU,EAAE,GAAG,WAAW,CAAC;QACnC,IAAI,UAAU,EAAE,CAAC;YACf,KAAK,CAAC,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAe,CAAC;YAChD,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBAC/B,KAAK,CAAC,CAAC,UAAU,CAAC,IAAI,CAAe,CAAC;YACxC,CAAC;QACH,CAAC;aAAM,CAAC;YACN,MAAM,QAAQ,CAAC,KAAK,CAAC;YACrB,sFAAsF;YACtF,KAAK,CAAC,CAAC,KAAmD,CAAC;QAC7D,CAAC;IACH,CAAC;SAAM,CAAC;QACN,KAAK,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC;QACtB,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YAC/B,gGAAgG;YAChG,gDAAgD;YAChD,KAAK,CAAC,CAAC,IAA6B,CAAC;QACvC,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,SAAS,CAAC,CAAC,oBAAoB,CAClC,WAAqD,EACrD,UAGI,EAAE;IAEN,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;IAC1C,IAAI,QAAQ,GAAG,MAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,IAAI,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;IAC7F,IAAI,CAAC,QAAQ,EAAE,CAAC;QACd,OAAO;IACT,CAAC;IACD,MAAM,QAAQ,CAAC,IAAI,CAAC;IACpB,OAAO,QAAQ,CAAC,YAAY,EAAE,CAAC;QAC7B,QAAQ,GAAG,MAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;QACzE,IAAI,CAAC,QAAQ,EAAE,CAAC;YACd,OAAO;QACT,CAAC;QACD,MAAM,QAAQ,CAAC,IAAI,CAAC;IACtB,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PageSettings, PagedAsyncIterableIterator, PagedResult } from \"./models.js\";\n\n/**\n * returns an async iterator that iterates over results. It also has a `byPage`\n * method that returns pages of items at once.\n *\n * @param pagedResult - an object that specifies how to get pages.\n * @returns a paged async iterator that iterates over results.\n */\n\nexport function getPagedAsyncIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n TLink = string,\n>(\n pagedResult: PagedResult,\n): PagedAsyncIterableIterator {\n const iter = getItemAsyncIterator(pagedResult);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage:\n pagedResult?.byPage ??\n (((settings?: PageSettings) => {\n const { continuationToken, maxPageSize } = settings ?? 
{};\n return getPageAsyncIterator(pagedResult, {\n pageLink: continuationToken as unknown as TLink | undefined,\n maxPageSize,\n });\n }) as unknown as (settings?: TPageSettings) => AsyncIterableIterator),\n };\n}\n\nasync function* getItemAsyncIterator(\n pagedResult: PagedResult,\n): AsyncIterableIterator {\n const pages = getPageAsyncIterator(pagedResult);\n const firstVal = await pages.next();\n // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is\n if (!Array.isArray(firstVal.value)) {\n // can extract elements from this page\n const { toElements } = pagedResult;\n if (toElements) {\n yield* toElements(firstVal.value) as TElement[];\n for await (const page of pages) {\n yield* toElements(page) as TElement[];\n }\n } else {\n yield firstVal.value;\n // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case\n yield* pages as unknown as AsyncIterableIterator;\n }\n } else {\n yield* firstVal.value;\n for await (const page of pages) {\n // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch,\n // it must be the case that `TPage = TElement[]`\n yield* page as unknown as TElement[];\n }\n }\n}\n\nasync function* getPageAsyncIterator(\n pagedResult: PagedResult,\n options: {\n maxPageSize?: number;\n pageLink?: TLink;\n } = {},\n): AsyncIterableIterator {\n const { pageLink, maxPageSize } = options;\n let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize);\n if (!response) {\n return;\n }\n yield response.page;\n while (response.nextPageLink) {\n response = await pagedResult.getPage(response.nextPageLink, maxPageSize);\n if (!response) {\n return;\n }\n yield response.page;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/index.d.ts b/node_modules/@azure/core-paging/dist/esm/index.d.ts new file mode 100644 index 0000000..5712e6d --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/index.d.ts @@ -0,0 +1,3 @@ +export * from "./models.js"; +export * from "./getPagedAsyncIterator.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/index.d.ts.map b/node_modules/@azure/core-paging/dist/esm/index.d.ts.map new file mode 100644 index 0000000..66a03ce --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,cAAc,aAAa,CAAC;AAC5B,cAAc,4BAA4B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/index.js b/node_modules/@azure/core-paging/dist/esm/index.js new file mode 100644 index 0000000..4dca636 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export * from "./models.js"; +export * from "./getPagedAsyncIterator.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/index.js.map b/node_modules/@azure/core-paging/dist/esm/index.js.map new file mode 100644 index 0000000..800f434 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,aAAa,CAAC;AAC5B,cAAc,4BAA4B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./models.js\";\nexport * from \"./getPagedAsyncIterator.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/models.d.ts b/node_modules/@azure/core-paging/dist/esm/models.d.ts new file mode 100644 index 0000000..5cc85f2 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/models.d.ts @@ -0,0 +1,69 @@ +/** + * An interface that tracks the settings for paged iteration + */ +export interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: TPageSettings) => AsyncIterableIterator; +} +/** + * An interface that describes how to communicate with the service. + */ +export interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + } | undefined>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; + /** + * A function to extract elements from a page. 
+ */ + toElements?: (page: TPage) => unknown[]; +} +/** + * Paged collection of T items + */ +export type Paged = { + /** + * The T items on this page + */ + value: T[]; + /** + * The link to the next page of items + */ + nextLink?: string; +}; +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/models.d.ts.map b/node_modules/@azure/core-paging/dist/esm/models.d.ts.map new file mode 100644 index 0000000..efa9bce --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AACD;;GAEG;AACH,MAAM,WAAW,0BAA0B,CACzC,QAAQ,EACR,KAAK,GAAG,QAAQ,EAAE,EAClB,aAAa,GAAG,YAAY;IAE5B;;OAEG;IACH,IAAI,IAAI,OAAO,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC1C;;OAEG;IACH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,0BAA0B,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,CAAC;IACrF;;OAEG;IACH,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,aAAa,KAAK,qBAAqB,CAAC,KAAK,CAAC,CAAC;CACpE;AAED;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,KAAK,EAAE,aAAa,GAAG,YAAY,EAAE,KAAK,GAAG,MAAM;IAC9E;;OAEG;IACH,aAAa,EAAE,KAAK,CAAC;IACrB;;OAEG;IACH,OAAO,EAAE,CACP,QAAQ,EAAE,KAAK,EACf,WAAW,CAAC,EAAE,MAAM,KACjB,OAAO,CAAC;QAAE,IAAI,EAAE,KAAK,CAAC;QAAC,YAAY,CAAC,EAAE,KAAK,CAAA;KAAE,GAAG,SAAS,CAAC,CAAC;IAChE;;;OAGG;IACH,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,aAAa,KAAK,qBAAqB,CAAC,KAAK,CAAC,CAAC;IAEpE;;OAEG;IACH,UAAU,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,EAAE,CAAC;CACzC;AAED;;GAEG;AACH,MAAM,MAAM,KAAK,CAAC,CAAC,IAAI;IACrB;;OAEG;IACH,KAAK,EAAE,CAAC,EAAE,CAAC;IACX;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/models.js b/node_modules/@azure/core-paging/dist/esm/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/models.js.map b/node_modules/@azure/core-paging/dist/esm/models.js.map new file mode 100644 index 0000000..a64e673 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * An interface that tracks the settings for paged iteration\n */\nexport interface PageSettings {\n /**\n * The token that keeps track of where to continue the iterator\n */\n continuationToken?: string;\n /**\n * The size of the page during paged iteration\n */\n maxPageSize?: number;\n}\n/**\n * An interface that allows async iterable iteration both to completion and by page.\n */\nexport interface PagedAsyncIterableIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n> {\n /**\n * The next method, part of the iteration protocol\n */\n next(): Promise>;\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator](): PagedAsyncIterableIterator;\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings?: TPageSettings) => AsyncIterableIterator;\n}\n\n/**\n * An interface that describes how to communicate with the service.\n */\nexport interface PagedResult {\n /**\n * Link to the first page of results.\n */\n firstPageLink: TLink;\n /**\n * A method that returns a page of results.\n */\n getPage: (\n pageLink: TLink,\n maxPageSize?: number,\n ) => Promise<{ page: TPage; nextPageLink?: TLink } | undefined>;\n /**\n * a function to implement the `byPage` method on the paged async iterator. The default is\n * one that sets the `maxPageSizeParam` from `settings.maxPageSize`.\n */\n byPage?: (settings?: TPageSettings) => AsyncIterableIterator;\n\n /**\n * A function to extract elements from a page.\n */\n toElements?: (page: TPage) => unknown[];\n}\n\n/**\n * Paged collection of T items\n */\nexport type Paged = {\n /**\n * The T items on this page\n */\n value: T[];\n /**\n * The link to the next page of items\n */\n nextLink?: string;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/esm/package.json b/node_modules/@azure/core-paging/dist/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-paging/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.d.ts b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.d.ts new file mode 100644 index 0000000..deb559b --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.d.ts @@ -0,0 +1,10 @@ +import { PageSettings, PagedAsyncIterableIterator, PagedResult } from "./models.js"; +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. 
+ */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; +//# sourceMappingURL=getPagedAsyncIterator.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.d.ts.map b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.d.ts.map new file mode 100644 index 0000000..5e493e9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.d.ts","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,YAAY,EAAE,0BAA0B,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAEpF;;;;;;GAMG;AAEH,wBAAgB,qBAAqB,CACnC,QAAQ,EACR,KAAK,GAAG,QAAQ,EAAE,EAClB,aAAa,GAAG,YAAY,EAC5B,KAAK,GAAG,MAAM,EAEd,WAAW,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,EAAE,KAAK,CAAC,GACpD,0BAA0B,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,CAmB5D"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js new file mode 100644 index 0000000..d9db4b9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export function getPagedAsyncIterator(pagedResult) { + const iter = getItemAsyncIterator(pagedResult); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: pagedResult?.byPage ?? + ((settings) => { + const { continuationToken, maxPageSize } = settings ?? {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), + }; +} +async function* getItemAsyncIterator(pagedResult) { + const pages = getPageAsyncIterator(pagedResult); + const firstVal = await pages.next(); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield* toElements(firstVal.value); + for await (const page of pages) { + yield* toElements(page); + } + } + else { + yield firstVal.value; + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield* pages; + } + } + else { + yield* firstVal.value; + for await (const page of pages) { + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield* page; + } + } +} +async function* getPageAsyncIterator(pagedResult, options = {}) { + const { pageLink, maxPageSize } = options; + let response = await pagedResult.getPage(pageLink ?? 
pagedResult.firstPageLink, maxPageSize); + if (!response) { + return; + } + yield response.page; + while (response.nextPageLink) { + response = await pagedResult.getPage(response.nextPageLink, maxPageSize); + if (!response) { + return; + } + yield response.page; + } +} +//# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js.map b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js.map new file mode 100644 index 0000000..d40e079 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.js","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;;;;GAMG;AAEH,MAAM,UAAU,qBAAqB,CAMnC,WAAqD;IAErD,MAAM,IAAI,GAAG,oBAAoB,CAAwC,WAAW,CAAC,CAAC;IACtF,OAAO;QACL,IAAI;YACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;QACrB,CAAC;QACD,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;QACD,MAAM,EACJ,WAAW,EAAE,MAAM;YAClB,CAAC,CAAC,QAAuB,EAAE,EAAE;gBAC5B,MAAM,EAAE,iBAAiB,EAAE,WAAW,EAAE,GAAG,QAAQ,IAAI,EAAE,CAAC;gBAC1D,OAAO,oBAAoB,CAAC,WAAW,EAAE;oBACvC,QAAQ,EAAE,iBAAiD;oBAC3D,WAAW;iBACZ,CAAC,CAAC;YACL,CAAC,CAA2E;KAC/E,CAAC;AACJ,CAAC;AAED,KAAK,SAAS,CAAC,CAAC,oBAAoB,CAClC,WAAqD;IAErD,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC;IACpC,6FAA6F;IAC7F,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QACnC,sCAAsC;QACtC,MAAM,EAAE,UAAU,EAAE,GAAG,WAAW,CAAC;QACnC,IAAI,UAAU,EAAE,CAAC;YACf,KAAK,CAAC,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAe,CAAC;YAChD,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBAC/B,KAAK,CAAC,CAAC,UAAU,CAAC,IAAI,CAAe,CAAC;YACxC,CAAC;QACH,CAAC;aAAM,CAAC;YACN,MAAM,QAAQ,CAAC,KAAK,CAAC;YACrB,sFAAsF;YACtF,KAAK,CAAC,CAAC,KAAmD,CAAC;QAC7D,CAAC;IACH,CAAC;SAAM,CAAC;QACN,KAAK,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC;QACtB,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YAC/B,gGAAgG;YAChG,gDAAgD;YAChD,KAAK,CAAC,CAAC,IAA6B,CAAC;QACvC,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,SAAS,CAAC,CAAC,oBAAoB,CAClC,WAAqD,EACrD,UAGI,EAAE;IAEN,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;IAC1C,IAAI,QAAQ,GAAG,MAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,IAAI,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;IAC7F,IAAI,CAAC,QAAQ,EAAE,CAAC;QACd,OAAO;IACT,CAAC;IACD,MAAM,QAAQ,CAAC,IAAI,CAAC;IACpB,OAAO,QAAQ,CAAC,YAAY,EAAE,CAAC;QAC7B,QAAQ,GAAG,MAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;QACzE,IAAI,CAAC,QAAQ,EAAE,CAAC;YACd,OAAO;QACT,CAAC;QACD,MAAM,QAAQ,CAAC,IAAI,CAAC;IACtB,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PageSettings, PagedAsyncIterableIterator, PagedResult } from \"./models.js\";\n\n/**\n * returns an async iterator that iterates over results. It also has a `byPage`\n * method that returns pages of items at once.\n *\n * @param pagedResult - an object that specifies how to get pages.\n * @returns a paged async iterator that iterates over results.\n */\n\nexport function getPagedAsyncIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n TLink = string,\n>(\n pagedResult: PagedResult,\n): PagedAsyncIterableIterator {\n const iter = getItemAsyncIterator(pagedResult);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage:\n pagedResult?.byPage ??\n (((settings?: PageSettings) => {\n const { continuationToken, maxPageSize } = settings ?? 
{};\n return getPageAsyncIterator(pagedResult, {\n pageLink: continuationToken as unknown as TLink | undefined,\n maxPageSize,\n });\n }) as unknown as (settings?: TPageSettings) => AsyncIterableIterator),\n };\n}\n\nasync function* getItemAsyncIterator(\n pagedResult: PagedResult,\n): AsyncIterableIterator {\n const pages = getPageAsyncIterator(pagedResult);\n const firstVal = await pages.next();\n // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is\n if (!Array.isArray(firstVal.value)) {\n // can extract elements from this page\n const { toElements } = pagedResult;\n if (toElements) {\n yield* toElements(firstVal.value) as TElement[];\n for await (const page of pages) {\n yield* toElements(page) as TElement[];\n }\n } else {\n yield firstVal.value;\n // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case\n yield* pages as unknown as AsyncIterableIterator;\n }\n } else {\n yield* firstVal.value;\n for await (const page of pages) {\n // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch,\n // it must be the case that `TPage = TElement[]`\n yield* page as unknown as TElement[];\n }\n }\n}\n\nasync function* getPageAsyncIterator(\n pagedResult: PagedResult,\n options: {\n maxPageSize?: number;\n pageLink?: TLink;\n } = {},\n): AsyncIterableIterator {\n const { pageLink, maxPageSize } = options;\n let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize);\n if (!response) {\n return;\n }\n yield response.page;\n while (response.nextPageLink) {\n response = await pagedResult.getPage(response.nextPageLink, maxPageSize);\n if (!response) {\n return;\n }\n yield response.page;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/index.d.ts b/node_modules/@azure/core-paging/dist/react-native/index.d.ts new file mode 100644 index 0000000..5712e6d --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/index.d.ts @@ -0,0 +1,3 @@ +export * from "./models.js"; +export * from "./getPagedAsyncIterator.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/index.d.ts.map b/node_modules/@azure/core-paging/dist/react-native/index.d.ts.map new file mode 100644 index 0000000..66a03ce --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,cAAc,aAAa,CAAC;AAC5B,cAAc,4BAA4B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/index.js b/node_modules/@azure/core-paging/dist/react-native/index.js new file mode 100644 index 0000000..4dca636 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export * from "./models.js"; +export * from "./getPagedAsyncIterator.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/index.js.map b/node_modules/@azure/core-paging/dist/react-native/index.js.map new file mode 100644 index 0000000..800f434 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,aAAa,CAAC;AAC5B,cAAc,4BAA4B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./models.js\";\nexport * from \"./getPagedAsyncIterator.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/models.d.ts b/node_modules/@azure/core-paging/dist/react-native/models.d.ts new file mode 100644 index 0000000..5cc85f2 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/models.d.ts @@ -0,0 +1,69 @@ +/** + * An interface that tracks the settings for paged iteration + */ +export interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: TPageSettings) => AsyncIterableIterator; +} +/** + * An interface that describes how to communicate with the service. + */ +export interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + } | undefined>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; + /** + * A function to extract elements from a page. 
+ */ + toElements?: (page: TPage) => unknown[]; +} +/** + * Paged collection of T items + */ +export type Paged = { + /** + * The T items on this page + */ + value: T[]; + /** + * The link to the next page of items + */ + nextLink?: string; +}; +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/models.d.ts.map b/node_modules/@azure/core-paging/dist/react-native/models.d.ts.map new file mode 100644 index 0000000..efa9bce --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AACD;;GAEG;AACH,MAAM,WAAW,0BAA0B,CACzC,QAAQ,EACR,KAAK,GAAG,QAAQ,EAAE,EAClB,aAAa,GAAG,YAAY;IAE5B;;OAEG;IACH,IAAI,IAAI,OAAO,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC1C;;OAEG;IACH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,0BAA0B,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,CAAC;IACrF;;OAEG;IACH,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,aAAa,KAAK,qBAAqB,CAAC,KAAK,CAAC,CAAC;CACpE;AAED;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,KAAK,EAAE,aAAa,GAAG,YAAY,EAAE,KAAK,GAAG,MAAM;IAC9E;;OAEG;IACH,aAAa,EAAE,KAAK,CAAC;IACrB;;OAEG;IACH,OAAO,EAAE,CACP,QAAQ,EAAE,KAAK,EACf,WAAW,CAAC,EAAE,MAAM,KACjB,OAAO,CAAC;QAAE,IAAI,EAAE,KAAK,CAAC;QAAC,YAAY,CAAC,EAAE,KAAK,CAAA;KAAE,GAAG,SAAS,CAAC,CAAC;IAChE;;;OAGG;IACH,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,aAAa,KAAK,qBAAqB,CAAC,KAAK,CAAC,CAAC;IAEpE;;OAEG;IACH,UAAU,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,EAAE,CAAC;CACzC;AAED;;GAEG;AACH,MAAM,MAAM,KAAK,CAAC,CAAC,IAAI;IACrB;;OAEG;IACH,KAAK,EAAE,CAAC,EAAE,CAAC;IACX;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/models.js b/node_modules/@azure/core-paging/dist/react-native/models.js new file mode 100644 index 0000000..63155a9 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/models.js.map b/node_modules/@azure/core-paging/dist/react-native/models.js.map new file mode 100644 index 0000000..a64e673 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * An interface that tracks the settings for paged iteration\n */\nexport interface PageSettings {\n /**\n * The token that keeps track of where to continue the iterator\n */\n continuationToken?: string;\n /**\n * The size of the page during paged iteration\n */\n maxPageSize?: number;\n}\n/**\n * An interface that allows async iterable iteration both to completion and by page.\n */\nexport interface PagedAsyncIterableIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n> {\n /**\n * The next method, part of the iteration protocol\n */\n next(): Promise>;\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator](): PagedAsyncIterableIterator;\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings?: TPageSettings) => AsyncIterableIterator;\n}\n\n/**\n * An interface that describes how to communicate with the service.\n */\nexport interface PagedResult {\n /**\n * Link to the first page of results.\n */\n firstPageLink: TLink;\n /**\n * A method that returns a page of results.\n */\n getPage: (\n pageLink: TLink,\n maxPageSize?: number,\n ) => Promise<{ page: TPage; nextPageLink?: TLink } | undefined>;\n /**\n * a function to implement the `byPage` method on the paged async iterator. 
The default is\n * one that sets the `maxPageSizeParam` from `settings.maxPageSize`.\n */\n byPage?: (settings?: TPageSettings) => AsyncIterableIterator;\n\n /**\n * A function to extract elements from a page.\n */\n toElements?: (page: TPage) => unknown[];\n}\n\n/**\n * Paged collection of T items\n */\nexport type Paged = {\n /**\n * The T items on this page\n */\n value: T[];\n /**\n * The link to the next page of items\n */\n nextLink?: string;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/package.json b/node_modules/@azure/core-paging/dist/react-native/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-paging/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-paging/package.json b/node_modules/@azure/core-paging/package.json new file mode 100644 index 0000000..3ec4ece --- /dev/null +++ b/node_modules/@azure/core-paging/package.json @@ -0,0 +1,124 @@ +{ + "name": "@azure/core-paging", + "author": "Microsoft Corporation", + "sdk-type": "client", + "version": "1.6.1", + "description": "Core types for paging async iterable iterators", + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "tags": [ + "microsoft", + "clientruntime" + ], + "keywords": [ + "microsoft", + "clientruntime", + "azure", + "cloud" + ], + "files": [ + "dist/", + "LICENSE", + "README.md" + ], + "engines": { + "node": ">=18.0.0" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-paging/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "sideEffects": true, + "private": false, + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist dist-* temp *.tgz types *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"samples-dev/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint": "eslint package.json src --ext .ts --ext .cts --ext .mts", + "lint:fix": "eslint package.json src --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "pack": "npm pack 2>&1", + "test:browser": "npm run 
clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.11.1", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "sampleConfiguration": { + "skipFolder": true, + "disableDocsMs": true, + "productName": "Azure SDK Core", + "productSlugs": [ + "azure" + ] + }, + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-tracing/CHANGELOG.md b/node_modules/@azure/core-tracing/CHANGELOG.md new file mode 100644 index 0000000..ee8b4b0 --- /dev/null +++ b/node_modules/@azure/core-tracing/CHANGELOG.md @@ -0,0 +1,81 @@ +# Release History + +## 1.0.0-preview.13 (2021-07-15) + +### Features Added + +- Added support for disabling distributed tracing using the AZURE_TRACING_DISABLED environment variable. + +### Breaking Changes + +- Removed `TestTracer` and `TestSpan` from public API and into `@azure/test-utils`. [PR #16315](https://github.com/Azure/azure-sdk-for-js/pull/16315) + - `TestTracer` and `TestSpan` are intended for test support when used by other Azure packages and not intended for use by end users. +- Removed `setTracer`, @azure/core-tracing will now rely on the `trace` API to fetch a tracer from the global tracer provider. [PR #16347](https://github.com/Azure/azure-sdk-for-js/pull/16347) + - If you are using `setTracer`, please use `trace.setGlobalTracerProvider(provider)` instead as described in the OpenTelemetry documentation. +- Removed `NoOpTracer` and `NoOpSpan` from the public API. Please use `trace.wrapSpanContext(INVALID_SPAN_CONTEXT)` from `@opentelemetry/api` instead. [PR #16315](https://github.com/Azure/azure-sdk-for-js/pull/16315) + +## 1.0.0-preview.12 (2021-06-30) + +- Update `@opentelemetry/api` to version 1.0.0 [PR #15883](https://github.com/Azure/azure-sdk-for-js/pull/15883) + - This version ships with ESM modules and fixes an issue where Angular projects would warn ab out optimization bailouts due to dependencies on CommonJS or AMD. + +### Breaking Changes + +- Removed `OpenCensusSpanWrapper` and `OpenCensusTracerWrapper` from the public API. Customers using these wrappers should migrate to using `OpenTelemetry` directly. [PR #15770](https://github.com/Azure/azure-sdk-for-js/pull/15770) +- Update `@azure/core-tracing` to version 1.0.0-preview.12. This brings core-tracing up to date with `@opentelemetry/api@1.0.0`. + - `Span#context` was renamed to `Span#spanContext`. 
This change is supported in `@azure/core-http@1.2.7`. + +## 1.0.0-preview.11 (2021-03-30) + +### Breaking Changes + +- Update @azure/core-tracing to version 1.0.0-preview.11. This brings core-tracing up to date with @opentelemetry/api@1.0.0-rc.0. + There are two scenarios that will require changes if you are using tracing: + - Previously, you would pass a parent span using the `OperationOptions.tracingOptions.spanOptions.parentSpan` property. This has been + changed so that you now specify a parent `Context` using the `OperationOptions.tracingOptions.tracingContext` property instead. + - The status code for Spans is no longer of type `CanonicalCode`. Instead, it's now `SpanStatusCode`, which also has a smaller range of values. + +## 1.0.0-preview.10 (2021-03-04) + +- Internal improvements to make future opentelemetry updates simpler. + +## 1.0.0-preview.9 (2020-08-04) + +- Update `@opentelemetry/api` to version 0.10.2 [PR 10393](https://github.com/Azure/azure-sdk-for-js/pull/10393) + +## 1.0.0-preview.8 (2020-04-28) + +- Update `TestSpan` to allow setting span attributes [PR link](https://github.com/Azure/azure-sdk-for-js/pull/6565). +- [BREAKING] Migrate to OpenTelemetry 0.6 using the new `@opentelemetry/api` package. There were a few breaking changes: + - `SpanContext` now requires traceFlags to be set. + - `Tracer` has removed `recordSpanData`, `getBinaryFormat`, and `getHttpTextFormat`. + - `Tracer.getCurrentSpan` returns `undefined` instead of `null` when unset. + - `Link` objects renamed `spanContext` property to `context`. + +## 1.0.0-preview.7 (2019-12-03) + +- Updated the behavior of how incompatible versions of OpenTelemetry Tracer are handled. Now, errors will be thrown only if the user has manually set a Tracer. This means that incompatible versions will be silently ignored when tracing is not enabled. +- Updated to use OpenTelemetry 0.2 via the `@opentelemetry/types` package. There were two breaking changes in this update: + - `isRecordingEvents` on `Span` was renamed to `isRecording`. [PR link](https://github.com/open-telemetry/opentelemetry-js/pull/454) + - `addLink` was removed from `Span` as links are now only allowed to be added during span creation. This is possible by specifying any necessary links inside `SpanOptions`. [PR link](https://github.com/open-telemetry/opentelemetry-js/pull/449) + +## 1.0.0-preview.5 (2019-10-22) + +- Fixes issue where loading multiple copies of this module could result in the tracer set by `setTracer()` being reset. + +## 1.0.0-preview.4 (2019-10-08) + +- Remove dependency on the `debug` module to ensure compatibility with IE11 + +## 1.0.0-preview.3 (2019-10-07) + +- Updated to use the latest types from OpenTelemetry (PR [#5182](https://github.com/Azure/azure-sdk-for-js/pull/5182)) +- Clean up and refactored code for easier usage and testability. (PR [#5233](https://github.com/Azure/azure-sdk-for-js/pull/5233) and PR [#5283](https://github.com/Azure/azure-sdk-for-js/pull/5283)) + +## 1.0.0-preview.2 (2019-09-09) + +Updated the `OpenCensusSpanPlugin` & the `NoOpSpanPlugin` to support for retrieving span context. This allows updating of request headers with the right [span context](https://www.w3.org/TR/trace-context/#trace-context-http-headers-format). 
(PR [#4712](https://github.com/Azure/azure-sdk-for-js/pull/4712)) + +## 1.0.0-preview.1 (2019-08-05) + +Provides low-level interfaces and helper methods for tracing in Azure SDK diff --git a/node_modules/@azure/core-tracing/LICENSE b/node_modules/@azure/core-tracing/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-tracing/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-tracing/README.md b/node_modules/@azure/core-tracing/README.md new file mode 100644 index 0000000..dc088cb --- /dev/null +++ b/node_modules/@azure/core-tracing/README.md @@ -0,0 +1,72 @@ +# Azure Core tracing library for JavaScript + +This is the core tracing library that provides low-level interfaces and helper methods for tracing in Azure SDK JavaScript libraries which work in the browser and Node.js. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/core-tracing +``` + +## Key Concepts + +The `@azure/core-tracing` package supports enabling tracing for Azure SDK packages, using an [OpenTelemetry](https://opentelemetry.io/) `Tracer`. + +By default, all libraries log with a `NoOpTracer` that takes no action. +To change this, you have to use `setTracer` to set a new default `Tracer`. + +## Examples + +### Example 1 - Setting an OpenTelemetry Tracer + +```js +const opentelemetry = require("@opentelemetry/api"); +const { BasicTracer, SimpleSpanProcessor } = require("@opentelemetry/tracing"); +const { ZipkinExporter } = require("@opentelemetry/exporter-zipkin"); +const { setTracer } = require("@azure/core-tracing"); + +const exporter = new ZipkinExporter({ + serviceName: "azure-tracing-sample" +}); +const tracer = new BasicTracer(); +tracer.addSpanProcessor(new SimpleSpanProcessor(exporter)); + +setTracer(tracer); + +const rootSpan = tracer.startSpan("root"); +const context = opentelemetry.setSpan(opentelemetry.context.active(), rootSpan); + +// Call some client library methods and pass rootSpan via tracingOptions. + +rootSpan.end(); +exporter.shutdown(); +``` + +### Example 2 - Passing current Context to library operations + +```js +// Given a BlobClient from @azure/storage-blob +const result = await blobClient.download(undefined, undefined, { + tracingOptions: { + tracingContext: context + } +}); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. 
Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-tracing%2FREADME.png) diff --git a/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js new file mode 100644 index 0000000..b021ccc --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { SpanKind, setSpan, context as otContext, getTracer } from "./interfaces"; +import { trace, INVALID_SPAN_CONTEXT } from "@opentelemetry/api"; +export function isTracingDisabled() { + var _a; + if (typeof process === "undefined") { + // not supported in browser for now without polyfills + return false; + } + const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { + return false; + } + return Boolean(azureTracingDisabledValue); +} +/** + * Creates a function that can be used to create spans using the global tracer. + * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +export function createSpanFunction(args) { + return function (operationName, operationOptions) { + const tracer = getTracer(); + const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; + const spanOptions = Object.assign({ kind: SpanKind.INTERNAL }, tracingOptions.spanOptions); + const spanName = args.packagePrefix ? 
`${args.packagePrefix}.${operationName}` : operationName; + let span; + if (isTracingDisabled()) { + span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT); + } + else { + span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); + } + if (args.namespace) { + span.setAttribute("az.namespace", args.namespace); + } + let newSpanOptions = tracingOptions.spanOptions || {}; + if (span.isRecording() && args.namespace) { + newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + } + const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span) }); + const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + return { + span, + updatedOptions: newOperationOptions + }; + }; +} +//# sourceMappingURL=createSpan.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map new file mode 100644 index 0000000..ab64b61 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpan.js","sourceRoot":"","sources":["../../src/createSpan.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAIL,QAAQ,EACR,OAAO,EACP,OAAO,IAAI,SAAS,EACpB,SAAS,EACV,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,KAAK,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAuBjE,MAAM,UAAU,iBAAiB;;IAC/B,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;QAClC,qDAAqD;QACrD,OAAO,KAAK,CAAC;KACd;IAED,MAAM,yBAAyB,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,sBAAsB,0CAAE,WAAW,EAAE,CAAC;IAEpF,IAAI,yBAAyB,KAAK,OAAO,IAAI,yBAAyB,KAAK,GAAG,EAAE;QAC9E,OAAO,KAAK,CAAC;KACd;IAED,OAAO,OAAO,CAAC,yBAAyB,CAAC,CAAC;AAC5C,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,kBAAkB,CAAC,IAA4B;IAC7D,OAAO,UACL,aAAqB,EACrB,gBAA+B;QAE/B,MAAM,MAAM,GAAG,SAAS,EAAE,CAAC;QAC3B,MAAM,cAAc,GAAG,CAAA,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAE,cAAc,KAAI,EAAE,CAAC;QAC9D,MAAM,WAAW,mBACf,IAAI,EAAE,QAAQ,CAAC,QAAQ,IACpB,cAAc,CAAC,WAAW,CAC9B,CAAC;QAEF,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,aAAa,IAAI,aAAa,EAAE,CAAC,CAAC,CAAC,aAAa,CAAC;QAE/F,IAAI,IAAU,CAAC;QACf,IAAI,iBAAiB,EAAE,EAAE;YACvB,IAAI,GAAG,KAAK,CAAC,eAAe,CAAC,oBAAoB,CAAC,CAAC;SACpD;aAAM;YACL,IAAI,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE,WAAW,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;SAC/E;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SACnD;QAED,IAAI,cAAc,GAAG,cAAc,CAAC,WAAW,IAAI,EAAE,CAAC;QAEtD,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,IAAI,CAAC,SAAS,EAAE;YACxC,cAAc,mCACT,cAAc,CAAC,WAAW,KAC7B,UAAU,kCACL,WAAW,CAAC,UAAU,KACzB,cAAc,EAAE,IAAI,CAAC,SAAS,MAEjC,CAAC;SACH;QAED,MAAM,iBAAiB,mCAClB,cAAc,KACjB,WAAW,EAAE,cAAc,EAC3B,cAAc,EAAE,OAAO,CAAC,cAAc,CAAC,cAAc,IAAI,SAAS,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC,GACnF,CAAC;QAEF,MAAM,mBAAmB,GAAG,gCACvB,gBAAgB,KACnB,cAAc,EAAE,iBAAiB,GAC2B,CAAC;QAE/D,OAAO;YACL,IAAI;YACJ,cAAc,EAAE,mBAAmB;SACpC,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n OperationTracingOptions,\n Span,\n SpanOptions,\n SpanKind,\n setSpan,\n context as otContext,\n getTracer\n} from \"./interfaces\";\nimport { trace, INVALID_SPAN_CONTEXT } from \"@opentelemetry/api\";\n\n/**\n * Arguments for `createSpanFunction` that allow you to specify the\n * 
prefix for each created span as well as the `az.namespace` attribute.\n *\n * @hidden\n */\nexport interface CreateSpanFunctionArgs {\n /**\n * Package name prefix.\n *\n * NOTE: if this is empty no prefix will be applied to created Span names.\n */\n packagePrefix: string;\n /**\n * Service namespace\n *\n * NOTE: if this is empty no `az.namespace` attribute will be added to created Spans.\n */\n namespace: string;\n}\n\nexport function isTracingDisabled(): boolean {\n if (typeof process === \"undefined\") {\n // not supported in browser for now without polyfills\n return false;\n }\n\n const azureTracingDisabledValue = process.env.AZURE_TRACING_DISABLED?.toLowerCase();\n\n if (azureTracingDisabledValue === \"false\" || azureTracingDisabledValue === \"0\") {\n return false;\n }\n\n return Boolean(azureTracingDisabledValue);\n}\n\n/**\n * Creates a function that can be used to create spans using the global tracer.\n *\n * Usage:\n *\n * ```typescript\n * // once\n * const createSpan = createSpanFunction({ packagePrefix: \"Azure.Data.AppConfiguration\", namespace: \"Microsoft.AppConfiguration\" });\n *\n * // in each operation\n * const span = createSpan(\"deleteConfigurationSetting\", operationOptions);\n * // code...\n * span.end();\n * ```\n *\n * @hidden\n * @param args - allows configuration of the prefix for each span as well as the az.namespace field.\n */\nexport function createSpanFunction(args: CreateSpanFunctionArgs) {\n return function(\n operationName: string,\n operationOptions: T | undefined\n ): { span: Span; updatedOptions: T } {\n const tracer = getTracer();\n const tracingOptions = operationOptions?.tracingOptions || {};\n const spanOptions: SpanOptions = {\n kind: SpanKind.INTERNAL,\n ...tracingOptions.spanOptions\n };\n\n const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName;\n\n let span: Span;\n if (isTracingDisabled()) {\n span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT);\n } else {\n span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext);\n }\n\n if (args.namespace) {\n span.setAttribute(\"az.namespace\", args.namespace);\n }\n\n let newSpanOptions = tracingOptions.spanOptions || {};\n\n if (span.isRecording() && args.namespace) {\n newSpanOptions = {\n ...tracingOptions.spanOptions,\n attributes: {\n ...spanOptions.attributes,\n \"az.namespace\": args.namespace\n }\n };\n }\n\n const newTracingOptions: Required = {\n ...tracingOptions,\n spanOptions: newSpanOptions,\n tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span)\n };\n\n const newOperationOptions = {\n ...operationOptions,\n tracingOptions: newTracingOptions\n } as T & { tracingOptions: Required };\n\n return {\n span,\n updatedOptions: newOperationOptions\n };\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/index.js b/node_modules/@azure/core-tracing/dist-esm/src/index.js new file mode 100644 index 0000000..065b66c --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/index.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// Tracers and wrappers +export { createSpanFunction } from "./createSpan"; +// Shared interfaces +export { context, getSpan, getSpanContext, getTracer, isSpanContextValid, setSpan, setSpanContext, SpanKind, SpanStatusCode } from "./interfaces"; +// Utilities +export { extractSpanContextFromTraceParentHeader, getTraceParentHeader } from "./utils/traceParentHeader"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/index.js.map b/node_modules/@azure/core-tracing/dist-esm/src/index.js.map new file mode 100644 index 0000000..37bcb74 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,uBAAuB;AACvB,OAAO,EAAE,kBAAkB,EAA0B,MAAM,cAAc,CAAC;AAE1E,oBAAoB;AACpB,OAAO,EACL,OAAO,EAOP,OAAO,EACP,cAAc,EACd,SAAS,EAET,kBAAkB,EAGlB,OAAO,EACP,cAAc,EAKd,QAAQ,EAGR,cAAc,EAKf,MAAM,cAAc,CAAC;AAEtB,YAAY;AACZ,OAAO,EACL,uCAAuC,EACvC,oBAAoB,EACrB,MAAM,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// Tracers and wrappers\nexport { createSpanFunction, CreateSpanFunctionArgs } from \"./createSpan\";\n\n// Shared interfaces\nexport {\n context,\n Context,\n ContextAPI,\n Exception,\n ExceptionWithCode,\n ExceptionWithMessage,\n ExceptionWithName,\n getSpan,\n getSpanContext,\n getTracer,\n HrTime,\n isSpanContextValid,\n Link,\n OperationTracingOptions,\n setSpan,\n setSpanContext,\n Span,\n SpanAttributes,\n SpanAttributeValue,\n SpanContext,\n SpanKind,\n SpanOptions,\n SpanStatus,\n SpanStatusCode,\n TimeInput,\n TraceFlags,\n Tracer,\n TraceState\n} from \"./interfaces\";\n\n// Utilities\nexport {\n extractSpanContextFromTraceParentHeader,\n getTraceParentHeader\n} from \"./utils/traceParentHeader\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js new file mode 100644 index 0000000..e251b1f --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { context as otContext, trace as otTrace } from "@opentelemetry/api"; +/** + * The kind of span. + */ +export var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind || (SpanKind = {})); +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +export function getSpan(context) { + return otTrace.getSpan(context); +} +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +export function setSpan(context, span) { + return otTrace.setSpan(context, span); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +export function setSpanContext(context, spanContext) { + return otTrace.setSpanContext(context, spanContext); +} +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +export function getSpanContext(context) { + return otTrace.getSpanContext(context); +} +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +export function isSpanContextValid(context) { + return otTrace.isSpanContextValid(context); +} +export function getTracer(name, version) { + return otTrace.getTracer(name || "azure/core-tracing", version); +} +/** Entrypoint for context API */ +export const context = otContext; +/** SpanStatusCode */ +export var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. 
+ */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode || (SpanStatusCode = {})); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map new file mode 100644 index 0000000..cd371cd --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../../src/interfaces.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,KAAK,IAAI,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAoF5E;;GAEG;AACH,MAAM,CAAN,IAAY,QAyBX;AAzBD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IACZ;;;OAGG;IACH,2CAAU,CAAA;IACV;;;OAGG;IACH,2CAAU,CAAA;IACV;;;;OAIG;IACH,+CAAY,CAAA;IACZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EAzBW,QAAQ,KAAR,QAAQ,QAyBnB;AAqDD;;;;GAIG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB;IACtC,OAAO,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAClC,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB,EAAE,WAAwB;IACvE,OAAO,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACtD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB;IAC7C,OAAO,OAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;AACzC,CAAC;AAYD;;;;;;;GAOG;AACH,MAAM,UAAU,kBAAkB,CAAC,OAAoB;IACrD,OAAO,OAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;AAC7C,CAAC;AAUD,MAAM,UAAU,SAAS,CAAC,IAAa,EAAE,OAAgB;IACvD,OAAO,OAAO,CAAC,SAAS,CAAC,IAAI,IAAI,oBAAoB,EAAE,OAAO,CAAC,CAAC;AAClE,CAAC;AAED,iCAAiC;AACjC,MAAM,CAAC,MAAM,OAAO,GAAe,SAAS,CAAC;AAE7C,qBAAqB;AACrB,MAAM,CAAN,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,KAAd,cAAc,QAczB","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { context as otContext, trace as otTrace } from \"@opentelemetry/api\";\n\n/**\n * A Tracer.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. 
Start the span without setting it on context.\n *\n * This method does NOT modify the current Context.\n *\n * @param name - The name of the span\n * @param options - SpanOptions used for span creation\n * @param context - Context to use to extract parent\n * @returns The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n}\n\n/**\n * TraceState.\n */\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key - key of the TraceState entry.\n * @param value - value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key - the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key - with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n\n/**\n * Represents high resolution time.\n */\nexport declare type HrTime = [number, number];\n\n/**\n * Used to represent a Time.\n */\nexport type TimeInput = HrTime | number | Date;\n\n/**\n * The status for a span.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. */\n message?: string;\n}\n\n/**\n * The kind of span.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4\n}\n\n/**\n * An Exception for a Span.\n */\nexport declare type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n\n/**\n * An Exception with a code.\n */\nexport interface ExceptionWithCode {\n /** The code. */\n code: string | number;\n /** The name. */\n name?: string;\n /** The message. */\n message?: string;\n /** The stack. 
*/\n stack?: string;\n}\n\n/**\n * An Exception with a message.\n */\nexport interface ExceptionWithMessage {\n /** The code. */\n code?: string | number;\n /** The message. */\n message: string;\n /** The name. */\n name?: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * An Exception with a name.\n */\nexport interface ExceptionWithName {\n /** The code. */\n code?: string | number;\n /** The message. */\n message?: string;\n /** The name. */\n name: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * Return the span if one exists\n *\n * @param context - context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return otTrace.getSpan(context);\n}\n\n/**\n * Set the span on a context\n *\n * @param context - context to use as parent\n * @param span - span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return otTrace.setSpan(context, span);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context - context to set active span on\n * @param spanContext - span context to be wrapped\n */\nexport function setSpanContext(context: Context, spanContext: SpanContext): Context {\n return otTrace.setSpanContext(context, spanContext);\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context - context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return otTrace.getSpanContext(context);\n}\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport interface ContextAPI {\n /**\n * Get the currently active context\n */\n active(): Context;\n}\n\n/**\n * Returns true of the given {@link SpanContext} is valid.\n * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec.\n *\n * @param context - the {@link SpanContext} to validate.\n *\n * @returns true if the {@link SpanContext} is valid, false otherwise.\n */\nexport function isSpanContextValid(context: SpanContext): boolean {\n return otTrace.isSpanContextValid(context);\n}\n\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(): Tracer;\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(name: string, version?: string): Tracer;\nexport function getTracer(name?: string, version?: string): Tracer {\n return otTrace.getTracer(name || \"azure/core-tracing\", version);\n}\n\n/** Entrypoint for context API */\nexport const context: ContextAPI = otContext;\n\n/** SpanStatusCode */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2\n}\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. 
Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key - the key for this attribute.\n * @param value - the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n /**\n * Sets attributes to the span.\n *\n * @param attributes - the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n /**\n * Adds an event to the Span.\n *\n * @param name - the name of the event.\n * @param attributesOrStartTime - the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is TimeInput and 3rd param is undefined\n * @param startTime - start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status - the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param endTime - the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception - the exception the only accepted values are string or Error\n * @param time - the time to set as Span's event time. If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name - the Span name.\n */\n updateName(name: string): this;\n}\n\n/**\n * Shorthand enum for common traceFlags values inside SpanContext\n */\nexport const enum TraceFlags {\n /** No flag set. */\n NONE = 0x0,\n /** Caller is collecting trace information. */\n SAMPLED = 0x1\n}\n\n/**\n * A light interface that tries to be structurally compatible with OpenTelemetry\n */\nexport interface SpanContext {\n /**\n * UUID of a trace.\n */\n traceId: string;\n /**\n * UUID of a Span.\n */\n spanId: string;\n /**\n * https://www.w3.org/TR/trace-context/#trace-flags\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. 
The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n\n/**\n * Used to specify a span that is linked to another.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n\n /** A set of {@link SpanAttributes} on the link. */\n attributes?: SpanAttributes;\n}\n\n/**\n * Attributes for a Span.\n */\nexport interface SpanAttributes {\n /**\n * Attributes for a Span.\n */\n [attributeKey: string]: SpanAttributeValue | undefined;\n}\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport declare type SpanAttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n\n/**\n * An interface that enables manual propagation of Spans\n */\nexport interface SpanOptions {\n /**\n * Attributes to set on the Span\n */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /**\n * The type of Span. Default to SpanKind.INTERNAL\n */\n kind?: SpanKind;\n\n /**\n * A manually specified start time for the created `Span` object.\n */\n startTime?: TimeInput;\n}\n\n/**\n * Tracing options to set on an operation.\n */\nexport interface OperationTracingOptions {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context to use for created Spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * OpenTelemetry compatible interface for Context\n */\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js new file mode 100644 index 0000000..90657ba --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const VERSION = "00"; +/** + * Generates a `SpanContext` given a `traceparent` header value. + * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. 
+ */ +export function extractSpanContextFromTraceParentHeader(traceParentHeader) { + const parts = traceParentHeader.split("-"); + if (parts.length !== 4) { + return; + } + const [version, traceId, spanId, traceOptions] = parts; + if (version !== VERSION) { + return; + } + const traceFlags = parseInt(traceOptions, 16); + const spanContext = { + spanId, + traceId, + traceFlags + }; + return spanContext; +} +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +export function getTraceParentHeader(spanContext) { + const missingFields = []; + if (!spanContext.traceId) { + missingFields.push("traceId"); + } + if (!spanContext.spanId) { + missingFields.push("spanId"); + } + if (missingFields.length) { + return; + } + const flags = spanContext.traceFlags || 0 /* NONE */; + const hexFlags = flags.toString(16); + const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; + // https://www.w3.org/TR/trace-context/#traceparent-header-field-values + return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; +} +//# sourceMappingURL=traceParentHeader.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map new file mode 100644 index 0000000..d63dd9d --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map @@ -0,0 +1 @@ +{"version":3,"file":"traceParentHeader.js","sourceRoot":"","sources":["../../../src/utils/traceParentHeader.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,MAAM,OAAO,GAAG,IAAI,CAAC;AAErB;;;;GAIG;AACH,MAAM,UAAU,uCAAuC,CACrD,iBAAyB;IAEzB,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAE3C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,OAAO;KACR;IAED,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,YAAY,CAAC,GAAG,KAAK,CAAC;IAEvD,IAAI,OAAO,KAAK,OAAO,EAAE;QACvB,OAAO;KACR;IAED,MAAM,UAAU,GAAG,QAAQ,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;IAE9C,MAAM,WAAW,GAAgB;QAC/B,MAAM;QACN,OAAO;QACP,UAAU;KACX,CAAC;IAEF,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAAC,WAAwB;IAC3D,MAAM,aAAa,GAAa,EAAE,CAAC;IACnC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;QACxB,aAAa,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;KAC/B;IACD,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;QACvB,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC9B;IAED,IAAI,aAAa,CAAC,MAAM,EAAE;QACxB,OAAO;KACR;IAED,MAAM,KAAK,GAAG,WAAW,CAAC,UAAU,gBAAmB,CAAC;IACxD,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IACpC,MAAM,UAAU,GAAG,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,QAAQ,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC;IAErE,uEAAuE;IACvE,OAAO,GAAG,OAAO,IAAI,WAAW,CAAC,OAAO,IAAI,WAAW,CAAC,MAAM,IAAI,UAAU,EAAE,CAAC;AACjF,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SpanContext, TraceFlags } from \"../interfaces\";\n\nconst VERSION = \"00\";\n\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent - Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nexport function extractSpanContextFromTraceParentHeader(\n traceParentHeader: string\n): SpanContext | undefined {\n const parts = traceParentHeader.split(\"-\");\n\n if (parts.length !== 4) {\n return;\n }\n\n const [version, traceId, spanId, traceOptions] = parts;\n\n if (version !== VERSION) {\n return;\n }\n\n const 
traceFlags = parseInt(traceOptions, 16);\n\n const spanContext: SpanContext = {\n spanId,\n traceId,\n traceFlags\n };\n\n return spanContext;\n}\n\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext - Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nexport function getTraceParentHeader(spanContext: SpanContext): string | undefined {\n const missingFields: string[] = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n\n if (missingFields.length) {\n return;\n }\n\n const flags = spanContext.traceFlags || TraceFlags.NONE;\n const hexFlags = flags.toString(16);\n const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags;\n\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/index.js b/node_modules/@azure/core-tracing/dist/index.js new file mode 100644 index 0000000..527bbb4 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/index.js @@ -0,0 +1,219 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var api = require('@opentelemetry/api'); + +// Copyright (c) Microsoft Corporation. +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(exports.SpanKind || (exports.SpanKind = {})); +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +function getSpan(context) { + return api.trace.getSpan(context); +} +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +function setSpan(context, span) { + return api.trace.setSpan(context, span); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return api.trace.setSpanContext(context, spanContext); +} +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +function getSpanContext(context) { + return api.trace.getSpanContext(context); +} +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * + * @param context - the {@link SpanContext} to validate. 
+ * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +function isSpanContextValid(context) { + return api.trace.isSpanContextValid(context); +} +function getTracer(name, version) { + return api.trace.getTracer(name || "azure/core-tracing", version); +} +/** Entrypoint for context API */ +const context = api.context; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(exports.SpanStatusCode || (exports.SpanStatusCode = {})); + +// Copyright (c) Microsoft Corporation. +function isTracingDisabled() { + var _a; + if (typeof process === "undefined") { + // not supported in browser for now without polyfills + return false; + } + const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { + return false; + } + return Boolean(azureTracingDisabledValue); +} +/** + * Creates a function that can be used to create spans using the global tracer. + * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +function createSpanFunction(args) { + return function (operationName, operationOptions) { + const tracer = getTracer(); + const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; + const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); + const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; + let span; + if (isTracingDisabled()) { + span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); + } + else { + span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); + } + if (args.namespace) { + span.setAttribute("az.namespace", args.namespace); + } + let newSpanOptions = tracingOptions.spanOptions || {}; + if (span.isRecording() && args.namespace) { + newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + } + const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); + const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + return { + span, + updatedOptions: newOperationOptions + }; + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const VERSION = "00"; +/** + * Generates a `SpanContext` given a `traceparent` header value. + * @param traceParent - Serialized span context data as a `traceparent` header value. 
+ * @returns The `SpanContext` generated from the `traceparent` value. + */ +function extractSpanContextFromTraceParentHeader(traceParentHeader) { + const parts = traceParentHeader.split("-"); + if (parts.length !== 4) { + return; + } + const [version, traceId, spanId, traceOptions] = parts; + if (version !== VERSION) { + return; + } + const traceFlags = parseInt(traceOptions, 16); + const spanContext = { + spanId, + traceId, + traceFlags + }; + return spanContext; +} +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +function getTraceParentHeader(spanContext) { + const missingFields = []; + if (!spanContext.traceId) { + missingFields.push("traceId"); + } + if (!spanContext.spanId) { + missingFields.push("spanId"); + } + if (missingFields.length) { + return; + } + const flags = spanContext.traceFlags || 0 /* NONE */; + const hexFlags = flags.toString(16); + const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; + // https://www.w3.org/TR/trace-context/#traceparent-header-field-values + return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; +} + +exports.context = context; +exports.createSpanFunction = createSpanFunction; +exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; +exports.getSpan = getSpan; +exports.getSpanContext = getSpanContext; +exports.getTraceParentHeader = getTraceParentHeader; +exports.getTracer = getTracer; +exports.isSpanContextValid = isSpanContextValid; +exports.setSpan = setSpan; +exports.setSpanContext = setSpanContext; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-tracing/dist/index.js.map b/node_modules/@azure/core-tracing/dist/index.js.map new file mode 100644 index 0000000..c1b0268 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/interfaces.ts","../src/createSpan.ts","../src/utils/traceParentHeader.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { context as otContext, trace as otTrace } from \"@opentelemetry/api\";\n\n/**\n * A Tracer.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. 
Start the span without setting it on context.\n *\n * This method does NOT modify the current Context.\n *\n * @param name - The name of the span\n * @param options - SpanOptions used for span creation\n * @param context - Context to use to extract parent\n * @returns The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n}\n\n/**\n * TraceState.\n */\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key - key of the TraceState entry.\n * @param value - value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key - the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key - with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n\n/**\n * Represents high resolution time.\n */\nexport declare type HrTime = [number, number];\n\n/**\n * Used to represent a Time.\n */\nexport type TimeInput = HrTime | number | Date;\n\n/**\n * The status for a span.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. */\n message?: string;\n}\n\n/**\n * The kind of span.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4\n}\n\n/**\n * An Exception for a Span.\n */\nexport declare type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n\n/**\n * An Exception with a code.\n */\nexport interface ExceptionWithCode {\n /** The code. */\n code: string | number;\n /** The name. */\n name?: string;\n /** The message. */\n message?: string;\n /** The stack. 
*/\n stack?: string;\n}\n\n/**\n * An Exception with a message.\n */\nexport interface ExceptionWithMessage {\n /** The code. */\n code?: string | number;\n /** The message. */\n message: string;\n /** The name. */\n name?: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * An Exception with a name.\n */\nexport interface ExceptionWithName {\n /** The code. */\n code?: string | number;\n /** The message. */\n message?: string;\n /** The name. */\n name: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * Return the span if one exists\n *\n * @param context - context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return otTrace.getSpan(context);\n}\n\n/**\n * Set the span on a context\n *\n * @param context - context to use as parent\n * @param span - span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return otTrace.setSpan(context, span);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context - context to set active span on\n * @param spanContext - span context to be wrapped\n */\nexport function setSpanContext(context: Context, spanContext: SpanContext): Context {\n return otTrace.setSpanContext(context, spanContext);\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context - context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return otTrace.getSpanContext(context);\n}\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport interface ContextAPI {\n /**\n * Get the currently active context\n */\n active(): Context;\n}\n\n/**\n * Returns true of the given {@link SpanContext} is valid.\n * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec.\n *\n * @param context - the {@link SpanContext} to validate.\n *\n * @returns true if the {@link SpanContext} is valid, false otherwise.\n */\nexport function isSpanContextValid(context: SpanContext): boolean {\n return otTrace.isSpanContextValid(context);\n}\n\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(): Tracer;\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(name: string, version?: string): Tracer;\nexport function getTracer(name?: string, version?: string): Tracer {\n return otTrace.getTracer(name || \"azure/core-tracing\", version);\n}\n\n/** Entrypoint for context API */\nexport const context: ContextAPI = otContext;\n\n/** SpanStatusCode */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2\n}\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. 
Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key - the key for this attribute.\n * @param value - the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n /**\n * Sets attributes to the span.\n *\n * @param attributes - the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n /**\n * Adds an event to the Span.\n *\n * @param name - the name of the event.\n * @param attributesOrStartTime - the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is TimeInput and 3rd param is undefined\n * @param startTime - start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status - the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param endTime - the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception - the exception the only accepted values are string or Error\n * @param time - the time to set as Span's event time. If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name - the Span name.\n */\n updateName(name: string): this;\n}\n\n/**\n * Shorthand enum for common traceFlags values inside SpanContext\n */\nexport const enum TraceFlags {\n /** No flag set. */\n NONE = 0x0,\n /** Caller is collecting trace information. */\n SAMPLED = 0x1\n}\n\n/**\n * A light interface that tries to be structurally compatible with OpenTelemetry\n */\nexport interface SpanContext {\n /**\n * UUID of a trace.\n */\n traceId: string;\n /**\n * UUID of a Span.\n */\n spanId: string;\n /**\n * https://www.w3.org/TR/trace-context/#trace-flags\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. 
The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n\n/**\n * Used to specify a span that is linked to another.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n\n /** A set of {@link SpanAttributes} on the link. */\n attributes?: SpanAttributes;\n}\n\n/**\n * Attributes for a Span.\n */\nexport interface SpanAttributes {\n /**\n * Attributes for a Span.\n */\n [attributeKey: string]: SpanAttributeValue | undefined;\n}\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport declare type SpanAttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n\n/**\n * An interface that enables manual propagation of Spans\n */\nexport interface SpanOptions {\n /**\n * Attributes to set on the Span\n */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /**\n * The type of Span. Default to SpanKind.INTERNAL\n */\n kind?: SpanKind;\n\n /**\n * A manually specified start time for the created `Span` object.\n */\n startTime?: TimeInput;\n}\n\n/**\n * Tracing options to set on an operation.\n */\nexport interface OperationTracingOptions {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context to use for created Spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * OpenTelemetry compatible interface for Context\n */\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n OperationTracingOptions,\n Span,\n SpanOptions,\n SpanKind,\n setSpan,\n context as otContext,\n getTracer\n} from \"./interfaces\";\nimport { trace, INVALID_SPAN_CONTEXT } from \"@opentelemetry/api\";\n\n/**\n * Arguments for `createSpanFunction` that allow you to specify the\n * prefix for each created span as well as the `az.namespace` attribute.\n *\n * @hidden\n */\nexport interface CreateSpanFunctionArgs {\n /**\n * Package name prefix.\n *\n * NOTE: if this is empty no prefix will be applied to created Span names.\n */\n packagePrefix: string;\n /**\n * Service namespace\n *\n * NOTE: if this is empty no `az.namespace` attribute will be added to created 
Spans.\n */\n namespace: string;\n}\n\nexport function isTracingDisabled(): boolean {\n if (typeof process === \"undefined\") {\n // not supported in browser for now without polyfills\n return false;\n }\n\n const azureTracingDisabledValue = process.env.AZURE_TRACING_DISABLED?.toLowerCase();\n\n if (azureTracingDisabledValue === \"false\" || azureTracingDisabledValue === \"0\") {\n return false;\n }\n\n return Boolean(azureTracingDisabledValue);\n}\n\n/**\n * Creates a function that can be used to create spans using the global tracer.\n *\n * Usage:\n *\n * ```typescript\n * // once\n * const createSpan = createSpanFunction({ packagePrefix: \"Azure.Data.AppConfiguration\", namespace: \"Microsoft.AppConfiguration\" });\n *\n * // in each operation\n * const span = createSpan(\"deleteConfigurationSetting\", operationOptions);\n * // code...\n * span.end();\n * ```\n *\n * @hidden\n * @param args - allows configuration of the prefix for each span as well as the az.namespace field.\n */\nexport function createSpanFunction(args: CreateSpanFunctionArgs) {\n return function(\n operationName: string,\n operationOptions: T | undefined\n ): { span: Span; updatedOptions: T } {\n const tracer = getTracer();\n const tracingOptions = operationOptions?.tracingOptions || {};\n const spanOptions: SpanOptions = {\n kind: SpanKind.INTERNAL,\n ...tracingOptions.spanOptions\n };\n\n const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName;\n\n let span: Span;\n if (isTracingDisabled()) {\n span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT);\n } else {\n span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext);\n }\n\n if (args.namespace) {\n span.setAttribute(\"az.namespace\", args.namespace);\n }\n\n let newSpanOptions = tracingOptions.spanOptions || {};\n\n if (span.isRecording() && args.namespace) {\n newSpanOptions = {\n ...tracingOptions.spanOptions,\n attributes: {\n ...spanOptions.attributes,\n \"az.namespace\": args.namespace\n }\n };\n }\n\n const newTracingOptions: Required = {\n ...tracingOptions,\n spanOptions: newSpanOptions,\n tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span)\n };\n\n const newOperationOptions = {\n ...operationOptions,\n tracingOptions: newTracingOptions\n } as T & { tracingOptions: Required };\n\n return {\n span,\n updatedOptions: newOperationOptions\n };\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SpanContext, TraceFlags } from \"../interfaces\";\n\nconst VERSION = \"00\";\n\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent - Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nexport function extractSpanContextFromTraceParentHeader(\n traceParentHeader: string\n): SpanContext | undefined {\n const parts = traceParentHeader.split(\"-\");\n\n if (parts.length !== 4) {\n return;\n }\n\n const [version, traceId, spanId, traceOptions] = parts;\n\n if (version !== VERSION) {\n return;\n }\n\n const traceFlags = parseInt(traceOptions, 16);\n\n const spanContext: SpanContext = {\n spanId,\n traceId,\n traceFlags\n };\n\n return spanContext;\n}\n\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext - Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nexport function getTraceParentHeader(spanContext: SpanContext): 
string | undefined {\n const missingFields: string[] = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n\n if (missingFields.length) {\n return;\n }\n\n const flags = spanContext.traceFlags || TraceFlags.NONE;\n const hexFlags = flags.toString(16);\n const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags;\n\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`;\n}\n"],"names":["SpanKind","otTrace","otContext","SpanStatusCode","trace","INVALID_SPAN_CONTEXT"],"mappings":";;;;;;AAAA;AA0FA,WAAY,QAAQ;;IAElB,+CAAY,CAAA;;;;;IAKZ,2CAAU,CAAA;;;;;IAKV,2CAAU,CAAA;;;;;;IAMV,+CAAY,CAAA;;;;;;IAMZ,+CAAY,CAAA;AACd,CAAC,EAzBWA,gBAAQ,KAARA,gBAAQ,QAyBnB;AAqDD;;;;;SAKgB,OAAO,CAAC,OAAgB;IACtC,OAAOC,SAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAClC,CAAC;AAED;;;;;;SAMgB,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAOA,SAAO,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC;AAED;;;;;;;SAOgB,cAAc,CAAC,OAAgB,EAAE,WAAwB;IACvE,OAAOA,SAAO,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACtD,CAAC;AAED;;;;;SAKgB,cAAc,CAAC,OAAgB;IAC7C,OAAOA,SAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;AACzC,CAAC;AAYD;;;;;;;;SAQgB,kBAAkB,CAAC,OAAoB;IACrD,OAAOA,SAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;AAC7C,CAAC;SAUe,SAAS,CAAC,IAAa,EAAE,OAAgB;IACvD,OAAOA,SAAO,CAAC,SAAS,CAAC,IAAI,IAAI,oBAAoB,EAAE,OAAO,CAAC,CAAC;AAClE,CAAC;AAED;MACa,OAAO,GAAeC,YAAU;AAG7C,WAAY,cAAc;;;;IAIxB,qDAAS,CAAA;;;;;IAKT,+CAAM,CAAA;;;;IAIN,qDAAS,CAAA;AACX,CAAC,EAdWC,sBAAc,KAAdA,sBAAc;;ACrP1B;AACA,SAkCgB,iBAAiB;;IAC/B,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;;QAElC,OAAO,KAAK,CAAC;KACd;IAED,MAAM,yBAAyB,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,sBAAsB,0CAAE,WAAW,EAAE,CAAC;IAEpF,IAAI,yBAAyB,KAAK,OAAO,IAAI,yBAAyB,KAAK,GAAG,EAAE;QAC9E,OAAO,KAAK,CAAC;KACd;IAED,OAAO,OAAO,CAAC,yBAAyB,CAAC,CAAC;AAC5C,CAAC;AAED;;;;;;;;;;;;;;;;;;AAkBA,SAAgB,kBAAkB,CAAC,IAA4B;IAC7D,OAAO,UACL,aAAqB,EACrB,gBAA+B;QAE/B,MAAM,MAAM,GAAG,SAAS,EAAE,CAAC;QAC3B,MAAM,cAAc,GAAG,CAAA,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAE,cAAc,KAAI,EAAE,CAAC;QAC9D,MAAM,WAAW,mBACf,IAAI,EAAEH,gBAAQ,CAAC,QAAQ,IACpB,cAAc,CAAC,WAAW,CAC9B,CAAC;QAEF,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,GAAG,GAAG,IAAI,CAAC,aAAa,IAAI,aAAa,EAAE,GAAG,aAAa,CAAC;QAE/F,IAAI,IAAU,CAAC;QACf,IAAI,iBAAiB,EAAE,EAAE;YACvB,IAAI,GAAGI,SAAK,CAAC,eAAe,CAACC,wBAAoB,CAAC,CAAC;SACpD;aAAM;YACL,IAAI,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE,WAAW,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;SAC/E;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SACnD;QAED,IAAI,cAAc,GAAG,cAAc,CAAC,WAAW,IAAI,EAAE,CAAC;QAEtD,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,IAAI,CAAC,SAAS,EAAE;YACxC,cAAc,mCACT,cAAc,CAAC,WAAW,KAC7B,UAAU,kCACL,WAAW,CAAC,UAAU,KACzB,cAAc,EAAE,IAAI,CAAC,SAAS,MAEjC,CAAC;SACH;QAED,MAAM,iBAAiB,mCAClB,cAAc,KACjB,WAAW,EAAE,cAAc,EAC3B,cAAc,EAAE,OAAO,CAAC,cAAc,CAAC,cAAc,IAAIH,OAAS,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC,GACnF,CAAC;QAEF,MAAM,mBAAmB,GAAG,gCACvB,gBAAgB,KACnB,cAAc,EAAE,iBAAiB,GAC2B,CAAC;QAE/D,OAAO;YACL,IAAI;YACJ,cAAc,EAAE,mBAAmB;SACpC,CAAC;KACH,CAAC;AACJ,CAAC;;ACzHD;AACA;AAIA,MAAM,OAAO,GAAG,IAAI,CAAC;AAErB;;;;;AAKA,SAAgB,uCAAuC,CACrD,iBAAyB;IAEzB,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAE3C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,OAAO;KACR;IAED,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,YAAY,CAAC,GAAG,KAAK,CAAC;IAEvD,IAAI,OAAO,KAAK,OAAO,EAAE;QACvB,OAAO;KACR;IAED,MAAM,UAAU,GAAG,QAAQ,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;IAE9C,MAAM,WAAW,GAAgB;QAC/B,MAAM;QACN,OAAO;QACP,UAAU;KACX,CAAC;IAEF,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;;;AAKA,SAAgB,oBA
AoB,CAAC,WAAwB;IAC3D,MAAM,aAAa,GAAa,EAAE,CAAC;IACnC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;QACxB,aAAa,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;KAC/B;IACD,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;QACvB,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC9B;IAED,IAAI,aAAa,CAAC,MAAM,EAAE;QACxB,OAAO;KACR;IAED,MAAM,KAAK,GAAG,WAAW,CAAC,UAAU,iBAAoB;IACxD,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IACpC,MAAM,UAAU,GAAG,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,IAAI,QAAQ,EAAE,GAAG,QAAQ,CAAC;;IAGrE,OAAO,GAAG,OAAO,IAAI,WAAW,CAAC,OAAO,IAAI,WAAW,CAAC,MAAM,IAAI,UAAU,EAAE,CAAC;AACjF,CAAC;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/package.json b/node_modules/@azure/core-tracing/package.json new file mode 100644 index 0000000..1cd7235 --- /dev/null +++ b/node_modules/@azure/core-tracing/package.json @@ -0,0 +1,99 @@ +{ + "name": "@azure/core-tracing", + "version": "1.0.0-preview.13", + "description": "Provides low-level interfaces and helper methods for tracing in Azure SDK", + "sdk-type": "client", + "main": "dist/index.js", + "module": "dist-esm/src/index.js", + "browser": { + "./dist-esm/src/utils/global.js": "./dist-esm/src/utils/global.browser.js" + }, + "types": "types/core-tracing.d.ts", + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && rollup -c 2>&1", + "build": "tsc -p . && rollup -c 2>&1 && api-extractor run --local", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "prebuild": "npm run clean", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . 
&& npm run unit-test:node && rollup -c 2>&1 && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "files": [ + "dist/", + "dist-esm/src/", + "types/core-tracing.d.ts", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "tracing", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=12.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/README.md", + "sideEffects": false, + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/core-auth": "^1.3.0", + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "7.7.11", + "@opentelemetry/tracing": "^0.22.0", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "eslint": "^7.15.0", + "inherits": "^2.0.3", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^1.18.0", + "prettier": "^1.16.4", + "rimraf": "^3.0.0", + "rollup": "^1.16.3", + "typescript": "~4.2.0", + "util": "^0.12.1", + "typedoc": "0.15.2", + "sinon": "^9.0.2", + "@types/sinon": "^9.0.4" + } +} diff --git a/node_modules/@azure/core-tracing/types/core-tracing.d.ts b/node_modules/@azure/core-tracing/types/core-tracing.d.ts new file mode 100644 index 0000000..bdae78d --- /dev/null +++ b/node_modules/@azure/core-tracing/types/core-tracing.d.ts @@ -0,0 +1,531 @@ + +/** + * OpenTelemetry compatible interface for Context + */ +export declare interface Context { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} + +/** Entrypoint for context API */ +export declare const context: ContextAPI; + +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare interface ContextAPI { + /** + * Get the currently active context + */ + active(): Context; +} + +/** + * Creates a function that can be used to create spans using the global tracer. 
+ * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +export declare function createSpanFunction(args: CreateSpanFunctionArgs): (operationName: string, operationOptions: T | undefined) => { + span: Span; + updatedOptions: T; +}; + +/** + * Arguments for `createSpanFunction` that allow you to specify the + * prefix for each created span as well as the `az.namespace` attribute. + * + * @hidden + */ +export declare interface CreateSpanFunctionArgs { + /** + * Package name prefix. + * + * NOTE: if this is empty no prefix will be applied to created Span names. + */ + packagePrefix: string; + /** + * Service namespace + * + * NOTE: if this is empty no `az.namespace` attribute will be added to created Spans. + */ + namespace: string; +} + +/** + * An Exception for a Span. + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; + +/** + * An Exception with a code. + */ +export declare interface ExceptionWithCode { + /** The code. */ + code: string | number; + /** The name. */ + name?: string; + /** The message. */ + message?: string; + /** The stack. */ + stack?: string; +} + +/** + * An Exception with a message. + */ +export declare interface ExceptionWithMessage { + /** The code. */ + code?: string | number; + /** The message. */ + message: string; + /** The name. */ + name?: string; + /** The stack. */ + stack?: string; +} + +/** + * An Exception with a name. + */ +export declare interface ExceptionWithName { + /** The code. */ + code?: string | number; + /** The message. */ + message?: string; + /** The name. */ + name: string; + /** The stack. */ + stack?: string; +} + +/** + * Generates a `SpanContext` given a `traceparent` header value. + * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. + */ +export declare function extractSpanContextFromTraceParentHeader(traceParentHeader: string): SpanContext | undefined; + +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; + +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; + +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +export declare function getTraceParentHeader(spanContext: SpanContext): string | undefined; + +/** + * Retrieves a tracer from the global tracer provider. + */ +export declare function getTracer(): Tracer; + +/** + * Retrieves a tracer from the global tracer provider. + */ +export declare function getTracer(name: string, version?: string): Tracer; + +/** + * Represents high resolution time. + */ +export declare type HrTime = [number, number]; + +/** + * Returns true of the given {@link SpanContext} is valid. 
+ * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +export declare function isSpanContextValid(context: SpanContext): boolean; + +/** + * Used to specify a span that is linked to another. + */ +export declare interface Link { + /** The {@link SpanContext} of a linked span. */ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. */ + attributes?: SpanAttributes; +} + +/** + * Tracing options to set on an operation. + */ +export declare interface OperationTracingOptions { + /** + * OpenTelemetry SpanOptions used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * OpenTelemetry context to use for created Spans. + */ + tracingContext?: Context; +} + +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; + +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; + +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export declare interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. + * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key - the key for this attribute. + * @param value - the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes - the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name - the name of the event. + * @param attributesOrStartTime - the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is TimeInput and 3rd param is undefined + * @param startTime - start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status - the SpanStatus to set. 
+ */ + setStatus(status: SpanStatus): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param endTime - the time to set as Span's end time. If not provided, + * use the current time as the span's end time. + */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception - the exception the only accepted values are string or Error + * @param time - the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name - the Span name. + */ + updateName(name: string): this; +} + +/** + * Attributes for a Span. + */ +export declare interface SpanAttributes { + /** + * Attributes for a Span. + */ + [attributeKey: string]: SpanAttributeValue | undefined; +} + +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type SpanAttributeValue = string | number | boolean | Array | Array | Array; + +/** + * A light interface that tries to be structurally compatible with OpenTelemetry + */ +export declare interface SpanContext { + /** + * UUID of a trace. + */ + traceId: string; + /** + * UUID of a Span. + */ + spanId: string; + /** + * https://www.w3.org/TR/trace-context/#trace-flags + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} + +/** + * The kind of span. + */ +export declare enum SpanKind { + /** Default value. Indicates that the span is used internally. */ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} + +/** + * An interface that enables manual propagation of Spans + */ +export declare interface SpanOptions { + /** + * Attributes to set on the Span + */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** + * The type of Span. Default to SpanKind.INTERNAL + */ + kind?: SpanKind; + /** + * A manually specified start time for the created `Span` object. + */ + startTime?: TimeInput; +} + +/** + * The status for a span. + */ +export declare interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} + +/** SpanStatusCode */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} + +/** + * Used to represent a Time. + */ +export declare type TimeInput = HrTime | number | Date; + +/** + * Shorthand enum for common traceFlags values inside SpanContext + */ +export declare const enum TraceFlags { + /** No flag set. */ + NONE = 0, + /** Caller is collecting trace information. */ + SAMPLED = 1 +} + +/** + * A Tracer. + */ +export declare interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method does NOT modify the current Context. + * + * @param name - The name of the span + * @param options - SpanOptions used for span creation + * @param context - Context to use to extract parent + * @returns The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; +} + +/** + * TraceState. + */ +export declare interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key - key of the TraceState entry. + * @param value - value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key - the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key - with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. 
+ */ + serialize(): string; +} + +export { } diff --git a/node_modules/@azure/core-util/LICENSE b/node_modules/@azure/core-util/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-util/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-util/README.md b/node_modules/@azure/core-util/README.md new file mode 100644 index 0000000..516ba51 --- /dev/null +++ b/node_modules/@azure/core-util/README.md @@ -0,0 +1,40 @@ +# Azure Core Util client library for JavaScript (Experimental) + +This library is intended to provide various shared utility functions for client SDK packages. + +## Getting started + +### Requirements + +### Currently supported environments + +- [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule) +- Latest versions of Safari, Chrome, Edge, and Firefox. + +See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +### Installation + +This package is primarily used in authoring client SDKs and not meant to be consumed directly by end users. + +## Key concepts + +Utility methods provided by this library should be stateless. + +## Examples + +Examples can be found in the `samples` folder. + +## Next steps + +Look at usage in dependent client SDKs. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-util%2FREADME.png) diff --git a/node_modules/@azure/core-util/dist/browser/aborterUtils.d.ts b/node_modules/@azure/core-util/dist/browser/aborterUtils.d.ts new file mode 100644 index 0000000..89a2f98 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/aborterUtils.d.ts @@ -0,0 +1,27 @@ +import type { AbortSignalLike } from "@azure/abort-controller"; +/** + * Options related to abort controller. + */ +export interface AbortOptions { + /** + * The abortSignal associated with containing operation. + */ + abortSignal?: AbortSignalLike; + /** + * The abort error message associated with containing operation. 
+ */ + abortErrorMsg?: string; +} +/** + * Represents a function that returns a promise that can be aborted. + */ +export type AbortablePromiseBuilder = (abortOptions: { + abortSignal?: AbortSignalLike; +}) => Promise; +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. + */ +export declare function cancelablePromiseRace(abortablePromiseBuilders: AbortablePromiseBuilder[], options?: { + abortSignal?: AbortSignalLike; +}): Promise; +//# sourceMappingURL=aborterUtils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/aborterUtils.d.ts.map b/node_modules/@azure/core-util/dist/browser/aborterUtils.d.ts.map new file mode 100644 index 0000000..e7f705b --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/aborterUtils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"aborterUtils.d.ts","sourceRoot":"","sources":["../../src/aborterUtils.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE/D;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED;;GAEG;AACH,MAAM,MAAM,uBAAuB,CAAC,CAAC,IAAI,CAAC,YAAY,EAAE;IACtD,WAAW,CAAC,EAAE,eAAe,CAAC;CAC/B,KAAK,OAAO,CAAC,CAAC,CAAC,CAAC;AAEjB;;GAEG;AACH,wBAAsB,qBAAqB,CAAC,CAAC,SAAS,OAAO,EAAE,EAC7D,wBAAwB,EAAE,uBAAuB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,EAC9D,OAAO,CAAC,EAAE;IAAE,WAAW,CAAC,EAAE,eAAe,CAAA;CAAE,GAC1C,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAcpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/aborterUtils.js b/node_modules/@azure/core-util/dist/browser/aborterUtils.js new file mode 100644 index 0000000..c34a95d --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/aborterUtils.js @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. 
+ */ +export async function cancelablePromiseRace(abortablePromiseBuilders, options) { + const aborter = new AbortController(); + function abortHandler() { + aborter.abort(); + } + options?.abortSignal?.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); + } + finally { + aborter.abort(); + options?.abortSignal?.removeEventListener("abort", abortHandler); + } +} +//# sourceMappingURL=aborterUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/aborterUtils.js.map b/node_modules/@azure/core-util/dist/browser/aborterUtils.js.map new file mode 100644 index 0000000..ff71dce --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/aborterUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"aborterUtils.js","sourceRoot":"","sources":["../../src/aborterUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAyBlC;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CACzC,wBAA8D,EAC9D,OAA2C;IAE3C,MAAM,OAAO,GAAG,IAAI,eAAe,EAAE,CAAC;IACtC,SAAS,YAAY;QACnB,OAAO,CAAC,KAAK,EAAE,CAAC;IAClB,CAAC;IACD,OAAO,EAAE,WAAW,EAAE,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IAC9D,IAAI,CAAC;QACH,OAAO,MAAM,OAAO,CAAC,IAAI,CACvB,wBAAwB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CACxE,CAAC;IACJ,CAAC;YAAS,CAAC;QACT,OAAO,CAAC,KAAK,EAAE,CAAC;QAChB,OAAO,EAAE,WAAW,EAAE,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IACnE,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport type { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options related to abort controller.\n */\nexport interface AbortOptions {\n /**\n * The abortSignal associated with containing operation.\n */\n abortSignal?: AbortSignalLike;\n /**\n * The abort error message associated with containing operation.\n */\n abortErrorMsg?: string;\n}\n\n/**\n * Represents a function that returns a promise that can be aborted.\n */\nexport type AbortablePromiseBuilder = (abortOptions: {\n abortSignal?: AbortSignalLike;\n}) => Promise;\n\n/**\n * promise.race() wrapper that aborts rest of promises as soon as the first promise settles.\n */\nexport async function cancelablePromiseRace(\n abortablePromiseBuilders: AbortablePromiseBuilder[],\n options?: { abortSignal?: AbortSignalLike },\n): Promise {\n const aborter = new AbortController();\n function abortHandler(): void {\n aborter.abort();\n }\n options?.abortSignal?.addEventListener(\"abort\", abortHandler);\n try {\n return await Promise.race(\n abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal })),\n );\n } finally {\n aborter.abort();\n options?.abortSignal?.removeEventListener(\"abort\", abortHandler);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/bytesEncoding-browser.d.mts.map b/node_modules/@azure/core-util/dist/browser/bytesEncoding-browser.d.mts.map new file mode 100644 index 0000000..b36c2e5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/bytesEncoding-browser.d.mts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"bytesEncoding-browser.d.mts","sourceRoot":"","sources":["../../src/bytesEncoding-browser.mts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IAEb,SAAS,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAAC;IACrC,SAAS,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAAC;CACtC;AAED,4CAA4C;AAC5C,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,QAAQ,GAAG,WAAW,GAAG,KAAK,CAAC;AAEpE;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,YAAY,GAAG,MAAM,CAWlF;AAED;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,GAAG,UAAU,CAWlF;AAED;;;GAGG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAE5D;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAE/D;AAED;;;GAGG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAIhE;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAE/D;AAED;;;GAGG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAEhE;AAED;;;GAGG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAE5D;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAG/D;AAID;;;GAGG;AACH,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAe/D"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/bytesEncoding-browser.mjs.map b/node_modules/@azure/core-util/dist/browser/bytesEncoding-browser.mjs.map new file mode 100644 index 0000000..bd717b9 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/bytesEncoding-browser.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"bytesEncoding-browser.mjs","sourceRoot":"","sources":["../../src/bytesEncoding-browser.mts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAWlC;;;;;GAKG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAiB,EAAE,MAAoB;IACxE,QAAQ,MAAM,EAAE,CAAC;QACf,KAAK,OAAO;YACV,OAAO,sBAAsB,CAAC,KAAK,CAAC,CAAC;QACvC,KAAK,QAAQ;YACX,OAAO,kBAAkB,CAAC,KAAK,CAAC,CAAC;QACnC,KAAK,WAAW;YACd,OAAO,qBAAqB,CAAC,KAAK,CAAC,CAAC;QACtC,KAAK,KAAK;YACR,OAAO,qBAAqB,CAAC,KAAK,CAAC,CAAC;IACxC,CAAC;AACH,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAa,EAAE,MAAoB;IACpE,QAAQ,MAAM,EAAE,CAAC;QACf,KAAK,OAAO;YACV,OAAO,sBAAsB,CAAC,KAAK,CAAC,CAAC;QACvC,KAAK,QAAQ;YACX,OAAO,kBAAkB,CAAC,KAAK,CAAC,CAAC;QACnC,KAAK,WAAW;YACd,OAAO,qBAAqB,CAAC,KAAK,CAAC,CAAC;QACtC,KAAK,KAAK;YACR,OAAO,qBAAqB,CAAC,KAAK,CAAC,CAAC;IACxC,CAAC;AACH,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAiB;IAClD,OAAO,IAAI,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CAAC,KAAiB;IACrD,OAAO,kBAAkB,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;AAC7F,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,sBAAsB,CAAC,KAAiB;IACtD,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;IAClC,MAAM,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IACzC,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CAAC,KAAiB;IACrD,OAAO,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,sBAAsB,CAAC,KAAa;IAClD,OAAO,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAa;IAC9C,OAAO,IAAI,UAAU,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CAAC,KAAa;IACjD,MAAM,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;IACjE,OAAO,kBAAkB,CAAC,YAAY,CAAC,CAAC;AAC1C,CAAC;AAED,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,wBAAw
B,CAAC,CAAC;AAEpD;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CAAC,KAAa;IACjD,sGAAsG;IACtG,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IAC/C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC;QAC1C,MAAM,UAAU,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QAChC,MAAM,SAAS,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;QACnC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC;YAC5D,oFAAoF;YACpF,OAAO,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC3B,CAAC;QAED,KAAK,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,GAAG,UAAU,GAAG,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;IACvD,CAAC;IAED,OAAO,KAAK,CAAC;AACf,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n // stub these out for the browser\n function btoa(input: string): string;\n function atob(input: string): string;\n}\n\n/** The supported character encoding type */\nexport type EncodingType = \"utf-8\" | \"base64\" | \"base64url\" | \"hex\";\n\n/**\n * The helper that transforms bytes with specific character encoding into string\n * @param bytes - the uint8array bytes\n * @param format - the format we use to encode the byte\n * @returns a string of the encoded string\n */\nexport function uint8ArrayToString(bytes: Uint8Array, format: EncodingType): string {\n switch (format) {\n case \"utf-8\":\n return uint8ArrayToUtf8String(bytes);\n case \"base64\":\n return uint8ArrayToBase64(bytes);\n case \"base64url\":\n return uint8ArrayToBase64Url(bytes);\n case \"hex\":\n return uint8ArrayToHexString(bytes);\n }\n}\n\n/**\n * The helper that transforms string to specific character encoded bytes array.\n * @param value - the string to be converted\n * @param format - the format we use to decode the value\n * @returns a uint8array\n */\nexport function stringToUint8Array(value: string, format: EncodingType): Uint8Array {\n switch (format) {\n case \"utf-8\":\n return utf8StringToUint8Array(value);\n case \"base64\":\n return base64ToUint8Array(value);\n case \"base64url\":\n return base64UrlToUint8Array(value);\n case \"hex\":\n return hexStringToUint8Array(value);\n }\n}\n\n/**\n * Decodes a Uint8Array into a Base64 string.\n * @internal\n */\nexport function uint8ArrayToBase64(bytes: Uint8Array): string {\n return btoa([...bytes].map((x) => String.fromCharCode(x)).join(\"\"));\n}\n\n/**\n * Decodes a Uint8Array into a Base64Url string.\n * @internal\n */\nexport function uint8ArrayToBase64Url(bytes: Uint8Array): string {\n return uint8ArrayToBase64(bytes).replace(/\\+/g, \"-\").replace(/\\//g, \"_\").replace(/=/g, \"\");\n}\n\n/**\n * Decodes a Uint8Array into a javascript string.\n * @internal\n */\nexport function uint8ArrayToUtf8String(bytes: Uint8Array): string {\n const decoder = new TextDecoder();\n const dataString = decoder.decode(bytes);\n return dataString;\n}\n\n/**\n * Decodes a Uint8Array into a hex string\n * @internal\n */\nexport function uint8ArrayToHexString(bytes: Uint8Array): string {\n return [...bytes].map((x) => x.toString(16).padStart(2, \"0\")).join(\"\");\n}\n\n/**\n * Encodes a JavaScript string into a Uint8Array.\n * @internal\n */\nexport function utf8StringToUint8Array(value: string): Uint8Array {\n return new TextEncoder().encode(value);\n}\n\n/**\n * Encodes a Base64 string into a Uint8Array.\n * @internal\n */\nexport function base64ToUint8Array(value: string): Uint8Array {\n return new Uint8Array([...atob(value)].map((x) => x.charCodeAt(0)));\n}\n\n/**\n * Encodes a Base64Url string into a 
Uint8Array.\n * @internal\n */\nexport function base64UrlToUint8Array(value: string): Uint8Array {\n const base64String = value.replace(/-/g, \"+\").replace(/_/g, \"/\");\n return base64ToUint8Array(base64String);\n}\n\nconst hexDigits = new Set(\"0123456789abcdefABCDEF\");\n\n/**\n * Encodes a hex string into a Uint8Array\n * @internal\n */\nexport function hexStringToUint8Array(value: string): Uint8Array {\n // If value has odd length, the last character will be ignored, consistent with NodeJS Buffer behavior\n const bytes = new Uint8Array(value.length / 2);\n for (let i = 0; i < value.length / 2; ++i) {\n const highNibble = value[2 * i];\n const lowNibble = value[2 * i + 1];\n if (!hexDigits.has(highNibble) || !hexDigits.has(lowNibble)) {\n // Replicate Node Buffer behavior by exiting early when we encounter an invalid byte\n return bytes.slice(0, i);\n }\n\n bytes[i] = parseInt(`${highNibble}${lowNibble}`, 16);\n }\n\n return bytes;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/bytesEncoding.d.ts b/node_modules/@azure/core-util/dist/browser/bytesEncoding.d.ts new file mode 100644 index 0000000..5577131 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/bytesEncoding.d.ts @@ -0,0 +1,61 @@ +declare global { + function btoa(input: string): string; + function atob(input: string): string; +} +/** The supported character encoding type */ +export type EncodingType = "utf-8" | "base64" | "base64url" | "hex"; +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +export declare function uint8ArrayToString(bytes: Uint8Array, format: EncodingType): string; +/** + * The helper that transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +export declare function stringToUint8Array(value: string, format: EncodingType): Uint8Array; +/** + * Decodes a Uint8Array into a Base64 string. + * @internal + */ +export declare function uint8ArrayToBase64(bytes: Uint8Array): string; +/** + * Decodes a Uint8Array into a Base64Url string. + * @internal + */ +export declare function uint8ArrayToBase64Url(bytes: Uint8Array): string; +/** + * Decodes a Uint8Array into a javascript string. + * @internal + */ +export declare function uint8ArrayToUtf8String(bytes: Uint8Array): string; +/** + * Decodes a Uint8Array into a hex string + * @internal + */ +export declare function uint8ArrayToHexString(bytes: Uint8Array): string; +/** + * Encodes a JavaScript string into a Uint8Array. + * @internal + */ +export declare function utf8StringToUint8Array(value: string): Uint8Array; +/** + * Encodes a Base64 string into a Uint8Array. + * @internal + */ +export declare function base64ToUint8Array(value: string): Uint8Array; +/** + * Encodes a Base64Url string into a Uint8Array. 
+ * @internal + */ +export declare function base64UrlToUint8Array(value: string): Uint8Array; +/** + * Encodes a hex string into a Uint8Array + * @internal + */ +export declare function hexStringToUint8Array(value: string): Uint8Array; +//# sourceMappingURL=bytesEncoding-browser.d.mts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/bytesEncoding.js b/node_modules/@azure/core-util/dist/browser/bytesEncoding.js new file mode 100644 index 0000000..110dc74 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/bytesEncoding.js @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +export function uint8ArrayToString(bytes, format) { + switch (format) { + case "utf-8": + return uint8ArrayToUtf8String(bytes); + case "base64": + return uint8ArrayToBase64(bytes); + case "base64url": + return uint8ArrayToBase64Url(bytes); + case "hex": + return uint8ArrayToHexString(bytes); + } +} +/** + * The helper that transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +export function stringToUint8Array(value, format) { + switch (format) { + case "utf-8": + return utf8StringToUint8Array(value); + case "base64": + return base64ToUint8Array(value); + case "base64url": + return base64UrlToUint8Array(value); + case "hex": + return hexStringToUint8Array(value); + } +} +/** + * Decodes a Uint8Array into a Base64 string. + * @internal + */ +export function uint8ArrayToBase64(bytes) { + return btoa([...bytes].map((x) => String.fromCharCode(x)).join("")); +} +/** + * Decodes a Uint8Array into a Base64Url string. + * @internal + */ +export function uint8ArrayToBase64Url(bytes) { + return uint8ArrayToBase64(bytes).replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, ""); +} +/** + * Decodes a Uint8Array into a javascript string. + * @internal + */ +export function uint8ArrayToUtf8String(bytes) { + const decoder = new TextDecoder(); + const dataString = decoder.decode(bytes); + return dataString; +} +/** + * Decodes a Uint8Array into a hex string + * @internal + */ +export function uint8ArrayToHexString(bytes) { + return [...bytes].map((x) => x.toString(16).padStart(2, "0")).join(""); +} +/** + * Encodes a JavaScript string into a Uint8Array. + * @internal + */ +export function utf8StringToUint8Array(value) { + return new TextEncoder().encode(value); +} +/** + * Encodes a Base64 string into a Uint8Array. + * @internal + */ +export function base64ToUint8Array(value) { + return new Uint8Array([...atob(value)].map((x) => x.charCodeAt(0))); +} +/** + * Encodes a Base64Url string into a Uint8Array. 
+ * @internal + */ +export function base64UrlToUint8Array(value) { + const base64String = value.replace(/-/g, "+").replace(/_/g, "/"); + return base64ToUint8Array(base64String); +} +const hexDigits = new Set("0123456789abcdefABCDEF"); +/** + * Encodes a hex string into a Uint8Array + * @internal + */ +export function hexStringToUint8Array(value) { + // If value has odd length, the last character will be ignored, consistent with NodeJS Buffer behavior + const bytes = new Uint8Array(value.length / 2); + for (let i = 0; i < value.length / 2; ++i) { + const highNibble = value[2 * i]; + const lowNibble = value[2 * i + 1]; + if (!hexDigits.has(highNibble) || !hexDigits.has(lowNibble)) { + // Replicate Node Buffer behavior by exiting early when we encounter an invalid byte + return bytes.slice(0, i); + } + bytes[i] = parseInt(`${highNibble}${lowNibble}`, 16); + } + return bytes; +} +//# sourceMappingURL=bytesEncoding-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/checkEnvironment.d.ts b/node_modules/@azure/core-util/dist/browser/checkEnvironment.d.ts new file mode 100644 index 0000000..63273c1 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/checkEnvironment.d.ts @@ -0,0 +1,25 @@ +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +export declare const isBrowser: boolean; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +export declare const isWebWorker: boolean; +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +export declare const isDeno: boolean; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +export declare const isBun: boolean; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export declare const isNode: boolean; +/** + * A constant that indicates whether the environment the code is running is in React-Native. + */ +export declare const isReactNative: boolean; +//# sourceMappingURL=checkEnvironment.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/checkEnvironment.d.ts.map b/node_modules/@azure/core-util/dist/browser/checkEnvironment.d.ts.map new file mode 100644 index 0000000..ccff768 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/checkEnvironment.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"checkEnvironment.d.ts","sourceRoot":"","sources":["../../src/checkEnvironment.ts"],"names":[],"mappings":"AAoCA;;GAEG;AAEH,eAAO,MAAM,SAAS,SAA0E,CAAC;AAEjG;;GAEG;AACH,eAAO,MAAM,WAAW,SAKiC,CAAC;AAE1D;;GAEG;AACH,eAAO,MAAM,MAAM,SAGuB,CAAC;AAE3C;;GAEG;AACH,eAAO,MAAM,KAAK,SAAmE,CAAC;AAEtF;;GAEG;AACH,eAAO,MAAM,MAAM,SAMX,CAAC;AAET;;GAEG;AAEH,eAAO,MAAM,aAAa,SACgD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/checkEnvironment.js b/node_modules/@azure/core-util/dist/browser/checkEnvironment.js new file mode 100644 index 0000000..53ba62e --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/checkEnvironment.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A constant that indicates whether the environment the code is running is a Web Browser. 
+ */ +// eslint-disable-next-line @azure/azure-sdk/ts-no-window +export const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +export const isWebWorker = typeof self === "object" && + typeof self?.importScripts === "function" && + (self.constructor?.name === "DedicatedWorkerGlobalScope" || + self.constructor?.name === "ServiceWorkerGlobalScope" || + self.constructor?.name === "SharedWorkerGlobalScope"); +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +export const isDeno = typeof Deno !== "undefined" && + typeof Deno.version !== "undefined" && + typeof Deno.version.deno !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +export const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export const isNode = typeof globalThis.process !== "undefined" && + Boolean(globalThis.process.version) && + Boolean(globalThis.process.versions?.node) && + // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions + !isDeno && + !isBun; +/** + * A constant that indicates whether the environment the code is running is in React-Native. + */ +// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js +export const isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; +//# sourceMappingURL=checkEnvironment.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/checkEnvironment.js.map b/node_modules/@azure/core-util/dist/browser/checkEnvironment.js.map new file mode 100644 index 0000000..5b1f90f --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/checkEnvironment.js.map @@ -0,0 +1 @@ +{"version":3,"file":"checkEnvironment.js","sourceRoot":"","sources":["../../src/checkEnvironment.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAmClC;;GAEG;AACH,yDAAyD;AACzD,MAAM,CAAC,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,WAAW,CAAC;AAEjG;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GACtB,OAAO,IAAI,KAAK,QAAQ;IACxB,OAAO,IAAI,EAAE,aAAa,KAAK,UAAU;IACzC,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,4BAA4B;QACtD,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,0BAA0B;QACrD,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,yBAAyB,CAAC,CAAC;AAE1D;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GACjB,OAAO,IAAI,KAAK,WAAW;IAC3B,OAAO,IAAI,CAAC,OAAO,KAAK,WAAW;IACnC,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,WAAW,CAAC;AAE3C;;GAEG;AACH,MAAM,CAAC,MAAM,KAAK,GAAG,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,WAAW,CAAC;AAEtF;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GACjB,OAAO,UAAU,CAAC,OAAO,KAAK,WAAW;IACzC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC;IACnC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC;IAC1C,+HAA+H;IAC/H,CAAC,MAAM;IACP,CAAC,KAAK,CAAC;AAET;;GAEG;AACH,4GAA4G;AAC5G,MAAM,CAAC,MAAM,aAAa,GACxB,OAAO,SAAS,KAAK,WAAW,IAAI,SAAS,EAAE,OAAO,KAAK,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ninterface Window {\n document: unknown;\n}\n\ninterface DedicatedWorkerGlobalScope {\n constructor: {\n name: string;\n };\n\n importScripts: (...paths: string[]) => void;\n}\n\ninterface Navigator {\n product: string;\n}\n\ninterface DenoGlobal {\n version: {\n deno: string;\n };\n}\n\ninterface BunGlobal 
{\n version: string;\n}\n\n// eslint-disable-next-line @azure/azure-sdk/ts-no-window\ndeclare const window: Window;\ndeclare const self: DedicatedWorkerGlobalScope;\ndeclare const Deno: DenoGlobal;\ndeclare const Bun: BunGlobal;\ndeclare const navigator: Navigator;\n\n/**\n * A constant that indicates whether the environment the code is running is a Web Browser.\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-no-window\nexport const isBrowser = typeof window !== \"undefined\" && typeof window.document !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is a Web Worker.\n */\nexport const isWebWorker =\n typeof self === \"object\" &&\n typeof self?.importScripts === \"function\" &&\n (self.constructor?.name === \"DedicatedWorkerGlobalScope\" ||\n self.constructor?.name === \"ServiceWorkerGlobalScope\" ||\n self.constructor?.name === \"SharedWorkerGlobalScope\");\n\n/**\n * A constant that indicates whether the environment the code is running is Deno.\n */\nexport const isDeno =\n typeof Deno !== \"undefined\" &&\n typeof Deno.version !== \"undefined\" &&\n typeof Deno.version.deno !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is Bun.sh.\n */\nexport const isBun = typeof Bun !== \"undefined\" && typeof Bun.version !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is Node.JS.\n */\nexport const isNode =\n typeof globalThis.process !== \"undefined\" &&\n Boolean(globalThis.process.version) &&\n Boolean(globalThis.process.versions?.node) &&\n // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions\n !isDeno &&\n !isBun;\n\n/**\n * A constant that indicates whether the environment the code is running is in React-Native.\n */\n// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js\nexport const isReactNative =\n typeof navigator !== \"undefined\" && navigator?.product === \"ReactNative\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/createAbortablePromise.d.ts b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.d.ts new file mode 100644 index 0000000..183c705 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.d.ts @@ -0,0 +1,16 @@ +import type { AbortOptions } from "./aborterUtils.js"; +/** + * Options for the createAbortablePromise function. + */ +export interface CreateAbortablePromiseOptions extends AbortOptions { + /** A function to be called if the promise was aborted */ + cleanupBeforeAbort?: () => void; +} +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. 
+ */ +export declare function createAbortablePromise(buildPromise: (resolve: (value: T | PromiseLike) => void, reject: (reason?: any) => void) => void, options?: CreateAbortablePromiseOptions): Promise; +//# sourceMappingURL=createAbortablePromise.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/createAbortablePromise.d.ts.map b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.d.ts.map new file mode 100644 index 0000000..705d2e9 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"createAbortablePromise.d.ts","sourceRoot":"","sources":["../../src/createAbortablePromise.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAEtD;;GAEG;AACH,MAAM,WAAW,6BAA8B,SAAQ,YAAY;IACjE,yDAAyD;IACzD,kBAAkB,CAAC,EAAE,MAAM,IAAI,CAAC;CACjC;AAED;;;;;GAKG;AACH,wBAAgB,sBAAsB,CAAC,CAAC,EACtC,YAAY,EAAE,CACZ,OAAO,EAAE,CAAC,KAAK,EAAE,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,KAAK,IAAI,EAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,IAAI,KAC3B,IAAI,EACT,OAAO,CAAC,EAAE,6BAA6B,GACtC,OAAO,CAAC,CAAC,CAAC,CAiCZ"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js new file mode 100644 index 0000000..fc54e7c --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +export function createAbortablePromise(buildPromise, options) { + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {}; + return new Promise((resolve, reject) => { + function rejectOnAbort() { + reject(new AbortError(abortErrorMsg ?? 
"The operation was aborted.")); + } + function removeListeners() { + abortSignal?.removeEventListener("abort", onAbort); + } + function onAbort() { + cleanupBeforeAbort?.(); + removeListeners(); + rejectOnAbort(); + } + if (abortSignal?.aborted) { + return rejectOnAbort(); + } + try { + buildPromise((x) => { + removeListeners(); + resolve(x); + }, (x) => { + removeListeners(); + reject(x); + }); + } + catch (err) { + reject(err); + } + abortSignal?.addEventListener("abort", onAbort); + }); +} +//# sourceMappingURL=createAbortablePromise.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js.map b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js.map new file mode 100644 index 0000000..6f928a9 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createAbortablePromise.js","sourceRoot":"","sources":["../../src/createAbortablePromise.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAWrD;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,YAGS,EACT,OAAuC;IAEvC,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,aAAa,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACzE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,SAAS,aAAa;YACpB,MAAM,CAAC,IAAI,UAAU,CAAC,aAAa,IAAI,4BAA4B,CAAC,CAAC,CAAC;QACxE,CAAC;QACD,SAAS,eAAe;YACtB,WAAW,EAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACrD,CAAC;QACD,SAAS,OAAO;YACd,kBAAkB,EAAE,EAAE,CAAC;YACvB,eAAe,EAAE,CAAC;YAClB,aAAa,EAAE,CAAC;QAClB,CAAC;QACD,IAAI,WAAW,EAAE,OAAO,EAAE,CAAC;YACzB,OAAO,aAAa,EAAE,CAAC;QACzB,CAAC;QACD,IAAI,CAAC;YACH,YAAY,CACV,CAAC,CAAC,EAAE,EAAE;gBACJ,eAAe,EAAE,CAAC;gBAClB,OAAO,CAAC,CAAC,CAAC,CAAC;YACb,CAAC,EACD,CAAC,CAAC,EAAE,EAAE;gBACJ,eAAe,EAAE,CAAC;gBAClB,MAAM,CAAC,CAAC,CAAC,CAAC;YACZ,CAAC,CACF,CAAC;QACJ,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC;QACD,WAAW,EAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAClD,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from \"@azure/abort-controller\";\nimport type { AbortOptions } from \"./aborterUtils.js\";\n\n/**\n * Options for the createAbortablePromise function.\n */\nexport interface CreateAbortablePromiseOptions extends AbortOptions {\n /** A function to be called if the promise was aborted */\n cleanupBeforeAbort?: () => void;\n}\n\n/**\n * Creates an abortable promise.\n * @param buildPromise - A function that takes the resolve and reject functions as parameters.\n * @param options - The options for the abortable promise.\n * @returns A promise that can be aborted.\n */\nexport function createAbortablePromise(\n buildPromise: (\n resolve: (value: T | PromiseLike) => void,\n reject: (reason?: any) => void,\n ) => void,\n options?: CreateAbortablePromiseOptions,\n): Promise {\n const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {};\n return new Promise((resolve, reject) => {\n function rejectOnAbort(): void {\n reject(new AbortError(abortErrorMsg ?? 
\"The operation was aborted.\"));\n }\n function removeListeners(): void {\n abortSignal?.removeEventListener(\"abort\", onAbort);\n }\n function onAbort(): void {\n cleanupBeforeAbort?.();\n removeListeners();\n rejectOnAbort();\n }\n if (abortSignal?.aborted) {\n return rejectOnAbort();\n }\n try {\n buildPromise(\n (x) => {\n removeListeners();\n resolve(x);\n },\n (x) => {\n removeListeners();\n reject(x);\n },\n );\n } catch (err) {\n reject(err);\n }\n abortSignal?.addEventListener(\"abort\", onAbort);\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/delay.d.ts b/node_modules/@azure/core-util/dist/browser/delay.d.ts new file mode 100644 index 0000000..9dc4b5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/delay.d.ts @@ -0,0 +1,14 @@ +import type { AbortOptions } from "./aborterUtils.js"; +/** + * Options for support abort functionality for the delay method + */ +export interface DelayOptions extends AbortOptions { +} +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +export declare function delay(timeInMs: number, options?: DelayOptions): Promise; +//# sourceMappingURL=delay.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/delay.d.ts.map b/node_modules/@azure/core-util/dist/browser/delay.d.ts.map new file mode 100644 index 0000000..8d2d31b --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/delay.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.d.ts","sourceRoot":"","sources":["../../src/delay.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAKtD;;GAEG;AACH,MAAM,WAAW,YAAa,SAAQ,YAAY;CAAG;AAErD;;;;;GAKG;AACH,wBAAgB,KAAK,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,CAa7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/delay.js b/node_modules/@azure/core-util/dist/browser/delay.js new file mode 100644 index 0000000..3e12ea5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/delay.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createAbortablePromise } from "./createAbortablePromise.js"; +const StandardAbortMessage = "The delay was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +export function delay(timeInMs, options) { + let token; + const { abortSignal, abortErrorMsg } = options ?? {}; + return createAbortablePromise((resolve) => { + token = setTimeout(resolve, timeInMs); + }, { + cleanupBeforeAbort: () => clearTimeout(token), + abortSignal, + abortErrorMsg: abortErrorMsg ?? 
StandardAbortMessage, + }); +} +//# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/delay.js.map b/node_modules/@azure/core-util/dist/browser/delay.js.map new file mode 100644 index 0000000..b02f90c --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/delay.js.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.js","sourceRoot":"","sources":["../../src/delay.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AAErE,MAAM,oBAAoB,GAAG,wBAAwB,CAAC;AAOtD;;;;;GAKG;AACH,MAAM,UAAU,KAAK,CAAC,QAAgB,EAAE,OAAsB;IAC5D,IAAI,KAAoC,CAAC;IACzC,MAAM,EAAE,WAAW,EAAE,aAAa,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACrD,OAAO,sBAAsB,CAC3B,CAAC,OAAO,EAAE,EAAE;QACV,KAAK,GAAG,UAAU,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;IACxC,CAAC,EACD;QACE,kBAAkB,EAAE,GAAG,EAAE,CAAC,YAAY,CAAC,KAAK,CAAC;QAC7C,WAAW;QACX,aAAa,EAAE,aAAa,IAAI,oBAAoB;KACrD,CACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport type { AbortOptions } from \"./aborterUtils.js\";\nimport { createAbortablePromise } from \"./createAbortablePromise.js\";\n\nconst StandardAbortMessage = \"The delay was aborted.\";\n\n/**\n * Options for support abort functionality for the delay method\n */\nexport interface DelayOptions extends AbortOptions {}\n\n/**\n * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds.\n * @param timeInMs - The number of milliseconds to be delayed.\n * @param options - The options for delay - currently abort options\n * @returns Promise that is resolved after timeInMs\n */\nexport function delay(timeInMs: number, options?: DelayOptions): Promise {\n let token: ReturnType;\n const { abortSignal, abortErrorMsg } = options ?? {};\n return createAbortablePromise(\n (resolve) => {\n token = setTimeout(resolve, timeInMs);\n },\n {\n cleanupBeforeAbort: () => clearTimeout(token),\n abortSignal,\n abortErrorMsg: abortErrorMsg ?? StandardAbortMessage,\n },\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/error.d.ts b/node_modules/@azure/core-util/dist/browser/error.d.ts new file mode 100644 index 0000000..5027f12 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/error.d.ts @@ -0,0 +1,13 @@ +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +export declare function isError(e: unknown): e is Error; +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. 
+ * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +export declare function getErrorMessage(e: unknown): string; +//# sourceMappingURL=error.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/error.d.ts.map b/node_modules/@azure/core-util/dist/browser/error.d.ts.map new file mode 100644 index 0000000..d56797d --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/error.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"error.d.ts","sourceRoot":"","sources":["../../src/error.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,wBAAgB,OAAO,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,IAAI,KAAK,CAO9C;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,CAAC,EAAE,OAAO,GAAG,MAAM,CAgBlD"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/error.js b/node_modules/@azure/core-util/dist/browser/error.js new file mode 100644 index 0000000..8d107eb --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/error.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObject } from "./object.js"; +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +export function isError(e) { + if (isObject(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; + } + return false; +} +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. + * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +export function getErrorMessage(e) { + if (isError(e)) { + return e.message; + } + else { + let stringified; + try { + if (typeof e === "object" && e) { + stringified = JSON.stringify(e); + } + else { + stringified = String(e); + } + } + catch (err) { + stringified = "[unable to stringify input]"; + } + return `Unknown error ${stringified}`; + } +} +//# sourceMappingURL=error.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/error.js.map b/node_modules/@azure/core-util/dist/browser/error.js.map new file mode 100644 index 0000000..3e33e10 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"error.js","sourceRoot":"","sources":["../../src/error.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC;;;GAGG;AACH,MAAM,UAAU,OAAO,CAAC,CAAU;IAChC,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC;QAChB,MAAM,OAAO,GAAG,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC;QAC3C,MAAM,UAAU,GAAG,OAAO,CAAC,CAAC,OAAO,KAAK,QAAQ,CAAC;QACjD,OAAO,OAAO,IAAI,UAAU,CAAC;IAC/B,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,CAAU;IACxC,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACf,OAAO,CAAC,CAAC,OAAO,CAAC;IACnB,CAAC;SAAM,CAAC;QACN,IAAI,WAAmB,CAAC;QACxB,IAAI,CAAC;YACH,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,EAAE,CAAC;gBAC/B,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;iBAAM,CAAC;gBACN,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAC1B,CAAC;QACH,CAAC;QAAC,OAAO,GAAQ,EAAE,CAAC;YAClB,WAAW,GAAG,6BAA6B,CAAC;QAC9C,CAAC;QACD,OAAO,iBAAiB,WAAW,EAAE,CAAC;IACxC,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObject } from \"./object.js\";\n\n/**\n * Typeguard for an error object shape (has name and message)\n 
* @param e - Something caught by a catch clause.\n */\nexport function isError(e: unknown): e is Error {\n if (isObject(e)) {\n const hasName = typeof e.name === \"string\";\n const hasMessage = typeof e.message === \"string\";\n return hasName && hasMessage;\n }\n return false;\n}\n\n/**\n * Given what is thought to be an error object, return the message if possible.\n * If the message is missing, returns a stringified version of the input.\n * @param e - Something thrown from a try block\n * @returns The error message or a string of the input\n */\nexport function getErrorMessage(e: unknown): string {\n if (isError(e)) {\n return e.message;\n } else {\n let stringified: string;\n try {\n if (typeof e === \"object\" && e) {\n stringified = JSON.stringify(e);\n } else {\n stringified = String(e);\n }\n } catch (err: any) {\n stringified = \"[unable to stringify input]\";\n }\n return `Unknown error ${stringified}`;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/index.d.ts b/node_modules/@azure/core-util/dist/browser/index.d.ts new file mode 100644 index 0000000..14fada5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/index.d.ts @@ -0,0 +1,12 @@ +export { delay, type DelayOptions } from "./delay.js"; +export { type AbortOptions, cancelablePromiseRace, type AbortablePromiseBuilder, } from "./aborterUtils.js"; +export { createAbortablePromise, type CreateAbortablePromiseOptions, } from "./createAbortablePromise.js"; +export { getRandomIntegerInclusive } from "./random.js"; +export { isObject, type UnknownObject } from "./object.js"; +export { isError, getErrorMessage } from "./error.js"; +export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; +export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; +export { randomUUID } from "./uuidUtils.js"; +export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { uint8ArrayToString, stringToUint8Array, type EncodingType } from "./bytesEncoding.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/index.d.ts.map b/node_modules/@azure/core-util/dist/browser/index.d.ts.map new file mode 100644 index 0000000..77255da --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,KAAK,EAAE,KAAK,YAAY,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EACL,KAAK,YAAY,EACjB,qBAAqB,EACrB,KAAK,uBAAuB,GAC7B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,sBAAsB,EACtB,KAAK,6BAA6B,GACnC,MAAM,6BAA6B,CAAC;AACrC,OAAO,EAAE,yBAAyB,EAAE,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAE,KAAK,aAAa,EAAE,MAAM,aAAa,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAAE,SAAS,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACvF,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EACL,SAAS,EACT,KAAK,EACL,MAAM,EACN,MAAM,EACN,aAAa,EACb,WAAW,GACZ,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAE,KAAK,YAAY,EAAE,MAAM,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/index.js b/node_modules/@azure/core-util/dist/browser/index.js new file mode 100644 index 0000000..64b7b80 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/index.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +export { delay } from "./delay.js"; +export { cancelablePromiseRace, } from "./aborterUtils.js"; +export { createAbortablePromise, } from "./createAbortablePromise.js"; +export { getRandomIntegerInclusive } from "./random.js"; +export { isObject } from "./object.js"; +export { isError, getErrorMessage } from "./error.js"; +export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; +export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; +export { randomUUID } from "./uuidUtils.js"; +export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { uint8ArrayToString, stringToUint8Array } from "./bytesEncoding.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/index.js.map b/node_modules/@azure/core-util/dist/browser/index.js.map new file mode 100644 index 0000000..3d3e618 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,KAAK,EAAqB,MAAM,YAAY,CAAC;AACtD,OAAO,EAEL,qBAAqB,GAEtB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,sBAAsB,GAEvB,MAAM,6BAA6B,CAAC;AACrC,OAAO,EAAE,yBAAyB,EAAE,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAsB,MAAM,aAAa,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAAE,SAAS,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACvF,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EACL,SAAS,EACT,KAAK,EACL,MAAM,EACN,MAAM,EACN,aAAa,EACb,WAAW,GACZ,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAqB,MAAM,oBAAoB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { delay, type DelayOptions } from \"./delay.js\";\nexport {\n type AbortOptions,\n cancelablePromiseRace,\n type AbortablePromiseBuilder,\n} from \"./aborterUtils.js\";\nexport {\n createAbortablePromise,\n type CreateAbortablePromiseOptions,\n} from \"./createAbortablePromise.js\";\nexport { getRandomIntegerInclusive } from \"./random.js\";\nexport { isObject, type UnknownObject } from \"./object.js\";\nexport { isError, getErrorMessage } from \"./error.js\";\nexport { computeSha256Hash, computeSha256Hmac } from \"./sha256.js\";\nexport { isDefined, isObjectWithProperties, objectHasProperty } from \"./typeGuards.js\";\nexport { randomUUID } from \"./uuidUtils.js\";\nexport {\n isBrowser,\n isBun,\n isNode,\n isDeno,\n isReactNative,\n isWebWorker,\n} from \"./checkEnvironment.js\";\nexport { uint8ArrayToString, stringToUint8Array, type EncodingType } from \"./bytesEncoding.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/object.d.ts b/node_modules/@azure/core-util/dist/browser/object.d.ts new file mode 100644 index 0000000..fc3f33a --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/object.d.ts @@ -0,0 +1,12 @@ +/** + * A generic shape for a plain JS object. + */ +export type UnknownObject = { + [s: string]: unknown; +}; +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. 
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=object.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/object.d.ts.map b/node_modules/@azure/core-util/dist/browser/object.d.ts.map new file mode 100644 index 0000000..66b39bd --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/object.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"object.d.ts","sourceRoot":"","sources":["../../src/object.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,OAAO,CAAA;CAAE,CAAC;AAErD;;;GAGG;AACH,wBAAgB,QAAQ,CAAC,KAAK,EAAE,OAAO,GAAG,KAAK,IAAI,aAAa,CAQ/D"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/object.js b/node_modules/@azure/core-util/dist/browser/object.js new file mode 100644 index 0000000..ff04af8 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/object.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +export function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +//# sourceMappingURL=object.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/object.js.map b/node_modules/@azure/core-util/dist/browser/object.js.map new file mode 100644 index 0000000..3a5f096 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/object.js.map @@ -0,0 +1 @@ +{"version":3,"file":"object.js","sourceRoot":"","sources":["../../src/object.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC;;;GAGG;AACH,MAAM,UAAU,QAAQ,CAAC,KAAc;IACrC,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;QACzB,KAAK,KAAK,IAAI;QACd,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QACrB,CAAC,CAAC,KAAK,YAAY,MAAM,CAAC;QAC1B,CAAC,CAAC,KAAK,YAAY,IAAI,CAAC,CACzB,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A generic shape for a plain JS object.\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * Helper to determine when an input is a generic JS object.\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/package.json b/node_modules/@azure/core-util/dist/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-util/dist/browser/random.d.ts b/node_modules/@azure/core-util/dist/browser/random.d.ts new file mode 100644 index 0000000..9e9631a --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/random.d.ts @@ -0,0 +1,10 @@ +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. 
+ * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +export declare function getRandomIntegerInclusive(min: number, max: number): number; +//# sourceMappingURL=random.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/random.d.ts.map b/node_modules/@azure/core-util/dist/browser/random.d.ts.map new file mode 100644 index 0000000..8a91316 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/random.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"random.d.ts","sourceRoot":"","sources":["../../src/random.ts"],"names":[],"mappings":"AAGA;;;;;;;GAOG;AACH,wBAAgB,yBAAyB,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,MAAM,CAS1E"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/random.js b/node_modules/@azure/core-util/dist/browser/random.js new file mode 100644 index 0000000..aef3288 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/random.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +export function getRandomIntegerInclusive(min, max) { + // Make sure inputs are integers. + min = Math.ceil(min); + max = Math.floor(max); + // Pick a random offset from zero to the size of the range. + // Since Math.random() can never return 1, we have to make the range one larger + // in order to be inclusive of the maximum value after we take the floor. + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; +} +//# sourceMappingURL=random.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/random.js.map b/node_modules/@azure/core-util/dist/browser/random.js.map new file mode 100644 index 0000000..93d4d74 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/random.js.map @@ -0,0 +1 @@ +{"version":3,"file":"random.js","sourceRoot":"","sources":["../../src/random.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;GAOG;AACH,MAAM,UAAU,yBAAyB,CAAC,GAAW,EAAE,GAAW;IAChE,iCAAiC;IACjC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACrB,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACtB,2DAA2D;IAC3D,+EAA+E;IAC/E,yEAAyE;IACzE,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;IAC3D,OAAO,MAAM,GAAG,GAAG,CAAC;AACtB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Returns a random integer value between a lower and upper bound,\n * inclusive of both bounds.\n * Note that this uses Math.random and isn't secure. 
If you need to use\n * this for any kind of security purpose, find a better source of random.\n * @param min - The smallest integer value allowed.\n * @param max - The largest integer value allowed.\n */\nexport function getRandomIntegerInclusive(min: number, max: number): number {\n // Make sure inputs are integers.\n min = Math.ceil(min);\n max = Math.floor(max);\n // Pick a random offset from zero to the size of the range.\n // Since Math.random() can never return 1, we have to make the range one larger\n // in order to be inclusive of the maximum value after we take the floor.\n const offset = Math.floor(Math.random() * (max - min + 1));\n return offset + min;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/sha256-browser.d.mts.map b/node_modules/@azure/core-util/dist/browser/sha256-browser.d.mts.map new file mode 100644 index 0000000..187d898 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/sha256-browser.d.mts.map @@ -0,0 +1 @@ +{"version":3,"file":"sha256-browser.d.mts","sourceRoot":"","sources":["../../src/sha256-browser.mts"],"names":[],"mappings":"AAmEA;;;;;GAKG;AACH,wBAAsB,iBAAiB,CACrC,GAAG,EAAE,MAAM,EACX,YAAY,EAAE,MAAM,EACpB,QAAQ,EAAE,QAAQ,GAAG,KAAK,GACzB,OAAO,CAAC,MAAM,CAAC,CAyBjB;AAED;;;;GAIG;AACH,wBAAsB,iBAAiB,CACrC,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE,QAAQ,GAAG,KAAK,GACzB,OAAO,CAAC,MAAM,CAAC,CAKjB"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/sha256-browser.mjs.map b/node_modules/@azure/core-util/dist/browser/sha256-browser.mjs.map new file mode 100644 index 0000000..1bc66d3 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/sha256-browser.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"sha256-browser.mjs","sourceRoot":"","sources":["../../src/sha256-browser.mts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AA6C5E,IAAI,YAAsC,CAAC;AAE3C;;;GAGG;AACH,SAAS,SAAS;IAChB,IAAI,YAAY,EAAE,CAAC;QACjB,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;QACxC,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;IACvF,CAAC;IAED,YAAY,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC;IAClC,OAAO,YAAY,CAAC;AACtB,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,GAAW,EACX,YAAoB,EACpB,QAA0B;IAE1B,MAAM,MAAM,GAAG,SAAS,EAAE,CAAC;IAC3B,MAAM,QAAQ,GAAG,kBAAkB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;IACnD,MAAM,iBAAiB,GAAG,kBAAkB,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;IAEpE,MAAM,SAAS,GAAG,MAAM,MAAM,CAAC,SAAS,CACtC,KAAK,EACL,QAAQ,EACR;QACE,IAAI,EAAE,MAAM;QACZ,IAAI,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE;KAC1B,EACD,KAAK,EACL,CAAC,MAAM,CAAC,CACT,CAAC;IACF,MAAM,SAAS,GAAG,MAAM,MAAM,CAAC,IAAI,CACjC;QACE,IAAI,EAAE,MAAM;QACZ,IAAI,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE;KAC1B,EACD,SAAS,EACT,iBAAiB,CAClB,CAAC;IAEF,OAAO,kBAAkB,CAAC,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,QAAQ,CAAC,CAAC;AACjE,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,OAAe,EACf,QAA0B;IAE1B,MAAM,YAAY,GAAG,kBAAkB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1D,MAAM,MAAM,GAAG,MAAM,SAAS,EAAE,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,EAAE,YAAY,CAAC,CAAC;IAE3E,OAAO,kBAAkB,CAAC,IAAI,UAAU,CAAC,MAAM,CAAC,EAAE,QAAQ,CAAC,CAAC;AAC9D,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { stringToUint8Array, uint8ArrayToString } from \"./bytesEncoding.js\";\n\n// stubs for browser self.crypto\ninterface JsonWebKey {}\ninterface CryptoKey {}\ntype KeyUsage =\n | \"decrypt\"\n | \"deriveBits\"\n | \"deriveKey\"\n | \"encrypt\"\n | \"sign\"\n | \"unwrapKey\"\n | \"verify\"\n 
| \"wrapKey\";\ninterface Algorithm {\n name: string;\n}\ninterface SubtleCrypto {\n importKey(\n format: string,\n keyData: JsonWebKey,\n algorithm: HmacImportParams,\n extractable: boolean,\n usage: KeyUsage[],\n ): Promise;\n sign(\n algorithm: HmacImportParams,\n key: CryptoKey,\n data: ArrayBufferView | ArrayBuffer,\n ): Promise;\n digest(algorithm: Algorithm, data: ArrayBufferView | ArrayBuffer): Promise;\n}\ninterface Crypto {\n readonly subtle: SubtleCrypto;\n getRandomValues(array: T): T;\n}\ndeclare const self: {\n crypto: Crypto;\n};\ninterface HmacImportParams {\n name: string;\n hash: Algorithm;\n length?: number;\n}\n\nlet subtleCrypto: SubtleCrypto | undefined;\n\n/**\n * Returns a cached reference to the Web API crypto.subtle object.\n * @internal\n */\nfunction getCrypto(): SubtleCrypto {\n if (subtleCrypto) {\n return subtleCrypto;\n }\n\n if (!self.crypto || !self.crypto.subtle) {\n throw new Error(\"Your browser environment does not support cryptography functions.\");\n }\n\n subtleCrypto = self.crypto.subtle;\n return subtleCrypto;\n}\n\n/**\n * Generates a SHA-256 HMAC signature.\n * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash.\n * @param stringToSign - The data to be signed.\n * @param encoding - The textual encoding to use for the returned HMAC digest.\n */\nexport async function computeSha256Hmac(\n key: string,\n stringToSign: string,\n encoding: \"base64\" | \"hex\",\n): Promise {\n const crypto = getCrypto();\n const keyBytes = stringToUint8Array(key, \"base64\");\n const stringToSignBytes = stringToUint8Array(stringToSign, \"utf-8\");\n\n const cryptoKey = await crypto.importKey(\n \"raw\",\n keyBytes,\n {\n name: \"HMAC\",\n hash: { name: \"SHA-256\" },\n },\n false,\n [\"sign\"],\n );\n const signature = await crypto.sign(\n {\n name: \"HMAC\",\n hash: { name: \"SHA-256\" },\n },\n cryptoKey,\n stringToSignBytes,\n );\n\n return uint8ArrayToString(new Uint8Array(signature), encoding);\n}\n\n/**\n * Generates a SHA-256 hash.\n * @param content - The data to be included in the hash.\n * @param encoding - The textual encoding to use for the returned hash.\n */\nexport async function computeSha256Hash(\n content: string,\n encoding: \"base64\" | \"hex\",\n): Promise {\n const contentBytes = stringToUint8Array(content, \"utf-8\");\n const digest = await getCrypto().digest({ name: \"SHA-256\" }, contentBytes);\n\n return uint8ArrayToString(new Uint8Array(digest), encoding);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/sha256.d.ts b/node_modules/@azure/core-util/dist/browser/sha256.d.ts new file mode 100644 index 0000000..ca929e5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/sha256.d.ts @@ -0,0 +1,14 @@ +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +export declare function computeSha256Hmac(key: string, stringToSign: string, encoding: "base64" | "hex"): Promise; +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. 
+ */ +export declare function computeSha256Hash(content: string, encoding: "base64" | "hex"): Promise; +//# sourceMappingURL=sha256-browser.d.mts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/sha256.js b/node_modules/@azure/core-util/dist/browser/sha256.js new file mode 100644 index 0000000..6465802 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/sha256.js @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { stringToUint8Array, uint8ArrayToString } from "./bytesEncoding.js"; +let subtleCrypto; +/** + * Returns a cached reference to the Web API crypto.subtle object. + * @internal + */ +function getCrypto() { + if (subtleCrypto) { + return subtleCrypto; + } + if (!self.crypto || !self.crypto.subtle) { + throw new Error("Your browser environment does not support cryptography functions."); + } + subtleCrypto = self.crypto.subtle; + return subtleCrypto; +} +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +export async function computeSha256Hmac(key, stringToSign, encoding) { + const crypto = getCrypto(); + const keyBytes = stringToUint8Array(key, "base64"); + const stringToSignBytes = stringToUint8Array(stringToSign, "utf-8"); + const cryptoKey = await crypto.importKey("raw", keyBytes, { + name: "HMAC", + hash: { name: "SHA-256" }, + }, false, ["sign"]); + const signature = await crypto.sign({ + name: "HMAC", + hash: { name: "SHA-256" }, + }, cryptoKey, stringToSignBytes); + return uint8ArrayToString(new Uint8Array(signature), encoding); +} +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. + */ +export async function computeSha256Hash(content, encoding) { + const contentBytes = stringToUint8Array(content, "utf-8"); + const digest = await getCrypto().digest({ name: "SHA-256" }, contentBytes); + return uint8ArrayToString(new Uint8Array(digest), encoding); +} +//# sourceMappingURL=sha256-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/typeGuards.d.ts b/node_modules/@azure/core-util/dist/browser/typeGuards.d.ts new file mode 100644 index 0000000..50ccc5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/typeGuards.d.ts @@ -0,0 +1,18 @@ +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +export declare function isDefined(thing: T | undefined | null): thing is T; +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +export declare function isObjectWithProperties(thing: Thing, properties: PropertyName[]): thing is Thing & Record; +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. 
+ */ +export declare function objectHasProperty(thing: Thing, property: PropertyName): thing is Thing & Record; +//# sourceMappingURL=typeGuards.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/typeGuards.d.ts.map b/node_modules/@azure/core-util/dist/browser/typeGuards.d.ts.map new file mode 100644 index 0000000..f20f041 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/typeGuards.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"typeGuards.d.ts","sourceRoot":"","sources":["../../src/typeGuards.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,SAAS,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,GAAG,SAAS,GAAG,IAAI,GAAG,KAAK,IAAI,CAAC,CAEpE;AAED;;;;GAIG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,YAAY,SAAS,MAAM,EACvE,KAAK,EAAE,KAAK,EACZ,UAAU,EAAE,YAAY,EAAE,GACzB,KAAK,IAAI,KAAK,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,CAYhD;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,YAAY,SAAS,MAAM,EAClE,KAAK,EAAE,KAAK,EACZ,QAAQ,EAAE,YAAY,GACrB,KAAK,IAAI,KAAK,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,CAIhD"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/typeGuards.js b/node_modules/@azure/core-util/dist/browser/typeGuards.js new file mode 100644 index 0000000..f45852d --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/typeGuards.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +export function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +export function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. 
+ */ +export function objectHasProperty(thing, property) { + return (isDefined(thing) && typeof thing === "object" && property in thing); +} +//# sourceMappingURL=typeGuards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/typeGuards.js.map b/node_modules/@azure/core-util/dist/browser/typeGuards.js.map new file mode 100644 index 0000000..f6586c3 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/typeGuards.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typeGuards.js","sourceRoot":"","sources":["../../src/typeGuards.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,SAAS,CAAI,KAA2B;IACtD,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,sBAAsB,CACpC,KAAY,EACZ,UAA0B;IAE1B,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,CAAC;QACnD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,UAAU,EAAE,CAAC;QAClC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC;YACxC,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAC/B,KAAY,EACZ,QAAsB;IAEtB,OAAO,CACL,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAK,KAAiC,CAChG,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n */\nexport function isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n */\nexport function isObjectWithProperties(\n thing: Thing,\n properties: PropertyName[],\n): thing is Thing & Record {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n\n for (const property of properties) {\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n */\nexport function objectHasProperty(\n thing: Thing,\n property: PropertyName,\n): thing is Thing & Record {\n return (\n isDefined(thing) && typeof thing === \"object\" && property in (thing as Record)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils-browser.d.mts.map b/node_modules/@azure/core-util/dist/browser/uuidUtils-browser.d.mts.map new file mode 100644 index 0000000..35be76c --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils-browser.d.mts.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils-browser.d.mts","sourceRoot":"","sources":["../../src/uuidUtils-browser.mts"],"names":[],"mappings":"AAmBA;;;;GAIG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAEnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils-browser.mjs.map b/node_modules/@azure/core-util/dist/browser/uuidUtils-browser.mjs.map new file mode 100644 index 0000000..990f11d --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils-browser.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"uuidUtils-browser.mjs","sourceRoot":"","sources":["../../src/uuidUtils-browser.mts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAUrD,gEAAgE;AAChE,MAAM,YAAY,GAChB,OAAO,UAAU,EAAE,MAAM,EAAE,UAAU,KAAK,UAAU;IAClD,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IACtD,CAAC,CAAC,YAAY,CAAC;AAEnB;;;;GAIG;AACH,MAAM,UAAU,UAAU;IACxB,OAAO,YAAY,EAAE,CAAC;AACxB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { generateUUID } from \"./uuidUtils.common.js\";\n\ninterface Crypto {\n randomUUID(): string;\n}\n\ndeclare const globalThis: {\n crypto: Crypto;\n};\n\n// NOTE: This could be undefined if not used in a secure context\nconst uuidFunction =\n typeof globalThis?.crypto?.randomUUID === \"function\"\n ? globalThis.crypto.randomUUID.bind(globalThis.crypto)\n : generateUUID;\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function randomUUID(): string {\n return uuidFunction();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils.common.d.ts b/node_modules/@azure/core-util/dist/browser/uuidUtils.common.d.ts new file mode 100644 index 0000000..8f1c9ba --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils.common.d.ts @@ -0,0 +1,13 @@ +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUUID(): string; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export declare function randomUUID(): string; +//# sourceMappingURL=uuidUtils.common.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils.common.d.ts.map b/node_modules/@azure/core-util/dist/browser/uuidUtils.common.d.ts.map new file mode 100644 index 0000000..0d19e4a --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils.common.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.common.d.ts","sourceRoot":"","sources":["../../src/uuidUtils.common.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAqBrC;AAED;;;;GAIG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAEnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils.common.js b/node_modules/@azure/core-util/dist/browser/uuidUtils.common.js new file mode 100644 index 0000000..8484013 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils.common.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export function generateUUID() { + let uuid = ""; + for (let i = 0; i < 32; i++) { + // Generate a random number between 0 and 15 + const randomNumber = Math.floor(Math.random() * 16); + // Set the UUID version to 4 in the 13th position + if (i === 12) { + uuid += "4"; + } + else if (i === 16) { + // Set the UUID variant to "10" in the 17th position + uuid += (randomNumber & 0x3) | 0x8; + } + else { + // Add a random hexadecimal digit to the UUID string + uuid += randomNumber.toString(16); + } + // Add hyphens to the UUID string at the appropriate positions + if (i === 7 || i === 11 || i === 15 || i === 19) { + uuid += "-"; + } + } + return uuid; +} +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. 
+ */ +export function randomUUID() { + return generateUUID(); +} +//# sourceMappingURL=uuidUtils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils.common.js.map b/node_modules/@azure/core-util/dist/browser/uuidUtils.common.js.map new file mode 100644 index 0000000..78e1c16 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.common.js","sourceRoot":"","sources":["../../src/uuidUtils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,IAAI,IAAI,GAAG,EAAE,CAAC;IACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5B,4CAA4C;QAC5C,MAAM,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC;QACpD,iDAAiD;QACjD,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YACb,IAAI,IAAI,GAAG,CAAC;QACd,CAAC;aAAM,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YACpB,oDAAoD;YACpD,IAAI,IAAI,CAAC,YAAY,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC;QACrC,CAAC;aAAM,CAAC;YACN,oDAAoD;YACpD,IAAI,IAAI,YAAY,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;QACpC,CAAC;QACD,8DAA8D;QAC9D,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YAChD,IAAI,IAAI,GAAG,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU;IACxB,OAAO,YAAY,EAAE,CAAC;AACxB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUUID(): string {\n let uuid = \"\";\n for (let i = 0; i < 32; i++) {\n // Generate a random number between 0 and 15\n const randomNumber = Math.floor(Math.random() * 16);\n // Set the UUID version to 4 in the 13th position\n if (i === 12) {\n uuid += \"4\";\n } else if (i === 16) {\n // Set the UUID variant to \"10\" in the 17th position\n uuid += (randomNumber & 0x3) | 0x8;\n } else {\n // Add a random hexadecimal digit to the UUID string\n uuid += randomNumber.toString(16);\n }\n // Add hyphens to the UUID string at the appropriate positions\n if (i === 7 || i === 11 || i === 15 || i === 19) {\n uuid += \"-\";\n }\n }\n return uuid;\n}\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function randomUUID(): string {\n return generateUUID();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils.d.ts b/node_modules/@azure/core-util/dist/browser/uuidUtils.d.ts new file mode 100644 index 0000000..b6c76b1 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils.d.ts @@ -0,0 +1,7 @@ +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export declare function randomUUID(): string; +//# sourceMappingURL=uuidUtils-browser.d.mts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils.js b/node_modules/@azure/core-util/dist/browser/uuidUtils.js new file mode 100644 index 0000000..d062906 --- /dev/null +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils.js @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { generateUUID } from "./uuidUtils.common.js"; +// NOTE: This could be undefined if not used in a secure context +const uuidFunction = typeof globalThis?.crypto?.randomUUID === "function" + ? 
globalThis.crypto.randomUUID.bind(globalThis.crypto) + : generateUUID; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export function randomUUID() { + return uuidFunction(); +} +//# sourceMappingURL=uuidUtils-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/aborterUtils.d.ts b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.d.ts new file mode 100644 index 0000000..89a2f98 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.d.ts @@ -0,0 +1,27 @@ +import type { AbortSignalLike } from "@azure/abort-controller"; +/** + * Options related to abort controller. + */ +export interface AbortOptions { + /** + * The abortSignal associated with containing operation. + */ + abortSignal?: AbortSignalLike; + /** + * The abort error message associated with containing operation. + */ + abortErrorMsg?: string; +} +/** + * Represents a function that returns a promise that can be aborted. + */ +export type AbortablePromiseBuilder = (abortOptions: { + abortSignal?: AbortSignalLike; +}) => Promise; +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. + */ +export declare function cancelablePromiseRace(abortablePromiseBuilders: AbortablePromiseBuilder[], options?: { + abortSignal?: AbortSignalLike; +}): Promise; +//# sourceMappingURL=aborterUtils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/aborterUtils.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.d.ts.map new file mode 100644 index 0000000..e7f705b --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"aborterUtils.d.ts","sourceRoot":"","sources":["../../src/aborterUtils.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE/D;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED;;GAEG;AACH,MAAM,MAAM,uBAAuB,CAAC,CAAC,IAAI,CAAC,YAAY,EAAE;IACtD,WAAW,CAAC,EAAE,eAAe,CAAC;CAC/B,KAAK,OAAO,CAAC,CAAC,CAAC,CAAC;AAEjB;;GAEG;AACH,wBAAsB,qBAAqB,CAAC,CAAC,SAAS,OAAO,EAAE,EAC7D,wBAAwB,EAAE,uBAAuB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,EAC9D,OAAO,CAAC,EAAE;IAAE,WAAW,CAAC,EAAE,eAAe,CAAA;CAAE,GAC1C,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAcpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js new file mode 100644 index 0000000..d4b87c4 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js @@ -0,0 +1,24 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.cancelablePromiseRace = void 0; +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. 
+ */ +async function cancelablePromiseRace(abortablePromiseBuilders, options) { + const aborter = new AbortController(); + function abortHandler() { + aborter.abort(); + } + options?.abortSignal?.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); + } + finally { + aborter.abort(); + options?.abortSignal?.removeEventListener("abort", abortHandler); + } +} +exports.cancelablePromiseRace = cancelablePromiseRace; +//# sourceMappingURL=aborterUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js.map b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js.map new file mode 100644 index 0000000..f214ad2 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"aborterUtils.js","sourceRoot":"","sources":["../../src/aborterUtils.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAyBlC;;GAEG;AACI,KAAK,UAAU,qBAAqB,CACzC,wBAA8D,EAC9D,OAA2C;IAE3C,MAAM,OAAO,GAAG,IAAI,eAAe,EAAE,CAAC;IACtC,SAAS,YAAY;QACnB,OAAO,CAAC,KAAK,EAAE,CAAC;IAClB,CAAC;IACD,OAAO,EAAE,WAAW,EAAE,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IAC9D,IAAI,CAAC;QACH,OAAO,MAAM,OAAO,CAAC,IAAI,CACvB,wBAAwB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CACxE,CAAC;IACJ,CAAC;YAAS,CAAC;QACT,OAAO,CAAC,KAAK,EAAE,CAAC;QAChB,OAAO,EAAE,WAAW,EAAE,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IACnE,CAAC;AACH,CAAC;AAjBD,sDAiBC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport type { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options related to abort controller.\n */\nexport interface AbortOptions {\n /**\n * The abortSignal associated with containing operation.\n */\n abortSignal?: AbortSignalLike;\n /**\n * The abort error message associated with containing operation.\n */\n abortErrorMsg?: string;\n}\n\n/**\n * Represents a function that returns a promise that can be aborted.\n */\nexport type AbortablePromiseBuilder = (abortOptions: {\n abortSignal?: AbortSignalLike;\n}) => Promise;\n\n/**\n * promise.race() wrapper that aborts rest of promises as soon as the first promise settles.\n */\nexport async function cancelablePromiseRace(\n abortablePromiseBuilders: AbortablePromiseBuilder[],\n options?: { abortSignal?: AbortSignalLike },\n): Promise {\n const aborter = new AbortController();\n function abortHandler(): void {\n aborter.abort();\n }\n options?.abortSignal?.addEventListener(\"abort\", abortHandler);\n try {\n return await Promise.race(\n abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal })),\n );\n } finally {\n aborter.abort();\n options?.abortSignal?.removeEventListener(\"abort\", abortHandler);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.d.ts b/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.d.ts new file mode 100644 index 0000000..48a9754 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.d.ts @@ -0,0 +1,17 @@ +/** The supported character encoding type */ +export type EncodingType = "utf-8" | "base64" | "base64url" | "hex"; +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +export declare function 
uint8ArrayToString(bytes: Uint8Array, format: EncodingType): string; +/** + * The helper that transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +export declare function stringToUint8Array(value: string, format: EncodingType): Uint8Array; +//# sourceMappingURL=bytesEncoding.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.d.ts.map new file mode 100644 index 0000000..700cf2c --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bytesEncoding.d.ts","sourceRoot":"","sources":["../../src/bytesEncoding.ts"],"names":[],"mappings":"AAGA,4CAA4C;AAC5C,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,QAAQ,GAAG,WAAW,GAAG,KAAK,CAAC;AAEpE;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,YAAY,GAAG,MAAM,CAElF;AAED;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,GAAG,UAAU,CAElF"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.js b/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.js new file mode 100644 index 0000000..f320d9a --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.js @@ -0,0 +1,26 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.stringToUint8Array = exports.uint8ArrayToString = void 0; +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); +} +exports.uint8ArrayToString = uint8ArrayToString; +/** + * The helper that transforms string to specific character encoded bytes array. 
+ * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +function stringToUint8Array(value, format) { + return Buffer.from(value, format); +} +exports.stringToUint8Array = stringToUint8Array; +//# sourceMappingURL=bytesEncoding.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.js.map b/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.js.map new file mode 100644 index 0000000..99e337e --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/bytesEncoding.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bytesEncoding.js","sourceRoot":"","sources":["../../src/bytesEncoding.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAKlC;;;;;GAKG;AACH,SAAgB,kBAAkB,CAAC,KAAiB,EAAE,MAAoB;IACxE,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AAC7C,CAAC;AAFD,gDAEC;AAED;;;;;GAKG;AACH,SAAgB,kBAAkB,CAAC,KAAa,EAAE,MAAoB;IACpE,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AACpC,CAAC;AAFD,gDAEC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/** The supported character encoding type */\nexport type EncodingType = \"utf-8\" | \"base64\" | \"base64url\" | \"hex\";\n\n/**\n * The helper that transforms bytes with specific character encoding into string\n * @param bytes - the uint8array bytes\n * @param format - the format we use to encode the byte\n * @returns a string of the encoded string\n */\nexport function uint8ArrayToString(bytes: Uint8Array, format: EncodingType): string {\n return Buffer.from(bytes).toString(format);\n}\n\n/**\n * The helper that transforms string to specific character encoded bytes array.\n * @param value - the string to be converted\n * @param format - the format we use to decode the value\n * @returns a uint8array\n */\nexport function stringToUint8Array(value: string, format: EncodingType): Uint8Array {\n return Buffer.from(value, format);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.d.ts b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.d.ts new file mode 100644 index 0000000..63273c1 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.d.ts @@ -0,0 +1,25 @@ +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +export declare const isBrowser: boolean; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +export declare const isWebWorker: boolean; +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +export declare const isDeno: boolean; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +export declare const isBun: boolean; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export declare const isNode: boolean; +/** + * A constant that indicates whether the environment the code is running is in React-Native. 
+ */ +export declare const isReactNative: boolean; +//# sourceMappingURL=checkEnvironment.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.d.ts.map new file mode 100644 index 0000000..ccff768 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"checkEnvironment.d.ts","sourceRoot":"","sources":["../../src/checkEnvironment.ts"],"names":[],"mappings":"AAoCA;;GAEG;AAEH,eAAO,MAAM,SAAS,SAA0E,CAAC;AAEjG;;GAEG;AACH,eAAO,MAAM,WAAW,SAKiC,CAAC;AAE1D;;GAEG;AACH,eAAO,MAAM,MAAM,SAGuB,CAAC;AAE3C;;GAEG;AACH,eAAO,MAAM,KAAK,SAAmE,CAAC;AAEtF;;GAEG;AACH,eAAO,MAAM,MAAM,SAMX,CAAC;AAET;;GAEG;AAEH,eAAO,MAAM,aAAa,SACgD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js new file mode 100644 index 0000000..7cfd4f5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js @@ -0,0 +1,43 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isReactNative = exports.isNode = exports.isBun = exports.isDeno = exports.isWebWorker = exports.isBrowser = void 0; +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +// eslint-disable-next-line @azure/azure-sdk/ts-no-window +exports.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +exports.isWebWorker = typeof self === "object" && + typeof self?.importScripts === "function" && + (self.constructor?.name === "DedicatedWorkerGlobalScope" || + self.constructor?.name === "ServiceWorkerGlobalScope" || + self.constructor?.name === "SharedWorkerGlobalScope"); +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +exports.isDeno = typeof Deno !== "undefined" && + typeof Deno.version !== "undefined" && + typeof Deno.version.deno !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +exports.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +exports.isNode = typeof globalThis.process !== "undefined" && + Boolean(globalThis.process.version) && + Boolean(globalThis.process.versions?.node) && + // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions + !exports.isDeno && + !exports.isBun; +/** + * A constant that indicates whether the environment the code is running is in React-Native. 
+ */ +// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js +exports.isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; +//# sourceMappingURL=checkEnvironment.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js.map b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js.map new file mode 100644 index 0000000..6a3533c --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js.map @@ -0,0 +1 @@ +{"version":3,"file":"checkEnvironment.js","sourceRoot":"","sources":["../../src/checkEnvironment.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAmClC;;GAEG;AACH,yDAAyD;AAC5C,QAAA,SAAS,GAAG,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,WAAW,CAAC;AAEjG;;GAEG;AACU,QAAA,WAAW,GACtB,OAAO,IAAI,KAAK,QAAQ;IACxB,OAAO,IAAI,EAAE,aAAa,KAAK,UAAU;IACzC,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,4BAA4B;QACtD,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,0BAA0B;QACrD,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,yBAAyB,CAAC,CAAC;AAE1D;;GAEG;AACU,QAAA,MAAM,GACjB,OAAO,IAAI,KAAK,WAAW;IAC3B,OAAO,IAAI,CAAC,OAAO,KAAK,WAAW;IACnC,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,WAAW,CAAC;AAE3C;;GAEG;AACU,QAAA,KAAK,GAAG,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,WAAW,CAAC;AAEtF;;GAEG;AACU,QAAA,MAAM,GACjB,OAAO,UAAU,CAAC,OAAO,KAAK,WAAW;IACzC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC;IACnC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC;IAC1C,+HAA+H;IAC/H,CAAC,cAAM;IACP,CAAC,aAAK,CAAC;AAET;;GAEG;AACH,4GAA4G;AAC/F,QAAA,aAAa,GACxB,OAAO,SAAS,KAAK,WAAW,IAAI,SAAS,EAAE,OAAO,KAAK,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ninterface Window {\n document: unknown;\n}\n\ninterface DedicatedWorkerGlobalScope {\n constructor: {\n name: string;\n };\n\n importScripts: (...paths: string[]) => void;\n}\n\ninterface Navigator {\n product: string;\n}\n\ninterface DenoGlobal {\n version: {\n deno: string;\n };\n}\n\ninterface BunGlobal {\n version: string;\n}\n\n// eslint-disable-next-line @azure/azure-sdk/ts-no-window\ndeclare const window: Window;\ndeclare const self: DedicatedWorkerGlobalScope;\ndeclare const Deno: DenoGlobal;\ndeclare const Bun: BunGlobal;\ndeclare const navigator: Navigator;\n\n/**\n * A constant that indicates whether the environment the code is running is a Web Browser.\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-no-window\nexport const isBrowser = typeof window !== \"undefined\" && typeof window.document !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is a Web Worker.\n */\nexport const isWebWorker =\n typeof self === \"object\" &&\n typeof self?.importScripts === \"function\" &&\n (self.constructor?.name === \"DedicatedWorkerGlobalScope\" ||\n self.constructor?.name === \"ServiceWorkerGlobalScope\" ||\n self.constructor?.name === \"SharedWorkerGlobalScope\");\n\n/**\n * A constant that indicates whether the environment the code is running is Deno.\n */\nexport const isDeno =\n typeof Deno !== \"undefined\" &&\n typeof Deno.version !== \"undefined\" &&\n typeof Deno.version.deno !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is Bun.sh.\n */\nexport const isBun = typeof Bun !== \"undefined\" && typeof Bun.version !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is Node.JS.\n */\nexport const isNode =\n typeof globalThis.process !== 
\"undefined\" &&\n Boolean(globalThis.process.version) &&\n Boolean(globalThis.process.versions?.node) &&\n // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions\n !isDeno &&\n !isBun;\n\n/**\n * A constant that indicates whether the environment the code is running is in React-Native.\n */\n// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js\nexport const isReactNative =\n typeof navigator !== \"undefined\" && navigator?.product === \"ReactNative\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.d.ts b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.d.ts new file mode 100644 index 0000000..183c705 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.d.ts @@ -0,0 +1,16 @@ +import type { AbortOptions } from "./aborterUtils.js"; +/** + * Options for the createAbortablePromise function. + */ +export interface CreateAbortablePromiseOptions extends AbortOptions { + /** A function to be called if the promise was aborted */ + cleanupBeforeAbort?: () => void; +} +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +export declare function createAbortablePromise(buildPromise: (resolve: (value: T | PromiseLike) => void, reject: (reason?: any) => void) => void, options?: CreateAbortablePromiseOptions): Promise; +//# sourceMappingURL=createAbortablePromise.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.d.ts.map new file mode 100644 index 0000000..705d2e9 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"createAbortablePromise.d.ts","sourceRoot":"","sources":["../../src/createAbortablePromise.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAEtD;;GAEG;AACH,MAAM,WAAW,6BAA8B,SAAQ,YAAY;IACjE,yDAAyD;IACzD,kBAAkB,CAAC,EAAE,MAAM,IAAI,CAAC;CACjC;AAED;;;;;GAKG;AACH,wBAAgB,sBAAsB,CAAC,CAAC,EACtC,YAAY,EAAE,CACZ,OAAO,EAAE,CAAC,KAAK,EAAE,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,KAAK,IAAI,EAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,IAAI,KAC3B,IAAI,EACT,OAAO,CAAC,EAAE,6BAA6B,GACtC,OAAO,CAAC,CAAC,CAAC,CAiCZ"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js new file mode 100644 index 0000000..a3fdf1d --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js @@ -0,0 +1,46 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createAbortablePromise = void 0; +const abort_controller_1 = require("@azure/abort-controller"); +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. 
+ */ +function createAbortablePromise(buildPromise, options) { + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {}; + return new Promise((resolve, reject) => { + function rejectOnAbort() { + reject(new abort_controller_1.AbortError(abortErrorMsg ?? "The operation was aborted.")); + } + function removeListeners() { + abortSignal?.removeEventListener("abort", onAbort); + } + function onAbort() { + cleanupBeforeAbort?.(); + removeListeners(); + rejectOnAbort(); + } + if (abortSignal?.aborted) { + return rejectOnAbort(); + } + try { + buildPromise((x) => { + removeListeners(); + resolve(x); + }, (x) => { + removeListeners(); + reject(x); + }); + } + catch (err) { + reject(err); + } + abortSignal?.addEventListener("abort", onAbort); + }); +} +exports.createAbortablePromise = createAbortablePromise; +//# sourceMappingURL=createAbortablePromise.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js.map b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js.map new file mode 100644 index 0000000..ce08d93 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createAbortablePromise.js","sourceRoot":"","sources":["../../src/createAbortablePromise.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,8DAAqD;AAWrD;;;;;GAKG;AACH,SAAgB,sBAAsB,CACpC,YAGS,EACT,OAAuC;IAEvC,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,aAAa,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACzE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,SAAS,aAAa;YACpB,MAAM,CAAC,IAAI,6BAAU,CAAC,aAAa,IAAI,4BAA4B,CAAC,CAAC,CAAC;QACxE,CAAC;QACD,SAAS,eAAe;YACtB,WAAW,EAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACrD,CAAC;QACD,SAAS,OAAO;YACd,kBAAkB,EAAE,EAAE,CAAC;YACvB,eAAe,EAAE,CAAC;YAClB,aAAa,EAAE,CAAC;QAClB,CAAC;QACD,IAAI,WAAW,EAAE,OAAO,EAAE,CAAC;YACzB,OAAO,aAAa,EAAE,CAAC;QACzB,CAAC;QACD,IAAI,CAAC;YACH,YAAY,CACV,CAAC,CAAC,EAAE,EAAE;gBACJ,eAAe,EAAE,CAAC;gBAClB,OAAO,CAAC,CAAC,CAAC,CAAC;YACb,CAAC,EACD,CAAC,CAAC,EAAE,EAAE;gBACJ,eAAe,EAAE,CAAC;gBAClB,MAAM,CAAC,CAAC,CAAC,CAAC;YACZ,CAAC,CACF,CAAC;QACJ,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC;QACD,WAAW,EAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAClD,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,wDAuCC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from \"@azure/abort-controller\";\nimport type { AbortOptions } from \"./aborterUtils.js\";\n\n/**\n * Options for the createAbortablePromise function.\n */\nexport interface CreateAbortablePromiseOptions extends AbortOptions {\n /** A function to be called if the promise was aborted */\n cleanupBeforeAbort?: () => void;\n}\n\n/**\n * Creates an abortable promise.\n * @param buildPromise - A function that takes the resolve and reject functions as parameters.\n * @param options - The options for the abortable promise.\n * @returns A promise that can be aborted.\n */\nexport function createAbortablePromise(\n buildPromise: (\n resolve: (value: T | PromiseLike) => void,\n reject: (reason?: any) => void,\n ) => void,\n options?: CreateAbortablePromiseOptions,\n): Promise {\n const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {};\n return new Promise((resolve, reject) => {\n function rejectOnAbort(): void {\n reject(new AbortError(abortErrorMsg ?? 
\"The operation was aborted.\"));\n }\n function removeListeners(): void {\n abortSignal?.removeEventListener(\"abort\", onAbort);\n }\n function onAbort(): void {\n cleanupBeforeAbort?.();\n removeListeners();\n rejectOnAbort();\n }\n if (abortSignal?.aborted) {\n return rejectOnAbort();\n }\n try {\n buildPromise(\n (x) => {\n removeListeners();\n resolve(x);\n },\n (x) => {\n removeListeners();\n reject(x);\n },\n );\n } catch (err) {\n reject(err);\n }\n abortSignal?.addEventListener(\"abort\", onAbort);\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/delay.d.ts b/node_modules/@azure/core-util/dist/commonjs/delay.d.ts new file mode 100644 index 0000000..9dc4b5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/delay.d.ts @@ -0,0 +1,14 @@ +import type { AbortOptions } from "./aborterUtils.js"; +/** + * Options for support abort functionality for the delay method + */ +export interface DelayOptions extends AbortOptions { +} +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +export declare function delay(timeInMs: number, options?: DelayOptions): Promise; +//# sourceMappingURL=delay.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/delay.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/delay.d.ts.map new file mode 100644 index 0000000..8d2d31b --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/delay.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.d.ts","sourceRoot":"","sources":["../../src/delay.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAKtD;;GAEG;AACH,MAAM,WAAW,YAAa,SAAQ,YAAY;CAAG;AAErD;;;;;GAKG;AACH,wBAAgB,KAAK,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,CAa7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/delay.js b/node_modules/@azure/core-util/dist/commonjs/delay.js new file mode 100644 index 0000000..0183ef6 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/delay.js @@ -0,0 +1,26 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.delay = void 0; +const createAbortablePromise_js_1 = require("./createAbortablePromise.js"); +const StandardAbortMessage = "The delay was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +function delay(timeInMs, options) { + let token; + const { abortSignal, abortErrorMsg } = options ?? {}; + return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve) => { + token = setTimeout(resolve, timeInMs); + }, { + cleanupBeforeAbort: () => clearTimeout(token), + abortSignal, + abortErrorMsg: abortErrorMsg ?? 
StandardAbortMessage, + }); +} +exports.delay = delay; +//# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/delay.js.map b/node_modules/@azure/core-util/dist/commonjs/delay.js.map new file mode 100644 index 0000000..d159c60 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/delay.js.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.js","sourceRoot":"","sources":["../../src/delay.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAGlC,2EAAqE;AAErE,MAAM,oBAAoB,GAAG,wBAAwB,CAAC;AAOtD;;;;;GAKG;AACH,SAAgB,KAAK,CAAC,QAAgB,EAAE,OAAsB;IAC5D,IAAI,KAAoC,CAAC;IACzC,MAAM,EAAE,WAAW,EAAE,aAAa,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACrD,OAAO,IAAA,kDAAsB,EAC3B,CAAC,OAAO,EAAE,EAAE;QACV,KAAK,GAAG,UAAU,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;IACxC,CAAC,EACD;QACE,kBAAkB,EAAE,GAAG,EAAE,CAAC,YAAY,CAAC,KAAK,CAAC;QAC7C,WAAW;QACX,aAAa,EAAE,aAAa,IAAI,oBAAoB;KACrD,CACF,CAAC;AACJ,CAAC;AAbD,sBAaC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport type { AbortOptions } from \"./aborterUtils.js\";\nimport { createAbortablePromise } from \"./createAbortablePromise.js\";\n\nconst StandardAbortMessage = \"The delay was aborted.\";\n\n/**\n * Options for support abort functionality for the delay method\n */\nexport interface DelayOptions extends AbortOptions {}\n\n/**\n * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds.\n * @param timeInMs - The number of milliseconds to be delayed.\n * @param options - The options for delay - currently abort options\n * @returns Promise that is resolved after timeInMs\n */\nexport function delay(timeInMs: number, options?: DelayOptions): Promise {\n let token: ReturnType;\n const { abortSignal, abortErrorMsg } = options ?? {};\n return createAbortablePromise(\n (resolve) => {\n token = setTimeout(resolve, timeInMs);\n },\n {\n cleanupBeforeAbort: () => clearTimeout(token),\n abortSignal,\n abortErrorMsg: abortErrorMsg ?? StandardAbortMessage,\n },\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/error.d.ts b/node_modules/@azure/core-util/dist/commonjs/error.d.ts new file mode 100644 index 0000000..5027f12 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/error.d.ts @@ -0,0 +1,13 @@ +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +export declare function isError(e: unknown): e is Error; +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. 
+ * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +export declare function getErrorMessage(e: unknown): string; +//# sourceMappingURL=error.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/error.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/error.d.ts.map new file mode 100644 index 0000000..d56797d --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/error.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"error.d.ts","sourceRoot":"","sources":["../../src/error.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,wBAAgB,OAAO,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,IAAI,KAAK,CAO9C;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,CAAC,EAAE,OAAO,GAAG,MAAM,CAgBlD"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/error.js b/node_modules/@azure/core-util/dist/commonjs/error.js new file mode 100644 index 0000000..a88d53d --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/error.js @@ -0,0 +1,47 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getErrorMessage = exports.isError = void 0; +const object_js_1 = require("./object.js"); +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +function isError(e) { + if ((0, object_js_1.isObject)(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; + } + return false; +} +exports.isError = isError; +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. 
+ * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +function getErrorMessage(e) { + if (isError(e)) { + return e.message; + } + else { + let stringified; + try { + if (typeof e === "object" && e) { + stringified = JSON.stringify(e); + } + else { + stringified = String(e); + } + } + catch (err) { + stringified = "[unable to stringify input]"; + } + return `Unknown error ${stringified}`; + } +} +exports.getErrorMessage = getErrorMessage; +//# sourceMappingURL=error.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/error.js.map b/node_modules/@azure/core-util/dist/commonjs/error.js.map new file mode 100644 index 0000000..b4e7363 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"error.js","sourceRoot":"","sources":["../../src/error.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,2CAAuC;AAEvC;;;GAGG;AACH,SAAgB,OAAO,CAAC,CAAU;IAChC,IAAI,IAAA,oBAAQ,EAAC,CAAC,CAAC,EAAE,CAAC;QAChB,MAAM,OAAO,GAAG,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC;QAC3C,MAAM,UAAU,GAAG,OAAO,CAAC,CAAC,OAAO,KAAK,QAAQ,CAAC;QACjD,OAAO,OAAO,IAAI,UAAU,CAAC;IAC/B,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAPD,0BAOC;AAED;;;;;GAKG;AACH,SAAgB,eAAe,CAAC,CAAU;IACxC,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACf,OAAO,CAAC,CAAC,OAAO,CAAC;IACnB,CAAC;SAAM,CAAC;QACN,IAAI,WAAmB,CAAC;QACxB,IAAI,CAAC;YACH,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,EAAE,CAAC;gBAC/B,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;iBAAM,CAAC;gBACN,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAC1B,CAAC;QACH,CAAC;QAAC,OAAO,GAAQ,EAAE,CAAC;YAClB,WAAW,GAAG,6BAA6B,CAAC;QAC9C,CAAC;QACD,OAAO,iBAAiB,WAAW,EAAE,CAAC;IACxC,CAAC;AACH,CAAC;AAhBD,0CAgBC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObject } from \"./object.js\";\n\n/**\n * Typeguard for an error object shape (has name and message)\n * @param e - Something caught by a catch clause.\n */\nexport function isError(e: unknown): e is Error {\n if (isObject(e)) {\n const hasName = typeof e.name === \"string\";\n const hasMessage = typeof e.message === \"string\";\n return hasName && hasMessage;\n }\n return false;\n}\n\n/**\n * Given what is thought to be an error object, return the message if possible.\n * If the message is missing, returns a stringified version of the input.\n * @param e - Something thrown from a try block\n * @returns The error message or a string of the input\n */\nexport function getErrorMessage(e: unknown): string {\n if (isError(e)) {\n return e.message;\n } else {\n let stringified: string;\n try {\n if (typeof e === \"object\" && e) {\n stringified = JSON.stringify(e);\n } else {\n stringified = String(e);\n }\n } catch (err: any) {\n stringified = \"[unable to stringify input]\";\n }\n return `Unknown error ${stringified}`;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/index.d.ts b/node_modules/@azure/core-util/dist/commonjs/index.d.ts new file mode 100644 index 0000000..14fada5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/index.d.ts @@ -0,0 +1,12 @@ +export { delay, type DelayOptions } from "./delay.js"; +export { type AbortOptions, cancelablePromiseRace, type AbortablePromiseBuilder, } from "./aborterUtils.js"; +export { createAbortablePromise, type CreateAbortablePromiseOptions, } from "./createAbortablePromise.js"; +export { getRandomIntegerInclusive } from "./random.js"; +export { isObject, type UnknownObject } 
from "./object.js"; +export { isError, getErrorMessage } from "./error.js"; +export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; +export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; +export { randomUUID } from "./uuidUtils.js"; +export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { uint8ArrayToString, stringToUint8Array, type EncodingType } from "./bytesEncoding.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/index.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000..77255da --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,KAAK,EAAE,KAAK,YAAY,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EACL,KAAK,YAAY,EACjB,qBAAqB,EACrB,KAAK,uBAAuB,GAC7B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,sBAAsB,EACtB,KAAK,6BAA6B,GACnC,MAAM,6BAA6B,CAAC;AACrC,OAAO,EAAE,yBAAyB,EAAE,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAE,KAAK,aAAa,EAAE,MAAM,aAAa,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAAE,SAAS,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACvF,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EACL,SAAS,EACT,KAAK,EACL,MAAM,EACN,MAAM,EACN,aAAa,EACb,WAAW,GACZ,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAE,KAAK,YAAY,EAAE,MAAM,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/index.js b/node_modules/@azure/core-util/dist/commonjs/index.js new file mode 100644 index 0000000..11e2b23 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/index.js @@ -0,0 +1,38 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.stringToUint8Array = exports.uint8ArrayToString = exports.isWebWorker = exports.isReactNative = exports.isDeno = exports.isNode = exports.isBun = exports.isBrowser = exports.randomUUID = exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = exports.computeSha256Hmac = exports.computeSha256Hash = exports.getErrorMessage = exports.isError = exports.isObject = exports.getRandomIntegerInclusive = exports.createAbortablePromise = exports.cancelablePromiseRace = exports.delay = void 0; +var delay_js_1 = require("./delay.js"); +Object.defineProperty(exports, "delay", { enumerable: true, get: function () { return delay_js_1.delay; } }); +var aborterUtils_js_1 = require("./aborterUtils.js"); +Object.defineProperty(exports, "cancelablePromiseRace", { enumerable: true, get: function () { return aborterUtils_js_1.cancelablePromiseRace; } }); +var createAbortablePromise_js_1 = require("./createAbortablePromise.js"); +Object.defineProperty(exports, "createAbortablePromise", { enumerable: true, get: function () { return createAbortablePromise_js_1.createAbortablePromise; } }); +var random_js_1 = require("./random.js"); +Object.defineProperty(exports, "getRandomIntegerInclusive", { enumerable: true, get: function () { return random_js_1.getRandomIntegerInclusive; } }); +var object_js_1 = require("./object.js"); +Object.defineProperty(exports, "isObject", { enumerable: true, get: function () { return object_js_1.isObject; } }); +var error_js_1 = require("./error.js"); +Object.defineProperty(exports, "isError", { enumerable: true, get: function () { return error_js_1.isError; } }); +Object.defineProperty(exports, "getErrorMessage", { enumerable: true, get: function () { return error_js_1.getErrorMessage; } }); +var sha256_js_1 = require("./sha256.js"); +Object.defineProperty(exports, "computeSha256Hash", { enumerable: true, get: function () { return sha256_js_1.computeSha256Hash; } }); +Object.defineProperty(exports, "computeSha256Hmac", { enumerable: true, get: function () { return sha256_js_1.computeSha256Hmac; } }); +var typeGuards_js_1 = require("./typeGuards.js"); +Object.defineProperty(exports, "isDefined", { enumerable: true, get: function () { return typeGuards_js_1.isDefined; } }); +Object.defineProperty(exports, "isObjectWithProperties", { enumerable: true, get: function () { return typeGuards_js_1.isObjectWithProperties; } }); +Object.defineProperty(exports, "objectHasProperty", { enumerable: true, get: function () { return typeGuards_js_1.objectHasProperty; } }); +var uuidUtils_js_1 = require("./uuidUtils.js"); +Object.defineProperty(exports, "randomUUID", { enumerable: true, get: function () { return uuidUtils_js_1.randomUUID; } }); +var checkEnvironment_js_1 = require("./checkEnvironment.js"); +Object.defineProperty(exports, "isBrowser", { enumerable: true, get: function () { return checkEnvironment_js_1.isBrowser; } }); +Object.defineProperty(exports, "isBun", { enumerable: true, get: function () { return checkEnvironment_js_1.isBun; } }); +Object.defineProperty(exports, "isNode", { enumerable: true, get: function () { return checkEnvironment_js_1.isNode; } }); +Object.defineProperty(exports, "isDeno", { enumerable: true, get: function () { return checkEnvironment_js_1.isDeno; } }); +Object.defineProperty(exports, "isReactNative", { enumerable: true, get: function () { return checkEnvironment_js_1.isReactNative; } }); +Object.defineProperty(exports, "isWebWorker", { enumerable: true, get: 
function () { return checkEnvironment_js_1.isWebWorker; } }); +var bytesEncoding_js_1 = require("./bytesEncoding.js"); +Object.defineProperty(exports, "uint8ArrayToString", { enumerable: true, get: function () { return bytesEncoding_js_1.uint8ArrayToString; } }); +Object.defineProperty(exports, "stringToUint8Array", { enumerable: true, get: function () { return bytesEncoding_js_1.stringToUint8Array; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/index.js.map b/node_modules/@azure/core-util/dist/commonjs/index.js.map new file mode 100644 index 0000000..5dfd4db --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,uCAAsD;AAA7C,iGAAA,KAAK,OAAA;AACd,qDAI2B;AAFzB,wHAAA,qBAAqB,OAAA;AAGvB,yEAGqC;AAFnC,mIAAA,sBAAsB,OAAA;AAGxB,yCAAwD;AAA/C,sHAAA,yBAAyB,OAAA;AAClC,yCAA2D;AAAlD,qGAAA,QAAQ,OAAA;AACjB,uCAAsD;AAA7C,mGAAA,OAAO,OAAA;AAAE,2GAAA,eAAe,OAAA;AACjC,yCAAmE;AAA1D,8GAAA,iBAAiB,OAAA;AAAE,8GAAA,iBAAiB,OAAA;AAC7C,iDAAuF;AAA9E,0GAAA,SAAS,OAAA;AAAE,uHAAA,sBAAsB,OAAA;AAAE,kHAAA,iBAAiB,OAAA;AAC7D,+CAA4C;AAAnC,0GAAA,UAAU,OAAA;AACnB,6DAO+B;AAN7B,gHAAA,SAAS,OAAA;AACT,4GAAA,KAAK,OAAA;AACL,6GAAA,MAAM,OAAA;AACN,6GAAA,MAAM,OAAA;AACN,oHAAA,aAAa,OAAA;AACb,kHAAA,WAAW,OAAA;AAEb,uDAA+F;AAAtF,sHAAA,kBAAkB,OAAA;AAAE,sHAAA,kBAAkB,OAAA","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { delay, type DelayOptions } from \"./delay.js\";\nexport {\n type AbortOptions,\n cancelablePromiseRace,\n type AbortablePromiseBuilder,\n} from \"./aborterUtils.js\";\nexport {\n createAbortablePromise,\n type CreateAbortablePromiseOptions,\n} from \"./createAbortablePromise.js\";\nexport { getRandomIntegerInclusive } from \"./random.js\";\nexport { isObject, type UnknownObject } from \"./object.js\";\nexport { isError, getErrorMessage } from \"./error.js\";\nexport { computeSha256Hash, computeSha256Hmac } from \"./sha256.js\";\nexport { isDefined, isObjectWithProperties, objectHasProperty } from \"./typeGuards.js\";\nexport { randomUUID } from \"./uuidUtils.js\";\nexport {\n isBrowser,\n isBun,\n isNode,\n isDeno,\n isReactNative,\n isWebWorker,\n} from \"./checkEnvironment.js\";\nexport { uint8ArrayToString, stringToUint8Array, type EncodingType } from \"./bytesEncoding.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/object.d.ts b/node_modules/@azure/core-util/dist/commonjs/object.d.ts new file mode 100644 index 0000000..fc3f33a --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/object.d.ts @@ -0,0 +1,12 @@ +/** + * A generic shape for a plain JS object. + */ +export type UnknownObject = { + [s: string]: unknown; +}; +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. 
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=object.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/object.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/object.d.ts.map new file mode 100644 index 0000000..66b39bd --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/object.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"object.d.ts","sourceRoot":"","sources":["../../src/object.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,OAAO,CAAA;CAAE,CAAC;AAErD;;;GAGG;AACH,wBAAgB,QAAQ,CAAC,KAAK,EAAE,OAAO,GAAG,KAAK,IAAI,aAAa,CAQ/D"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/object.js b/node_modules/@azure/core-util/dist/commonjs/object.js new file mode 100644 index 0000000..a42af67 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/object.js @@ -0,0 +1,18 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isObject = void 0; +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +exports.isObject = isObject; +//# sourceMappingURL=object.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/object.js.map b/node_modules/@azure/core-util/dist/commonjs/object.js.map new file mode 100644 index 0000000..20acc72 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/object.js.map @@ -0,0 +1 @@ +{"version":3,"file":"object.js","sourceRoot":"","sources":["../../src/object.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAOlC;;;GAGG;AACH,SAAgB,QAAQ,CAAC,KAAc;IACrC,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;QACzB,KAAK,KAAK,IAAI;QACd,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QACrB,CAAC,CAAC,KAAK,YAAY,MAAM,CAAC;QAC1B,CAAC,CAAC,KAAK,YAAY,IAAI,CAAC,CACzB,CAAC;AACJ,CAAC;AARD,4BAQC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A generic shape for a plain JS object.\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * Helper to determine when an input is a generic JS object.\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/package.json b/node_modules/@azure/core-util/dist/commonjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-util/dist/commonjs/random.d.ts b/node_modules/@azure/core-util/dist/commonjs/random.d.ts new file mode 100644 index 0000000..9e9631a --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/random.d.ts @@ -0,0 +1,10 @@ +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. 
+ * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +export declare function getRandomIntegerInclusive(min: number, max: number): number; +//# sourceMappingURL=random.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/random.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/random.d.ts.map new file mode 100644 index 0000000..8a91316 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/random.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"random.d.ts","sourceRoot":"","sources":["../../src/random.ts"],"names":[],"mappings":"AAGA;;;;;;;GAOG;AACH,wBAAgB,yBAAyB,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,MAAM,CAS1E"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/random.js b/node_modules/@azure/core-util/dist/commonjs/random.js new file mode 100644 index 0000000..5ec7e4d --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/random.js @@ -0,0 +1,25 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRandomIntegerInclusive = void 0; +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +function getRandomIntegerInclusive(min, max) { + // Make sure inputs are integers. + min = Math.ceil(min); + max = Math.floor(max); + // Pick a random offset from zero to the size of the range. + // Since Math.random() can never return 1, we have to make the range one larger + // in order to be inclusive of the maximum value after we take the floor. + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; +} +exports.getRandomIntegerInclusive = getRandomIntegerInclusive; +//# sourceMappingURL=random.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/random.js.map b/node_modules/@azure/core-util/dist/commonjs/random.js.map new file mode 100644 index 0000000..9de05d4 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/random.js.map @@ -0,0 +1 @@ +{"version":3,"file":"random.js","sourceRoot":"","sources":["../../src/random.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC;;;;;;;GAOG;AACH,SAAgB,yBAAyB,CAAC,GAAW,EAAE,GAAW;IAChE,iCAAiC;IACjC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACrB,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACtB,2DAA2D;IAC3D,+EAA+E;IAC/E,yEAAyE;IACzE,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;IAC3D,OAAO,MAAM,GAAG,GAAG,CAAC;AACtB,CAAC;AATD,8DASC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Returns a random integer value between a lower and upper bound,\n * inclusive of both bounds.\n * Note that this uses Math.random and isn't secure. 
If you need to use\n * this for any kind of security purpose, find a better source of random.\n * @param min - The smallest integer value allowed.\n * @param max - The largest integer value allowed.\n */\nexport function getRandomIntegerInclusive(min: number, max: number): number {\n // Make sure inputs are integers.\n min = Math.ceil(min);\n max = Math.floor(max);\n // Pick a random offset from zero to the size of the range.\n // Since Math.random() can never return 1, we have to make the range one larger\n // in order to be inclusive of the maximum value after we take the floor.\n const offset = Math.floor(Math.random() * (max - min + 1));\n return offset + min;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/sha256.d.ts b/node_modules/@azure/core-util/dist/commonjs/sha256.d.ts new file mode 100644 index 0000000..a4b7b98 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/sha256.d.ts @@ -0,0 +1,14 @@ +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +export declare function computeSha256Hmac(key: string, stringToSign: string, encoding: "base64" | "hex"): Promise; +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. + */ +export declare function computeSha256Hash(content: string, encoding: "base64" | "hex"): Promise; +//# sourceMappingURL=sha256.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/sha256.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/sha256.d.ts.map new file mode 100644 index 0000000..fffa8fc --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/sha256.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"sha256.d.ts","sourceRoot":"","sources":["../../src/sha256.ts"],"names":[],"mappings":"AAKA;;;;;GAKG;AACH,wBAAsB,iBAAiB,CACrC,GAAG,EAAE,MAAM,EACX,YAAY,EAAE,MAAM,EACpB,QAAQ,EAAE,QAAQ,GAAG,KAAK,GACzB,OAAO,CAAC,MAAM,CAAC,CAIjB;AAED;;;;GAIG;AACH,wBAAsB,iBAAiB,CACrC,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE,QAAQ,GAAG,KAAK,GACzB,OAAO,CAAC,MAAM,CAAC,CAEjB"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/sha256.js b/node_modules/@azure/core-util/dist/commonjs/sha256.js new file mode 100644 index 0000000..5d3a160 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/sha256.js @@ -0,0 +1,27 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.computeSha256Hash = exports.computeSha256Hmac = void 0; +const crypto_1 = require("crypto"); +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +async function computeSha256Hmac(key, stringToSign, encoding) { + const decodedKey = Buffer.from(key, "base64"); + return (0, crypto_1.createHmac)("sha256", decodedKey).update(stringToSign).digest(encoding); +} +exports.computeSha256Hmac = computeSha256Hmac; +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. 
+ * @param encoding - The textual encoding to use for the returned hash. + */ +async function computeSha256Hash(content, encoding) { + return (0, crypto_1.createHash)("sha256").update(content).digest(encoding); +} +exports.computeSha256Hash = computeSha256Hash; +//# sourceMappingURL=sha256.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/sha256.js.map b/node_modules/@azure/core-util/dist/commonjs/sha256.js.map new file mode 100644 index 0000000..c23c4a2 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/sha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"sha256.js","sourceRoot":"","sources":["../../src/sha256.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,mCAAgD;AAEhD;;;;;GAKG;AACI,KAAK,UAAU,iBAAiB,CACrC,GAAW,EACX,YAAoB,EACpB,QAA0B;IAE1B,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;IAE9C,OAAO,IAAA,mBAAU,EAAC,QAAQ,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;AAChF,CAAC;AARD,8CAQC;AAED;;;;GAIG;AACI,KAAK,UAAU,iBAAiB,CACrC,OAAe,EACf,QAA0B;IAE1B,OAAO,IAAA,mBAAU,EAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;AAC/D,CAAC;AALD,8CAKC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHash, createHmac } from \"crypto\";\n\n/**\n * Generates a SHA-256 HMAC signature.\n * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash.\n * @param stringToSign - The data to be signed.\n * @param encoding - The textual encoding to use for the returned HMAC digest.\n */\nexport async function computeSha256Hmac(\n key: string,\n stringToSign: string,\n encoding: \"base64\" | \"hex\",\n): Promise {\n const decodedKey = Buffer.from(key, \"base64\");\n\n return createHmac(\"sha256\", decodedKey).update(stringToSign).digest(encoding);\n}\n\n/**\n * Generates a SHA-256 hash.\n * @param content - The data to be included in the hash.\n * @param encoding - The textual encoding to use for the returned hash.\n */\nexport async function computeSha256Hash(\n content: string,\n encoding: \"base64\" | \"hex\",\n): Promise {\n return createHash(\"sha256\").update(content).digest(encoding);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-util/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 0000000..22735db --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. +{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.42.3" + } + ] +} diff --git a/node_modules/@azure/core-util/dist/commonjs/typeGuards.d.ts b/node_modules/@azure/core-util/dist/commonjs/typeGuards.d.ts new file mode 100644 index 0000000..50ccc5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/typeGuards.d.ts @@ -0,0 +1,18 @@ +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +export declare function isDefined(thing: T | undefined | null): thing is T; +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. 
+ */ +export declare function isObjectWithProperties(thing: Thing, properties: PropertyName[]): thing is Thing & Record; +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + */ +export declare function objectHasProperty(thing: Thing, property: PropertyName): thing is Thing & Record; +//# sourceMappingURL=typeGuards.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/typeGuards.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/typeGuards.d.ts.map new file mode 100644 index 0000000..f20f041 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/typeGuards.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"typeGuards.d.ts","sourceRoot":"","sources":["../../src/typeGuards.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,SAAS,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,GAAG,SAAS,GAAG,IAAI,GAAG,KAAK,IAAI,CAAC,CAEpE;AAED;;;;GAIG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,YAAY,SAAS,MAAM,EACvE,KAAK,EAAE,KAAK,EACZ,UAAU,EAAE,YAAY,EAAE,GACzB,KAAK,IAAI,KAAK,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,CAYhD;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,YAAY,SAAS,MAAM,EAClE,KAAK,EAAE,KAAK,EACZ,QAAQ,EAAE,YAAY,GACrB,KAAK,IAAI,KAAK,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,CAIhD"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/typeGuards.js b/node_modules/@azure/core-util/dist/commonjs/typeGuards.js new file mode 100644 index 0000000..31d998c --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/typeGuards.js @@ -0,0 +1,40 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = void 0; +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +exports.isDefined = isDefined; +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +exports.isObjectWithProperties = isObjectWithProperties; +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. 
+ */ +function objectHasProperty(thing, property) { + return (isDefined(thing) && typeof thing === "object" && property in thing); +} +exports.objectHasProperty = objectHasProperty; +//# sourceMappingURL=typeGuards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/typeGuards.js.map b/node_modules/@azure/core-util/dist/commonjs/typeGuards.js.map new file mode 100644 index 0000000..84a0675 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/typeGuards.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typeGuards.js","sourceRoot":"","sources":["../../src/typeGuards.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC;;;GAGG;AACH,SAAgB,SAAS,CAAI,KAA2B;IACtD,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC;AAFD,8BAEC;AAED;;;;GAIG;AACH,SAAgB,sBAAsB,CACpC,KAAY,EACZ,UAA0B;IAE1B,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,CAAC;QACnD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,UAAU,EAAE,CAAC;QAClC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC;YACxC,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAfD,wDAeC;AAED;;;;GAIG;AACH,SAAgB,iBAAiB,CAC/B,KAAY,EACZ,QAAsB;IAEtB,OAAO,CACL,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAK,KAAiC,CAChG,CAAC;AACJ,CAAC;AAPD,8CAOC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n */\nexport function isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n */\nexport function isObjectWithProperties(\n thing: Thing,\n properties: PropertyName[],\n): thing is Thing & Record {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n\n for (const property of properties) {\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n */\nexport function objectHasProperty(\n thing: Thing,\n property: PropertyName,\n): thing is Thing & Record {\n return (\n isDefined(thing) && typeof thing === \"object\" && property in (thing as Record)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.d.ts b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.d.ts new file mode 100644 index 0000000..8f1c9ba --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.d.ts @@ -0,0 +1,13 @@ +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUUID(): string; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. 
+ */ +export declare function randomUUID(): string; +//# sourceMappingURL=uuidUtils.common.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.d.ts.map new file mode 100644 index 0000000..0d19e4a --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.common.d.ts","sourceRoot":"","sources":["../../src/uuidUtils.common.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAqBrC;AAED;;;;GAIG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAEnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.js b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.js new file mode 100644 index 0000000..63ea12e --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.js @@ -0,0 +1,45 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.randomUUID = exports.generateUUID = void 0; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +function generateUUID() { + let uuid = ""; + for (let i = 0; i < 32; i++) { + // Generate a random number between 0 and 15 + const randomNumber = Math.floor(Math.random() * 16); + // Set the UUID version to 4 in the 13th position + if (i === 12) { + uuid += "4"; + } + else if (i === 16) { + // Set the UUID variant to "10" in the 17th position + uuid += (randomNumber & 0x3) | 0x8; + } + else { + // Add a random hexadecimal digit to the UUID string + uuid += randomNumber.toString(16); + } + // Add hyphens to the UUID string at the appropriate positions + if (i === 7 || i === 11 || i === 15 || i === 19) { + uuid += "-"; + } + } + return uuid; +} +exports.generateUUID = generateUUID; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. 
+ */ +function randomUUID() { + return generateUUID(); +} +exports.randomUUID = randomUUID; +//# sourceMappingURL=uuidUtils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.js.map b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.js.map new file mode 100644 index 0000000..0b4d0f4 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.common.js","sourceRoot":"","sources":["../../src/uuidUtils.common.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC;;;;GAIG;AACH,SAAgB,YAAY;IAC1B,IAAI,IAAI,GAAG,EAAE,CAAC;IACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5B,4CAA4C;QAC5C,MAAM,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC;QACpD,iDAAiD;QACjD,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YACb,IAAI,IAAI,GAAG,CAAC;QACd,CAAC;aAAM,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YACpB,oDAAoD;YACpD,IAAI,IAAI,CAAC,YAAY,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC;QACrC,CAAC;aAAM,CAAC;YACN,oDAAoD;YACpD,IAAI,IAAI,YAAY,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;QACpC,CAAC;QACD,8DAA8D;QAC9D,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YAChD,IAAI,IAAI,GAAG,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AArBD,oCAqBC;AAED;;;;GAIG;AACH,SAAgB,UAAU;IACxB,OAAO,YAAY,EAAE,CAAC;AACxB,CAAC;AAFD,gCAEC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUUID(): string {\n let uuid = \"\";\n for (let i = 0; i < 32; i++) {\n // Generate a random number between 0 and 15\n const randomNumber = Math.floor(Math.random() * 16);\n // Set the UUID version to 4 in the 13th position\n if (i === 12) {\n uuid += \"4\";\n } else if (i === 16) {\n // Set the UUID variant to \"10\" in the 17th position\n uuid += (randomNumber & 0x3) | 0x8;\n } else {\n // Add a random hexadecimal digit to the UUID string\n uuid += randomNumber.toString(16);\n }\n // Add hyphens to the UUID string at the appropriate positions\n if (i === 7 || i === 11 || i === 15 || i === 19) {\n uuid += \"-\";\n }\n }\n return uuid;\n}\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function randomUUID(): string {\n return generateUUID();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.d.ts b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.d.ts new file mode 100644 index 0000000..f510a4b --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.d.ts @@ -0,0 +1,7 @@ +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. 
+ */ +export declare function randomUUID(): string; +//# sourceMappingURL=uuidUtils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.d.ts.map b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.d.ts.map new file mode 100644 index 0000000..b3d1b5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.d.ts","sourceRoot":"","sources":["../../src/uuidUtils.ts"],"names":[],"mappings":"AAmBA;;;;GAIG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAEnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js new file mode 100644 index 0000000..ae5f106 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js @@ -0,0 +1,20 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.randomUUID = void 0; +const crypto_1 = require("crypto"); +// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. +const uuidFunction = typeof globalThis?.crypto?.randomUUID === "function" + ? globalThis.crypto.randomUUID.bind(globalThis.crypto) + : crypto_1.randomUUID; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +function randomUUID() { + return uuidFunction(); +} +exports.randomUUID = randomUUID; +//# sourceMappingURL=uuidUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js.map b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js.map new file mode 100644 index 0000000..bf2e583 --- /dev/null +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.js","sourceRoot":"","sources":["../../src/uuidUtils.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC,mCAAoD;AAUpD,6FAA6F;AAC7F,MAAM,YAAY,GAChB,OAAO,UAAU,EAAE,MAAM,EAAE,UAAU,KAAK,UAAU;IAClD,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IACtD,CAAC,CAAC,mBAAY,CAAC;AAEnB;;;;GAIG;AACH,SAAgB,UAAU;IACxB,OAAO,YAAY,EAAE,CAAC;AACxB,CAAC;AAFD,gCAEC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { randomUUID as v4RandomUUID } from \"crypto\";\n\ninterface Crypto {\n randomUUID(): string;\n}\n\ndeclare const globalThis: {\n crypto: Crypto;\n};\n\n// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+.\nconst uuidFunction =\n typeof globalThis?.crypto?.randomUUID === \"function\"\n ? globalThis.crypto.randomUUID.bind(globalThis.crypto)\n : v4RandomUUID;\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function randomUUID(): string {\n return uuidFunction();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/core-util.d.ts b/node_modules/@azure/core-util/dist/core-util.d.ts new file mode 100644 index 0000000..0bdd31c --- /dev/null +++ b/node_modules/@azure/core-util/dist/core-util.d.ts @@ -0,0 +1,189 @@ +import type { AbortSignalLike } from '@azure/abort-controller'; + +/** + * Represents a function that returns a promise that can be aborted. + */ +export declare type AbortablePromiseBuilder = (abortOptions: { + abortSignal?: AbortSignalLike; +}) => Promise; + +/** + * Options related to abort controller. 
+ */ +export declare interface AbortOptions { + /** + * The abortSignal associated with containing operation. + */ + abortSignal?: AbortSignalLike; + /** + * The abort error message associated with containing operation. + */ + abortErrorMsg?: string; +} + +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. + */ +export declare function cancelablePromiseRace(abortablePromiseBuilders: AbortablePromiseBuilder[], options?: { + abortSignal?: AbortSignalLike; +}): Promise; + +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. + */ +export declare function computeSha256Hash(content: string, encoding: "base64" | "hex"): Promise; + +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +export declare function computeSha256Hmac(key: string, stringToSign: string, encoding: "base64" | "hex"): Promise; + +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +export declare function createAbortablePromise(buildPromise: (resolve: (value: T | PromiseLike) => void, reject: (reason?: any) => void) => void, options?: CreateAbortablePromiseOptions): Promise; + +/** + * Options for the createAbortablePromise function. + */ +export declare interface CreateAbortablePromiseOptions extends AbortOptions { + /** A function to be called if the promise was aborted */ + cleanupBeforeAbort?: () => void; +} + +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +export declare function delay(timeInMs: number, options?: DelayOptions): Promise; + +/** + * Options for support abort functionality for the delay method + */ +export declare interface DelayOptions extends AbortOptions { +} + +/** The supported character encoding type */ +export declare type EncodingType = "utf-8" | "base64" | "base64url" | "hex"; + +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. + * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +export declare function getErrorMessage(e: unknown): string; + +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +export declare function getRandomIntegerInclusive(min: number, max: number): number; + +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +export declare const isBrowser: boolean; + +/** + * A constant that indicates whether the environment the code is running is Bun.sh. 
+ */ +export declare const isBun: boolean; + +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +export declare function isDefined(thing: T | undefined | null): thing is T; + +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +export declare const isDeno: boolean; + +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +export declare function isError(e: unknown): e is Error; + +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export declare const isNode: boolean; + +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +export declare function isObject(input: unknown): input is UnknownObject; + +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +export declare function isObjectWithProperties(thing: Thing, properties: PropertyName[]): thing is Thing & Record; + +/** + * A constant that indicates whether the environment the code is running is in React-Native. + */ +export declare const isReactNative: boolean; + +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +export declare const isWebWorker: boolean; + +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + */ +export declare function objectHasProperty(thing: Thing, property: PropertyName): thing is Thing & Record; + +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export declare function randomUUID(): string; + +/** + * The helper that transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +export declare function stringToUint8Array(value: string, format: EncodingType): Uint8Array; + +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +export declare function uint8ArrayToString(bytes: Uint8Array, format: EncodingType): string; + +/** + * A generic shape for a plain JS object. + */ +export declare type UnknownObject = { + [s: string]: unknown; +}; + +export { } diff --git a/node_modules/@azure/core-util/dist/esm/aborterUtils.d.ts b/node_modules/@azure/core-util/dist/esm/aborterUtils.d.ts new file mode 100644 index 0000000..89a2f98 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/aborterUtils.d.ts @@ -0,0 +1,27 @@ +import type { AbortSignalLike } from "@azure/abort-controller"; +/** + * Options related to abort controller. + */ +export interface AbortOptions { + /** + * The abortSignal associated with containing operation. + */ + abortSignal?: AbortSignalLike; + /** + * The abort error message associated with containing operation. + */ + abortErrorMsg?: string; +} +/** + * Represents a function that returns a promise that can be aborted. 
+ */ +export type AbortablePromiseBuilder = (abortOptions: { + abortSignal?: AbortSignalLike; +}) => Promise; +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. + */ +export declare function cancelablePromiseRace(abortablePromiseBuilders: AbortablePromiseBuilder[], options?: { + abortSignal?: AbortSignalLike; +}): Promise; +//# sourceMappingURL=aborterUtils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/aborterUtils.d.ts.map b/node_modules/@azure/core-util/dist/esm/aborterUtils.d.ts.map new file mode 100644 index 0000000..e7f705b --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/aborterUtils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"aborterUtils.d.ts","sourceRoot":"","sources":["../../src/aborterUtils.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE/D;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED;;GAEG;AACH,MAAM,MAAM,uBAAuB,CAAC,CAAC,IAAI,CAAC,YAAY,EAAE;IACtD,WAAW,CAAC,EAAE,eAAe,CAAC;CAC/B,KAAK,OAAO,CAAC,CAAC,CAAC,CAAC;AAEjB;;GAEG;AACH,wBAAsB,qBAAqB,CAAC,CAAC,SAAS,OAAO,EAAE,EAC7D,wBAAwB,EAAE,uBAAuB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,EAC9D,OAAO,CAAC,EAAE;IAAE,WAAW,CAAC,EAAE,eAAe,CAAA;CAAE,GAC1C,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAcpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/aborterUtils.js b/node_modules/@azure/core-util/dist/esm/aborterUtils.js new file mode 100644 index 0000000..c34a95d --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/aborterUtils.js @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. 
+ */ +export async function cancelablePromiseRace(abortablePromiseBuilders, options) { + const aborter = new AbortController(); + function abortHandler() { + aborter.abort(); + } + options?.abortSignal?.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); + } + finally { + aborter.abort(); + options?.abortSignal?.removeEventListener("abort", abortHandler); + } +} +//# sourceMappingURL=aborterUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/aborterUtils.js.map b/node_modules/@azure/core-util/dist/esm/aborterUtils.js.map new file mode 100644 index 0000000..ff71dce --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/aborterUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"aborterUtils.js","sourceRoot":"","sources":["../../src/aborterUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAyBlC;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CACzC,wBAA8D,EAC9D,OAA2C;IAE3C,MAAM,OAAO,GAAG,IAAI,eAAe,EAAE,CAAC;IACtC,SAAS,YAAY;QACnB,OAAO,CAAC,KAAK,EAAE,CAAC;IAClB,CAAC;IACD,OAAO,EAAE,WAAW,EAAE,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IAC9D,IAAI,CAAC;QACH,OAAO,MAAM,OAAO,CAAC,IAAI,CACvB,wBAAwB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CACxE,CAAC;IACJ,CAAC;YAAS,CAAC;QACT,OAAO,CAAC,KAAK,EAAE,CAAC;QAChB,OAAO,EAAE,WAAW,EAAE,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IACnE,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport type { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options related to abort controller.\n */\nexport interface AbortOptions {\n /**\n * The abortSignal associated with containing operation.\n */\n abortSignal?: AbortSignalLike;\n /**\n * The abort error message associated with containing operation.\n */\n abortErrorMsg?: string;\n}\n\n/**\n * Represents a function that returns a promise that can be aborted.\n */\nexport type AbortablePromiseBuilder = (abortOptions: {\n abortSignal?: AbortSignalLike;\n}) => Promise;\n\n/**\n * promise.race() wrapper that aborts rest of promises as soon as the first promise settles.\n */\nexport async function cancelablePromiseRace(\n abortablePromiseBuilders: AbortablePromiseBuilder[],\n options?: { abortSignal?: AbortSignalLike },\n): Promise {\n const aborter = new AbortController();\n function abortHandler(): void {\n aborter.abort();\n }\n options?.abortSignal?.addEventListener(\"abort\", abortHandler);\n try {\n return await Promise.race(\n abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal })),\n );\n } finally {\n aborter.abort();\n options?.abortSignal?.removeEventListener(\"abort\", abortHandler);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/bytesEncoding.d.ts b/node_modules/@azure/core-util/dist/esm/bytesEncoding.d.ts new file mode 100644 index 0000000..48a9754 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/bytesEncoding.d.ts @@ -0,0 +1,17 @@ +/** The supported character encoding type */ +export type EncodingType = "utf-8" | "base64" | "base64url" | "hex"; +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +export declare function uint8ArrayToString(bytes: Uint8Array, format: EncodingType): string; +/** + * The helper that 
transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +export declare function stringToUint8Array(value: string, format: EncodingType): Uint8Array; +//# sourceMappingURL=bytesEncoding.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/bytesEncoding.d.ts.map b/node_modules/@azure/core-util/dist/esm/bytesEncoding.d.ts.map new file mode 100644 index 0000000..700cf2c --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/bytesEncoding.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bytesEncoding.d.ts","sourceRoot":"","sources":["../../src/bytesEncoding.ts"],"names":[],"mappings":"AAGA,4CAA4C;AAC5C,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,QAAQ,GAAG,WAAW,GAAG,KAAK,CAAC;AAEpE;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,YAAY,GAAG,MAAM,CAElF;AAED;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,GAAG,UAAU,CAElF"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/bytesEncoding.js b/node_modules/@azure/core-util/dist/esm/bytesEncoding.js new file mode 100644 index 0000000..a38cbeb --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/bytesEncoding.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +export function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); +} +/** + * The helper that transforms string to specific character encoded bytes array. 
+ * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +export function stringToUint8Array(value, format) { + return Buffer.from(value, format); +} +//# sourceMappingURL=bytesEncoding.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/bytesEncoding.js.map b/node_modules/@azure/core-util/dist/esm/bytesEncoding.js.map new file mode 100644 index 0000000..2abf518 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/bytesEncoding.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bytesEncoding.js","sourceRoot":"","sources":["../../src/bytesEncoding.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC;;;;;GAKG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAiB,EAAE,MAAoB;IACxE,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AAC7C,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAa,EAAE,MAAoB;IACpE,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AACpC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/** The supported character encoding type */\nexport type EncodingType = \"utf-8\" | \"base64\" | \"base64url\" | \"hex\";\n\n/**\n * The helper that transforms bytes with specific character encoding into string\n * @param bytes - the uint8array bytes\n * @param format - the format we use to encode the byte\n * @returns a string of the encoded string\n */\nexport function uint8ArrayToString(bytes: Uint8Array, format: EncodingType): string {\n return Buffer.from(bytes).toString(format);\n}\n\n/**\n * The helper that transforms string to specific character encoded bytes array.\n * @param value - the string to be converted\n * @param format - the format we use to decode the value\n * @returns a uint8array\n */\nexport function stringToUint8Array(value: string, format: EncodingType): Uint8Array {\n return Buffer.from(value, format);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/checkEnvironment.d.ts b/node_modules/@azure/core-util/dist/esm/checkEnvironment.d.ts new file mode 100644 index 0000000..63273c1 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/checkEnvironment.d.ts @@ -0,0 +1,25 @@ +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +export declare const isBrowser: boolean; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +export declare const isWebWorker: boolean; +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +export declare const isDeno: boolean; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +export declare const isBun: boolean; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export declare const isNode: boolean; +/** + * A constant that indicates whether the environment the code is running is in React-Native. 
+ */ +export declare const isReactNative: boolean; +//# sourceMappingURL=checkEnvironment.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/checkEnvironment.d.ts.map b/node_modules/@azure/core-util/dist/esm/checkEnvironment.d.ts.map new file mode 100644 index 0000000..ccff768 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/checkEnvironment.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"checkEnvironment.d.ts","sourceRoot":"","sources":["../../src/checkEnvironment.ts"],"names":[],"mappings":"AAoCA;;GAEG;AAEH,eAAO,MAAM,SAAS,SAA0E,CAAC;AAEjG;;GAEG;AACH,eAAO,MAAM,WAAW,SAKiC,CAAC;AAE1D;;GAEG;AACH,eAAO,MAAM,MAAM,SAGuB,CAAC;AAE3C;;GAEG;AACH,eAAO,MAAM,KAAK,SAAmE,CAAC;AAEtF;;GAEG;AACH,eAAO,MAAM,MAAM,SAMX,CAAC;AAET;;GAEG;AAEH,eAAO,MAAM,aAAa,SACgD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/checkEnvironment.js b/node_modules/@azure/core-util/dist/esm/checkEnvironment.js new file mode 100644 index 0000000..53ba62e --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/checkEnvironment.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +// eslint-disable-next-line @azure/azure-sdk/ts-no-window +export const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +export const isWebWorker = typeof self === "object" && + typeof self?.importScripts === "function" && + (self.constructor?.name === "DedicatedWorkerGlobalScope" || + self.constructor?.name === "ServiceWorkerGlobalScope" || + self.constructor?.name === "SharedWorkerGlobalScope"); +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +export const isDeno = typeof Deno !== "undefined" && + typeof Deno.version !== "undefined" && + typeof Deno.version.deno !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +export const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export const isNode = typeof globalThis.process !== "undefined" && + Boolean(globalThis.process.version) && + Boolean(globalThis.process.versions?.node) && + // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions + !isDeno && + !isBun; +/** + * A constant that indicates whether the environment the code is running is in React-Native. 
+ */ +// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js +export const isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; +//# sourceMappingURL=checkEnvironment.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/checkEnvironment.js.map b/node_modules/@azure/core-util/dist/esm/checkEnvironment.js.map new file mode 100644 index 0000000..5b1f90f --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/checkEnvironment.js.map @@ -0,0 +1 @@ +{"version":3,"file":"checkEnvironment.js","sourceRoot":"","sources":["../../src/checkEnvironment.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAmClC;;GAEG;AACH,yDAAyD;AACzD,MAAM,CAAC,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,WAAW,CAAC;AAEjG;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GACtB,OAAO,IAAI,KAAK,QAAQ;IACxB,OAAO,IAAI,EAAE,aAAa,KAAK,UAAU;IACzC,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,4BAA4B;QACtD,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,0BAA0B;QACrD,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,yBAAyB,CAAC,CAAC;AAE1D;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GACjB,OAAO,IAAI,KAAK,WAAW;IAC3B,OAAO,IAAI,CAAC,OAAO,KAAK,WAAW;IACnC,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,WAAW,CAAC;AAE3C;;GAEG;AACH,MAAM,CAAC,MAAM,KAAK,GAAG,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,WAAW,CAAC;AAEtF;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GACjB,OAAO,UAAU,CAAC,OAAO,KAAK,WAAW;IACzC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC;IACnC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC;IAC1C,+HAA+H;IAC/H,CAAC,MAAM;IACP,CAAC,KAAK,CAAC;AAET;;GAEG;AACH,4GAA4G;AAC5G,MAAM,CAAC,MAAM,aAAa,GACxB,OAAO,SAAS,KAAK,WAAW,IAAI,SAAS,EAAE,OAAO,KAAK,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ninterface Window {\n document: unknown;\n}\n\ninterface DedicatedWorkerGlobalScope {\n constructor: {\n name: string;\n };\n\n importScripts: (...paths: string[]) => void;\n}\n\ninterface Navigator {\n product: string;\n}\n\ninterface DenoGlobal {\n version: {\n deno: string;\n };\n}\n\ninterface BunGlobal {\n version: string;\n}\n\n// eslint-disable-next-line @azure/azure-sdk/ts-no-window\ndeclare const window: Window;\ndeclare const self: DedicatedWorkerGlobalScope;\ndeclare const Deno: DenoGlobal;\ndeclare const Bun: BunGlobal;\ndeclare const navigator: Navigator;\n\n/**\n * A constant that indicates whether the environment the code is running is a Web Browser.\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-no-window\nexport const isBrowser = typeof window !== \"undefined\" && typeof window.document !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is a Web Worker.\n */\nexport const isWebWorker =\n typeof self === \"object\" &&\n typeof self?.importScripts === \"function\" &&\n (self.constructor?.name === \"DedicatedWorkerGlobalScope\" ||\n self.constructor?.name === \"ServiceWorkerGlobalScope\" ||\n self.constructor?.name === \"SharedWorkerGlobalScope\");\n\n/**\n * A constant that indicates whether the environment the code is running is Deno.\n */\nexport const isDeno =\n typeof Deno !== \"undefined\" &&\n typeof Deno.version !== \"undefined\" &&\n typeof Deno.version.deno !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is Bun.sh.\n */\nexport const isBun = typeof Bun !== \"undefined\" && typeof Bun.version !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is Node.JS.\n */\nexport const 
isNode =\n typeof globalThis.process !== \"undefined\" &&\n Boolean(globalThis.process.version) &&\n Boolean(globalThis.process.versions?.node) &&\n // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions\n !isDeno &&\n !isBun;\n\n/**\n * A constant that indicates whether the environment the code is running is in React-Native.\n */\n// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js\nexport const isReactNative =\n typeof navigator !== \"undefined\" && navigator?.product === \"ReactNative\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/createAbortablePromise.d.ts b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.d.ts new file mode 100644 index 0000000..183c705 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.d.ts @@ -0,0 +1,16 @@ +import type { AbortOptions } from "./aborterUtils.js"; +/** + * Options for the createAbortablePromise function. + */ +export interface CreateAbortablePromiseOptions extends AbortOptions { + /** A function to be called if the promise was aborted */ + cleanupBeforeAbort?: () => void; +} +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +export declare function createAbortablePromise(buildPromise: (resolve: (value: T | PromiseLike) => void, reject: (reason?: any) => void) => void, options?: CreateAbortablePromiseOptions): Promise; +//# sourceMappingURL=createAbortablePromise.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/createAbortablePromise.d.ts.map b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.d.ts.map new file mode 100644 index 0000000..705d2e9 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"createAbortablePromise.d.ts","sourceRoot":"","sources":["../../src/createAbortablePromise.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAEtD;;GAEG;AACH,MAAM,WAAW,6BAA8B,SAAQ,YAAY;IACjE,yDAAyD;IACzD,kBAAkB,CAAC,EAAE,MAAM,IAAI,CAAC;CACjC;AAED;;;;;GAKG;AACH,wBAAgB,sBAAsB,CAAC,CAAC,EACtC,YAAY,EAAE,CACZ,OAAO,EAAE,CAAC,KAAK,EAAE,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,KAAK,IAAI,EAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,IAAI,KAC3B,IAAI,EACT,OAAO,CAAC,EAAE,6BAA6B,GACtC,OAAO,CAAC,CAAC,CAAC,CAiCZ"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js new file mode 100644 index 0000000..fc54e7c --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +export function createAbortablePromise(buildPromise, options) { + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? 
{}; + return new Promise((resolve, reject) => { + function rejectOnAbort() { + reject(new AbortError(abortErrorMsg ?? "The operation was aborted.")); + } + function removeListeners() { + abortSignal?.removeEventListener("abort", onAbort); + } + function onAbort() { + cleanupBeforeAbort?.(); + removeListeners(); + rejectOnAbort(); + } + if (abortSignal?.aborted) { + return rejectOnAbort(); + } + try { + buildPromise((x) => { + removeListeners(); + resolve(x); + }, (x) => { + removeListeners(); + reject(x); + }); + } + catch (err) { + reject(err); + } + abortSignal?.addEventListener("abort", onAbort); + }); +} +//# sourceMappingURL=createAbortablePromise.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js.map b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js.map new file mode 100644 index 0000000..6f928a9 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createAbortablePromise.js","sourceRoot":"","sources":["../../src/createAbortablePromise.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAWrD;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,YAGS,EACT,OAAuC;IAEvC,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,aAAa,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACzE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,SAAS,aAAa;YACpB,MAAM,CAAC,IAAI,UAAU,CAAC,aAAa,IAAI,4BAA4B,CAAC,CAAC,CAAC;QACxE,CAAC;QACD,SAAS,eAAe;YACtB,WAAW,EAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACrD,CAAC;QACD,SAAS,OAAO;YACd,kBAAkB,EAAE,EAAE,CAAC;YACvB,eAAe,EAAE,CAAC;YAClB,aAAa,EAAE,CAAC;QAClB,CAAC;QACD,IAAI,WAAW,EAAE,OAAO,EAAE,CAAC;YACzB,OAAO,aAAa,EAAE,CAAC;QACzB,CAAC;QACD,IAAI,CAAC;YACH,YAAY,CACV,CAAC,CAAC,EAAE,EAAE;gBACJ,eAAe,EAAE,CAAC;gBAClB,OAAO,CAAC,CAAC,CAAC,CAAC;YACb,CAAC,EACD,CAAC,CAAC,EAAE,EAAE;gBACJ,eAAe,EAAE,CAAC;gBAClB,MAAM,CAAC,CAAC,CAAC,CAAC;YACZ,CAAC,CACF,CAAC;QACJ,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC;QACD,WAAW,EAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAClD,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from \"@azure/abort-controller\";\nimport type { AbortOptions } from \"./aborterUtils.js\";\n\n/**\n * Options for the createAbortablePromise function.\n */\nexport interface CreateAbortablePromiseOptions extends AbortOptions {\n /** A function to be called if the promise was aborted */\n cleanupBeforeAbort?: () => void;\n}\n\n/**\n * Creates an abortable promise.\n * @param buildPromise - A function that takes the resolve and reject functions as parameters.\n * @param options - The options for the abortable promise.\n * @returns A promise that can be aborted.\n */\nexport function createAbortablePromise(\n buildPromise: (\n resolve: (value: T | PromiseLike) => void,\n reject: (reason?: any) => void,\n ) => void,\n options?: CreateAbortablePromiseOptions,\n): Promise {\n const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {};\n return new Promise((resolve, reject) => {\n function rejectOnAbort(): void {\n reject(new AbortError(abortErrorMsg ?? 
\"The operation was aborted.\"));\n }\n function removeListeners(): void {\n abortSignal?.removeEventListener(\"abort\", onAbort);\n }\n function onAbort(): void {\n cleanupBeforeAbort?.();\n removeListeners();\n rejectOnAbort();\n }\n if (abortSignal?.aborted) {\n return rejectOnAbort();\n }\n try {\n buildPromise(\n (x) => {\n removeListeners();\n resolve(x);\n },\n (x) => {\n removeListeners();\n reject(x);\n },\n );\n } catch (err) {\n reject(err);\n }\n abortSignal?.addEventListener(\"abort\", onAbort);\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/delay.d.ts b/node_modules/@azure/core-util/dist/esm/delay.d.ts new file mode 100644 index 0000000..9dc4b5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/delay.d.ts @@ -0,0 +1,14 @@ +import type { AbortOptions } from "./aborterUtils.js"; +/** + * Options for support abort functionality for the delay method + */ +export interface DelayOptions extends AbortOptions { +} +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +export declare function delay(timeInMs: number, options?: DelayOptions): Promise; +//# sourceMappingURL=delay.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/delay.d.ts.map b/node_modules/@azure/core-util/dist/esm/delay.d.ts.map new file mode 100644 index 0000000..8d2d31b --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/delay.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.d.ts","sourceRoot":"","sources":["../../src/delay.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAKtD;;GAEG;AACH,MAAM,WAAW,YAAa,SAAQ,YAAY;CAAG;AAErD;;;;;GAKG;AACH,wBAAgB,KAAK,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,CAa7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/delay.js b/node_modules/@azure/core-util/dist/esm/delay.js new file mode 100644 index 0000000..3e12ea5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/delay.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createAbortablePromise } from "./createAbortablePromise.js"; +const StandardAbortMessage = "The delay was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +export function delay(timeInMs, options) { + let token; + const { abortSignal, abortErrorMsg } = options ?? {}; + return createAbortablePromise((resolve) => { + token = setTimeout(resolve, timeInMs); + }, { + cleanupBeforeAbort: () => clearTimeout(token), + abortSignal, + abortErrorMsg: abortErrorMsg ?? 
StandardAbortMessage, + }); +} +//# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/delay.js.map b/node_modules/@azure/core-util/dist/esm/delay.js.map new file mode 100644 index 0000000..b02f90c --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/delay.js.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.js","sourceRoot":"","sources":["../../src/delay.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AAErE,MAAM,oBAAoB,GAAG,wBAAwB,CAAC;AAOtD;;;;;GAKG;AACH,MAAM,UAAU,KAAK,CAAC,QAAgB,EAAE,OAAsB;IAC5D,IAAI,KAAoC,CAAC;IACzC,MAAM,EAAE,WAAW,EAAE,aAAa,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACrD,OAAO,sBAAsB,CAC3B,CAAC,OAAO,EAAE,EAAE;QACV,KAAK,GAAG,UAAU,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;IACxC,CAAC,EACD;QACE,kBAAkB,EAAE,GAAG,EAAE,CAAC,YAAY,CAAC,KAAK,CAAC;QAC7C,WAAW;QACX,aAAa,EAAE,aAAa,IAAI,oBAAoB;KACrD,CACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport type { AbortOptions } from \"./aborterUtils.js\";\nimport { createAbortablePromise } from \"./createAbortablePromise.js\";\n\nconst StandardAbortMessage = \"The delay was aborted.\";\n\n/**\n * Options for support abort functionality for the delay method\n */\nexport interface DelayOptions extends AbortOptions {}\n\n/**\n * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds.\n * @param timeInMs - The number of milliseconds to be delayed.\n * @param options - The options for delay - currently abort options\n * @returns Promise that is resolved after timeInMs\n */\nexport function delay(timeInMs: number, options?: DelayOptions): Promise {\n let token: ReturnType;\n const { abortSignal, abortErrorMsg } = options ?? {};\n return createAbortablePromise(\n (resolve) => {\n token = setTimeout(resolve, timeInMs);\n },\n {\n cleanupBeforeAbort: () => clearTimeout(token),\n abortSignal,\n abortErrorMsg: abortErrorMsg ?? StandardAbortMessage,\n },\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/error.d.ts b/node_modules/@azure/core-util/dist/esm/error.d.ts new file mode 100644 index 0000000..5027f12 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/error.d.ts @@ -0,0 +1,13 @@ +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +export declare function isError(e: unknown): e is Error; +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. 
+ * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +export declare function getErrorMessage(e: unknown): string; +//# sourceMappingURL=error.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/error.d.ts.map b/node_modules/@azure/core-util/dist/esm/error.d.ts.map new file mode 100644 index 0000000..d56797d --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/error.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"error.d.ts","sourceRoot":"","sources":["../../src/error.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,wBAAgB,OAAO,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,IAAI,KAAK,CAO9C;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,CAAC,EAAE,OAAO,GAAG,MAAM,CAgBlD"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/error.js b/node_modules/@azure/core-util/dist/esm/error.js new file mode 100644 index 0000000..8d107eb --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/error.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObject } from "./object.js"; +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +export function isError(e) { + if (isObject(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; + } + return false; +} +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. + * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +export function getErrorMessage(e) { + if (isError(e)) { + return e.message; + } + else { + let stringified; + try { + if (typeof e === "object" && e) { + stringified = JSON.stringify(e); + } + else { + stringified = String(e); + } + } + catch (err) { + stringified = "[unable to stringify input]"; + } + return `Unknown error ${stringified}`; + } +} +//# sourceMappingURL=error.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/error.js.map b/node_modules/@azure/core-util/dist/esm/error.js.map new file mode 100644 index 0000000..3e33e10 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"error.js","sourceRoot":"","sources":["../../src/error.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC;;;GAGG;AACH,MAAM,UAAU,OAAO,CAAC,CAAU;IAChC,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC;QAChB,MAAM,OAAO,GAAG,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC;QAC3C,MAAM,UAAU,GAAG,OAAO,CAAC,CAAC,OAAO,KAAK,QAAQ,CAAC;QACjD,OAAO,OAAO,IAAI,UAAU,CAAC;IAC/B,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,CAAU;IACxC,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACf,OAAO,CAAC,CAAC,OAAO,CAAC;IACnB,CAAC;SAAM,CAAC;QACN,IAAI,WAAmB,CAAC;QACxB,IAAI,CAAC;YACH,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,EAAE,CAAC;gBAC/B,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;iBAAM,CAAC;gBACN,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAC1B,CAAC;QACH,CAAC;QAAC,OAAO,GAAQ,EAAE,CAAC;YAClB,WAAW,GAAG,6BAA6B,CAAC;QAC9C,CAAC;QACD,OAAO,iBAAiB,WAAW,EAAE,CAAC;IACxC,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObject } from \"./object.js\";\n\n/**\n * Typeguard for an error object shape (has name and message)\n * @param e - Something caught by a 
catch clause.\n */\nexport function isError(e: unknown): e is Error {\n if (isObject(e)) {\n const hasName = typeof e.name === \"string\";\n const hasMessage = typeof e.message === \"string\";\n return hasName && hasMessage;\n }\n return false;\n}\n\n/**\n * Given what is thought to be an error object, return the message if possible.\n * If the message is missing, returns a stringified version of the input.\n * @param e - Something thrown from a try block\n * @returns The error message or a string of the input\n */\nexport function getErrorMessage(e: unknown): string {\n if (isError(e)) {\n return e.message;\n } else {\n let stringified: string;\n try {\n if (typeof e === \"object\" && e) {\n stringified = JSON.stringify(e);\n } else {\n stringified = String(e);\n }\n } catch (err: any) {\n stringified = \"[unable to stringify input]\";\n }\n return `Unknown error ${stringified}`;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/index.d.ts b/node_modules/@azure/core-util/dist/esm/index.d.ts new file mode 100644 index 0000000..14fada5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/index.d.ts @@ -0,0 +1,12 @@ +export { delay, type DelayOptions } from "./delay.js"; +export { type AbortOptions, cancelablePromiseRace, type AbortablePromiseBuilder, } from "./aborterUtils.js"; +export { createAbortablePromise, type CreateAbortablePromiseOptions, } from "./createAbortablePromise.js"; +export { getRandomIntegerInclusive } from "./random.js"; +export { isObject, type UnknownObject } from "./object.js"; +export { isError, getErrorMessage } from "./error.js"; +export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; +export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; +export { randomUUID } from "./uuidUtils.js"; +export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { uint8ArrayToString, stringToUint8Array, type EncodingType } from "./bytesEncoding.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/index.d.ts.map b/node_modules/@azure/core-util/dist/esm/index.d.ts.map new file mode 100644 index 0000000..77255da --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,KAAK,EAAE,KAAK,YAAY,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EACL,KAAK,YAAY,EACjB,qBAAqB,EACrB,KAAK,uBAAuB,GAC7B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,sBAAsB,EACtB,KAAK,6BAA6B,GACnC,MAAM,6BAA6B,CAAC;AACrC,OAAO,EAAE,yBAAyB,EAAE,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAE,KAAK,aAAa,EAAE,MAAM,aAAa,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAAE,SAAS,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACvF,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EACL,SAAS,EACT,KAAK,EACL,MAAM,EACN,MAAM,EACN,aAAa,EACb,WAAW,GACZ,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAE,KAAK,YAAY,EAAE,MAAM,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/index.js b/node_modules/@azure/core-util/dist/esm/index.js new file mode 100644 index 0000000..64b7b80 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/index.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { delay } from "./delay.js"; +export { cancelablePromiseRace, } from "./aborterUtils.js"; +export { createAbortablePromise, } from "./createAbortablePromise.js"; +export { getRandomIntegerInclusive } from "./random.js"; +export { isObject } from "./object.js"; +export { isError, getErrorMessage } from "./error.js"; +export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; +export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; +export { randomUUID } from "./uuidUtils.js"; +export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { uint8ArrayToString, stringToUint8Array } from "./bytesEncoding.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/index.js.map b/node_modules/@azure/core-util/dist/esm/index.js.map new file mode 100644 index 0000000..3d3e618 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,KAAK,EAAqB,MAAM,YAAY,CAAC;AACtD,OAAO,EAEL,qBAAqB,GAEtB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,sBAAsB,GAEvB,MAAM,6BAA6B,CAAC;AACrC,OAAO,EAAE,yBAAyB,EAAE,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAsB,MAAM,aAAa,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAAE,SAAS,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACvF,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EACL,SAAS,EACT,KAAK,EACL,MAAM,EACN,MAAM,EACN,aAAa,EACb,WAAW,GACZ,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAqB,MAAM,oBAAoB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { delay, type DelayOptions } from \"./delay.js\";\nexport {\n type AbortOptions,\n cancelablePromiseRace,\n type AbortablePromiseBuilder,\n} from \"./aborterUtils.js\";\nexport {\n createAbortablePromise,\n type CreateAbortablePromiseOptions,\n} from \"./createAbortablePromise.js\";\nexport { getRandomIntegerInclusive } from \"./random.js\";\nexport { isObject, type UnknownObject } from \"./object.js\";\nexport { isError, getErrorMessage } from \"./error.js\";\nexport { computeSha256Hash, computeSha256Hmac } from \"./sha256.js\";\nexport { isDefined, isObjectWithProperties, objectHasProperty } from \"./typeGuards.js\";\nexport { randomUUID } from \"./uuidUtils.js\";\nexport {\n isBrowser,\n isBun,\n isNode,\n isDeno,\n isReactNative,\n isWebWorker,\n} from \"./checkEnvironment.js\";\nexport { uint8ArrayToString, stringToUint8Array, type EncodingType } from \"./bytesEncoding.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/object.d.ts b/node_modules/@azure/core-util/dist/esm/object.d.ts new file mode 100644 index 0000000..fc3f33a --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/object.d.ts @@ -0,0 +1,12 @@ +/** + * A generic shape for a plain JS object. + */ +export type UnknownObject = { + [s: string]: unknown; +}; +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. 
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=object.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/object.d.ts.map b/node_modules/@azure/core-util/dist/esm/object.d.ts.map new file mode 100644 index 0000000..66b39bd --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/object.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"object.d.ts","sourceRoot":"","sources":["../../src/object.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,OAAO,CAAA;CAAE,CAAC;AAErD;;;GAGG;AACH,wBAAgB,QAAQ,CAAC,KAAK,EAAE,OAAO,GAAG,KAAK,IAAI,aAAa,CAQ/D"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/object.js b/node_modules/@azure/core-util/dist/esm/object.js new file mode 100644 index 0000000..ff04af8 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/object.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +export function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +//# sourceMappingURL=object.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/object.js.map b/node_modules/@azure/core-util/dist/esm/object.js.map new file mode 100644 index 0000000..3a5f096 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/object.js.map @@ -0,0 +1 @@ +{"version":3,"file":"object.js","sourceRoot":"","sources":["../../src/object.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC;;;GAGG;AACH,MAAM,UAAU,QAAQ,CAAC,KAAc;IACrC,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;QACzB,KAAK,KAAK,IAAI;QACd,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QACrB,CAAC,CAAC,KAAK,YAAY,MAAM,CAAC;QAC1B,CAAC,CAAC,KAAK,YAAY,IAAI,CAAC,CACzB,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A generic shape for a plain JS object.\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * Helper to determine when an input is a generic JS object.\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/package.json b/node_modules/@azure/core-util/dist/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-util/dist/esm/random.d.ts b/node_modules/@azure/core-util/dist/esm/random.d.ts new file mode 100644 index 0000000..9e9631a --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/random.d.ts @@ -0,0 +1,10 @@ +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. 
+ * @param max - The largest integer value allowed. + */ +export declare function getRandomIntegerInclusive(min: number, max: number): number; +//# sourceMappingURL=random.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/random.d.ts.map b/node_modules/@azure/core-util/dist/esm/random.d.ts.map new file mode 100644 index 0000000..8a91316 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/random.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"random.d.ts","sourceRoot":"","sources":["../../src/random.ts"],"names":[],"mappings":"AAGA;;;;;;;GAOG;AACH,wBAAgB,yBAAyB,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,MAAM,CAS1E"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/random.js b/node_modules/@azure/core-util/dist/esm/random.js new file mode 100644 index 0000000..aef3288 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/random.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +export function getRandomIntegerInclusive(min, max) { + // Make sure inputs are integers. + min = Math.ceil(min); + max = Math.floor(max); + // Pick a random offset from zero to the size of the range. + // Since Math.random() can never return 1, we have to make the range one larger + // in order to be inclusive of the maximum value after we take the floor. + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; +} +//# sourceMappingURL=random.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/random.js.map b/node_modules/@azure/core-util/dist/esm/random.js.map new file mode 100644 index 0000000..93d4d74 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/random.js.map @@ -0,0 +1 @@ +{"version":3,"file":"random.js","sourceRoot":"","sources":["../../src/random.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;GAOG;AACH,MAAM,UAAU,yBAAyB,CAAC,GAAW,EAAE,GAAW;IAChE,iCAAiC;IACjC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACrB,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACtB,2DAA2D;IAC3D,+EAA+E;IAC/E,yEAAyE;IACzE,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;IAC3D,OAAO,MAAM,GAAG,GAAG,CAAC;AACtB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Returns a random integer value between a lower and upper bound,\n * inclusive of both bounds.\n * Note that this uses Math.random and isn't secure. 
If you need to use\n * this for any kind of security purpose, find a better source of random.\n * @param min - The smallest integer value allowed.\n * @param max - The largest integer value allowed.\n */\nexport function getRandomIntegerInclusive(min: number, max: number): number {\n // Make sure inputs are integers.\n min = Math.ceil(min);\n max = Math.floor(max);\n // Pick a random offset from zero to the size of the range.\n // Since Math.random() can never return 1, we have to make the range one larger\n // in order to be inclusive of the maximum value after we take the floor.\n const offset = Math.floor(Math.random() * (max - min + 1));\n return offset + min;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/sha256.d.ts b/node_modules/@azure/core-util/dist/esm/sha256.d.ts new file mode 100644 index 0000000..a4b7b98 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/sha256.d.ts @@ -0,0 +1,14 @@ +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +export declare function computeSha256Hmac(key: string, stringToSign: string, encoding: "base64" | "hex"): Promise; +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. + */ +export declare function computeSha256Hash(content: string, encoding: "base64" | "hex"): Promise; +//# sourceMappingURL=sha256.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/sha256.d.ts.map b/node_modules/@azure/core-util/dist/esm/sha256.d.ts.map new file mode 100644 index 0000000..fffa8fc --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/sha256.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"sha256.d.ts","sourceRoot":"","sources":["../../src/sha256.ts"],"names":[],"mappings":"AAKA;;;;;GAKG;AACH,wBAAsB,iBAAiB,CACrC,GAAG,EAAE,MAAM,EACX,YAAY,EAAE,MAAM,EACpB,QAAQ,EAAE,QAAQ,GAAG,KAAK,GACzB,OAAO,CAAC,MAAM,CAAC,CAIjB;AAED;;;;GAIG;AACH,wBAAsB,iBAAiB,CACrC,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE,QAAQ,GAAG,KAAK,GACzB,OAAO,CAAC,MAAM,CAAC,CAEjB"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/sha256.js b/node_modules/@azure/core-util/dist/esm/sha256.js new file mode 100644 index 0000000..6704d38 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/sha256.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHash, createHmac } from "crypto"; +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +export async function computeSha256Hmac(key, stringToSign, encoding) { + const decodedKey = Buffer.from(key, "base64"); + return createHmac("sha256", decodedKey).update(stringToSign).digest(encoding); +} +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. 
+ */ +export async function computeSha256Hash(content, encoding) { + return createHash("sha256").update(content).digest(encoding); +} +//# sourceMappingURL=sha256.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/sha256.js.map b/node_modules/@azure/core-util/dist/esm/sha256.js.map new file mode 100644 index 0000000..2f3932e --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/sha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"sha256.js","sourceRoot":"","sources":["../../src/sha256.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEhD;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,GAAW,EACX,YAAoB,EACpB,QAA0B;IAE1B,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;IAE9C,OAAO,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;AAChF,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,OAAe,EACf,QAA0B;IAE1B,OAAO,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;AAC/D,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHash, createHmac } from \"crypto\";\n\n/**\n * Generates a SHA-256 HMAC signature.\n * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash.\n * @param stringToSign - The data to be signed.\n * @param encoding - The textual encoding to use for the returned HMAC digest.\n */\nexport async function computeSha256Hmac(\n key: string,\n stringToSign: string,\n encoding: \"base64\" | \"hex\",\n): Promise {\n const decodedKey = Buffer.from(key, \"base64\");\n\n return createHmac(\"sha256\", decodedKey).update(stringToSign).digest(encoding);\n}\n\n/**\n * Generates a SHA-256 hash.\n * @param content - The data to be included in the hash.\n * @param encoding - The textual encoding to use for the returned hash.\n */\nexport async function computeSha256Hash(\n content: string,\n encoding: \"base64\" | \"hex\",\n): Promise {\n return createHash(\"sha256\").update(content).digest(encoding);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/typeGuards.d.ts b/node_modules/@azure/core-util/dist/esm/typeGuards.d.ts new file mode 100644 index 0000000..50ccc5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/typeGuards.d.ts @@ -0,0 +1,18 @@ +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +export declare function isDefined(thing: T | undefined | null): thing is T; +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +export declare function isObjectWithProperties(thing: Thing, properties: PropertyName[]): thing is Thing & Record; +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. 
+ */ +export declare function objectHasProperty(thing: Thing, property: PropertyName): thing is Thing & Record; +//# sourceMappingURL=typeGuards.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/typeGuards.d.ts.map b/node_modules/@azure/core-util/dist/esm/typeGuards.d.ts.map new file mode 100644 index 0000000..f20f041 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/typeGuards.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"typeGuards.d.ts","sourceRoot":"","sources":["../../src/typeGuards.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,SAAS,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,GAAG,SAAS,GAAG,IAAI,GAAG,KAAK,IAAI,CAAC,CAEpE;AAED;;;;GAIG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,YAAY,SAAS,MAAM,EACvE,KAAK,EAAE,KAAK,EACZ,UAAU,EAAE,YAAY,EAAE,GACzB,KAAK,IAAI,KAAK,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,CAYhD;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,YAAY,SAAS,MAAM,EAClE,KAAK,EAAE,KAAK,EACZ,QAAQ,EAAE,YAAY,GACrB,KAAK,IAAI,KAAK,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,CAIhD"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/typeGuards.js b/node_modules/@azure/core-util/dist/esm/typeGuards.js new file mode 100644 index 0000000..f45852d --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/typeGuards.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +export function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +export function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. 
+ */ +export function objectHasProperty(thing, property) { + return (isDefined(thing) && typeof thing === "object" && property in thing); +} +//# sourceMappingURL=typeGuards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/typeGuards.js.map b/node_modules/@azure/core-util/dist/esm/typeGuards.js.map new file mode 100644 index 0000000..f6586c3 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/typeGuards.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typeGuards.js","sourceRoot":"","sources":["../../src/typeGuards.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,SAAS,CAAI,KAA2B;IACtD,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,sBAAsB,CACpC,KAAY,EACZ,UAA0B;IAE1B,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,CAAC;QACnD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,UAAU,EAAE,CAAC;QAClC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC;YACxC,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAC/B,KAAY,EACZ,QAAsB;IAEtB,OAAO,CACL,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAK,KAAiC,CAChG,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n */\nexport function isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n */\nexport function isObjectWithProperties(\n thing: Thing,\n properties: PropertyName[],\n): thing is Thing & Record {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n\n for (const property of properties) {\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n */\nexport function objectHasProperty(\n thing: Thing,\n property: PropertyName,\n): thing is Thing & Record {\n return (\n isDefined(thing) && typeof thing === \"object\" && property in (thing as Record)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.common.d.ts b/node_modules/@azure/core-util/dist/esm/uuidUtils.common.d.ts new file mode 100644 index 0000000..8f1c9ba --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.common.d.ts @@ -0,0 +1,13 @@ +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUUID(): string; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. 
+ */ +export declare function randomUUID(): string; +//# sourceMappingURL=uuidUtils.common.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.common.d.ts.map b/node_modules/@azure/core-util/dist/esm/uuidUtils.common.d.ts.map new file mode 100644 index 0000000..0d19e4a --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.common.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.common.d.ts","sourceRoot":"","sources":["../../src/uuidUtils.common.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAqBrC;AAED;;;;GAIG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAEnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.common.js b/node_modules/@azure/core-util/dist/esm/uuidUtils.common.js new file mode 100644 index 0000000..8484013 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.common.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export function generateUUID() { + let uuid = ""; + for (let i = 0; i < 32; i++) { + // Generate a random number between 0 and 15 + const randomNumber = Math.floor(Math.random() * 16); + // Set the UUID version to 4 in the 13th position + if (i === 12) { + uuid += "4"; + } + else if (i === 16) { + // Set the UUID variant to "10" in the 17th position + uuid += (randomNumber & 0x3) | 0x8; + } + else { + // Add a random hexadecimal digit to the UUID string + uuid += randomNumber.toString(16); + } + // Add hyphens to the UUID string at the appropriate positions + if (i === 7 || i === 11 || i === 15 || i === 19) { + uuid += "-"; + } + } + return uuid; +} +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. 
+ */ +export function randomUUID() { + return generateUUID(); +} +//# sourceMappingURL=uuidUtils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.common.js.map b/node_modules/@azure/core-util/dist/esm/uuidUtils.common.js.map new file mode 100644 index 0000000..78e1c16 --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.common.js","sourceRoot":"","sources":["../../src/uuidUtils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,IAAI,IAAI,GAAG,EAAE,CAAC;IACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5B,4CAA4C;QAC5C,MAAM,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC;QACpD,iDAAiD;QACjD,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YACb,IAAI,IAAI,GAAG,CAAC;QACd,CAAC;aAAM,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YACpB,oDAAoD;YACpD,IAAI,IAAI,CAAC,YAAY,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC;QACrC,CAAC;aAAM,CAAC;YACN,oDAAoD;YACpD,IAAI,IAAI,YAAY,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;QACpC,CAAC;QACD,8DAA8D;QAC9D,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YAChD,IAAI,IAAI,GAAG,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU;IACxB,OAAO,YAAY,EAAE,CAAC;AACxB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUUID(): string {\n let uuid = \"\";\n for (let i = 0; i < 32; i++) {\n // Generate a random number between 0 and 15\n const randomNumber = Math.floor(Math.random() * 16);\n // Set the UUID version to 4 in the 13th position\n if (i === 12) {\n uuid += \"4\";\n } else if (i === 16) {\n // Set the UUID variant to \"10\" in the 17th position\n uuid += (randomNumber & 0x3) | 0x8;\n } else {\n // Add a random hexadecimal digit to the UUID string\n uuid += randomNumber.toString(16);\n }\n // Add hyphens to the UUID string at the appropriate positions\n if (i === 7 || i === 11 || i === 15 || i === 19) {\n uuid += \"-\";\n }\n }\n return uuid;\n}\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function randomUUID(): string {\n return generateUUID();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.d.ts b/node_modules/@azure/core-util/dist/esm/uuidUtils.d.ts new file mode 100644 index 0000000..f510a4b --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.d.ts @@ -0,0 +1,7 @@ +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. 
+ */ +export declare function randomUUID(): string; +//# sourceMappingURL=uuidUtils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.d.ts.map b/node_modules/@azure/core-util/dist/esm/uuidUtils.d.ts.map new file mode 100644 index 0000000..b3d1b5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.d.ts","sourceRoot":"","sources":["../../src/uuidUtils.ts"],"names":[],"mappings":"AAmBA;;;;GAIG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAEnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.js b/node_modules/@azure/core-util/dist/esm/uuidUtils.js new file mode 100644 index 0000000..f20c10c --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.js @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { randomUUID as v4RandomUUID } from "crypto"; +// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. +const uuidFunction = typeof globalThis?.crypto?.randomUUID === "function" + ? globalThis.crypto.randomUUID.bind(globalThis.crypto) + : v4RandomUUID; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export function randomUUID() { + return uuidFunction(); +} +//# sourceMappingURL=uuidUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.js.map b/node_modules/@azure/core-util/dist/esm/uuidUtils.js.map new file mode 100644 index 0000000..d71e51a --- /dev/null +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.js","sourceRoot":"","sources":["../../src/uuidUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,IAAI,YAAY,EAAE,MAAM,QAAQ,CAAC;AAUpD,6FAA6F;AAC7F,MAAM,YAAY,GAChB,OAAO,UAAU,EAAE,MAAM,EAAE,UAAU,KAAK,UAAU;IAClD,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IACtD,CAAC,CAAC,YAAY,CAAC;AAEnB;;;;GAIG;AACH,MAAM,UAAU,UAAU;IACxB,OAAO,YAAY,EAAE,CAAC;AACxB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { randomUUID as v4RandomUUID } from \"crypto\";\n\ninterface Crypto {\n randomUUID(): string;\n}\n\ndeclare const globalThis: {\n crypto: Crypto;\n};\n\n// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+.\nconst uuidFunction =\n typeof globalThis?.crypto?.randomUUID === \"function\"\n ? globalThis.crypto.randomUUID.bind(globalThis.crypto)\n : v4RandomUUID;\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function randomUUID(): string {\n return uuidFunction();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/aborterUtils.d.ts b/node_modules/@azure/core-util/dist/react-native/aborterUtils.d.ts new file mode 100644 index 0000000..89a2f98 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/aborterUtils.d.ts @@ -0,0 +1,27 @@ +import type { AbortSignalLike } from "@azure/abort-controller"; +/** + * Options related to abort controller. + */ +export interface AbortOptions { + /** + * The abortSignal associated with containing operation. + */ + abortSignal?: AbortSignalLike; + /** + * The abort error message associated with containing operation. + */ + abortErrorMsg?: string; +} +/** + * Represents a function that returns a promise that can be aborted. 
+ */ +export type AbortablePromiseBuilder = (abortOptions: { + abortSignal?: AbortSignalLike; +}) => Promise; +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. + */ +export declare function cancelablePromiseRace(abortablePromiseBuilders: AbortablePromiseBuilder[], options?: { + abortSignal?: AbortSignalLike; +}): Promise; +//# sourceMappingURL=aborterUtils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/aborterUtils.d.ts.map b/node_modules/@azure/core-util/dist/react-native/aborterUtils.d.ts.map new file mode 100644 index 0000000..e7f705b --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/aborterUtils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"aborterUtils.d.ts","sourceRoot":"","sources":["../../src/aborterUtils.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE/D;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED;;GAEG;AACH,MAAM,MAAM,uBAAuB,CAAC,CAAC,IAAI,CAAC,YAAY,EAAE;IACtD,WAAW,CAAC,EAAE,eAAe,CAAC;CAC/B,KAAK,OAAO,CAAC,CAAC,CAAC,CAAC;AAEjB;;GAEG;AACH,wBAAsB,qBAAqB,CAAC,CAAC,SAAS,OAAO,EAAE,EAC7D,wBAAwB,EAAE,uBAAuB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,EAC9D,OAAO,CAAC,EAAE;IAAE,WAAW,CAAC,EAAE,eAAe,CAAA;CAAE,GAC1C,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAcpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/aborterUtils.js b/node_modules/@azure/core-util/dist/react-native/aborterUtils.js new file mode 100644 index 0000000..c34a95d --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/aborterUtils.js @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. 
+ */ +export async function cancelablePromiseRace(abortablePromiseBuilders, options) { + const aborter = new AbortController(); + function abortHandler() { + aborter.abort(); + } + options?.abortSignal?.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); + } + finally { + aborter.abort(); + options?.abortSignal?.removeEventListener("abort", abortHandler); + } +} +//# sourceMappingURL=aborterUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/aborterUtils.js.map b/node_modules/@azure/core-util/dist/react-native/aborterUtils.js.map new file mode 100644 index 0000000..ff71dce --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/aborterUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"aborterUtils.js","sourceRoot":"","sources":["../../src/aborterUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAyBlC;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CACzC,wBAA8D,EAC9D,OAA2C;IAE3C,MAAM,OAAO,GAAG,IAAI,eAAe,EAAE,CAAC;IACtC,SAAS,YAAY;QACnB,OAAO,CAAC,KAAK,EAAE,CAAC;IAClB,CAAC;IACD,OAAO,EAAE,WAAW,EAAE,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IAC9D,IAAI,CAAC;QACH,OAAO,MAAM,OAAO,CAAC,IAAI,CACvB,wBAAwB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CACxE,CAAC;IACJ,CAAC;YAAS,CAAC;QACT,OAAO,CAAC,KAAK,EAAE,CAAC;QAChB,OAAO,EAAE,WAAW,EAAE,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IACnE,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport type { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options related to abort controller.\n */\nexport interface AbortOptions {\n /**\n * The abortSignal associated with containing operation.\n */\n abortSignal?: AbortSignalLike;\n /**\n * The abort error message associated with containing operation.\n */\n abortErrorMsg?: string;\n}\n\n/**\n * Represents a function that returns a promise that can be aborted.\n */\nexport type AbortablePromiseBuilder = (abortOptions: {\n abortSignal?: AbortSignalLike;\n}) => Promise;\n\n/**\n * promise.race() wrapper that aborts rest of promises as soon as the first promise settles.\n */\nexport async function cancelablePromiseRace(\n abortablePromiseBuilders: AbortablePromiseBuilder[],\n options?: { abortSignal?: AbortSignalLike },\n): Promise {\n const aborter = new AbortController();\n function abortHandler(): void {\n aborter.abort();\n }\n options?.abortSignal?.addEventListener(\"abort\", abortHandler);\n try {\n return await Promise.race(\n abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal })),\n );\n } finally {\n aborter.abort();\n options?.abortSignal?.removeEventListener(\"abort\", abortHandler);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/bytesEncoding.d.ts b/node_modules/@azure/core-util/dist/react-native/bytesEncoding.d.ts new file mode 100644 index 0000000..48a9754 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/bytesEncoding.d.ts @@ -0,0 +1,17 @@ +/** The supported character encoding type */ +export type EncodingType = "utf-8" | "base64" | "base64url" | "hex"; +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +export declare function uint8ArrayToString(bytes: Uint8Array, 
format: EncodingType): string; +/** + * The helper that transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +export declare function stringToUint8Array(value: string, format: EncodingType): Uint8Array; +//# sourceMappingURL=bytesEncoding.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/bytesEncoding.d.ts.map b/node_modules/@azure/core-util/dist/react-native/bytesEncoding.d.ts.map new file mode 100644 index 0000000..700cf2c --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/bytesEncoding.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bytesEncoding.d.ts","sourceRoot":"","sources":["../../src/bytesEncoding.ts"],"names":[],"mappings":"AAGA,4CAA4C;AAC5C,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,QAAQ,GAAG,WAAW,GAAG,KAAK,CAAC;AAEpE;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,YAAY,GAAG,MAAM,CAElF;AAED;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,GAAG,UAAU,CAElF"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/bytesEncoding.js b/node_modules/@azure/core-util/dist/react-native/bytesEncoding.js new file mode 100644 index 0000000..a38cbeb --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/bytesEncoding.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +export function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); +} +/** + * The helper that transforms string to specific character encoded bytes array. 
+ * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +export function stringToUint8Array(value, format) { + return Buffer.from(value, format); +} +//# sourceMappingURL=bytesEncoding.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/bytesEncoding.js.map b/node_modules/@azure/core-util/dist/react-native/bytesEncoding.js.map new file mode 100644 index 0000000..2abf518 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/bytesEncoding.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bytesEncoding.js","sourceRoot":"","sources":["../../src/bytesEncoding.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC;;;;;GAKG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAiB,EAAE,MAAoB;IACxE,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AAC7C,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAa,EAAE,MAAoB;IACpE,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AACpC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/** The supported character encoding type */\nexport type EncodingType = \"utf-8\" | \"base64\" | \"base64url\" | \"hex\";\n\n/**\n * The helper that transforms bytes with specific character encoding into string\n * @param bytes - the uint8array bytes\n * @param format - the format we use to encode the byte\n * @returns a string of the encoded string\n */\nexport function uint8ArrayToString(bytes: Uint8Array, format: EncodingType): string {\n return Buffer.from(bytes).toString(format);\n}\n\n/**\n * The helper that transforms string to specific character encoded bytes array.\n * @param value - the string to be converted\n * @param format - the format we use to decode the value\n * @returns a uint8array\n */\nexport function stringToUint8Array(value: string, format: EncodingType): Uint8Array {\n return Buffer.from(value, format);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/checkEnvironment.d.ts b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.d.ts new file mode 100644 index 0000000..63273c1 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.d.ts @@ -0,0 +1,25 @@ +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +export declare const isBrowser: boolean; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +export declare const isWebWorker: boolean; +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +export declare const isDeno: boolean; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +export declare const isBun: boolean; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export declare const isNode: boolean; +/** + * A constant that indicates whether the environment the code is running is in React-Native. 
+ */ +export declare const isReactNative: boolean; +//# sourceMappingURL=checkEnvironment.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/checkEnvironment.d.ts.map b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.d.ts.map new file mode 100644 index 0000000..ccff768 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"checkEnvironment.d.ts","sourceRoot":"","sources":["../../src/checkEnvironment.ts"],"names":[],"mappings":"AAoCA;;GAEG;AAEH,eAAO,MAAM,SAAS,SAA0E,CAAC;AAEjG;;GAEG;AACH,eAAO,MAAM,WAAW,SAKiC,CAAC;AAE1D;;GAEG;AACH,eAAO,MAAM,MAAM,SAGuB,CAAC;AAE3C;;GAEG;AACH,eAAO,MAAM,KAAK,SAAmE,CAAC;AAEtF;;GAEG;AACH,eAAO,MAAM,MAAM,SAMX,CAAC;AAET;;GAEG;AAEH,eAAO,MAAM,aAAa,SACgD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js new file mode 100644 index 0000000..53ba62e --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +// eslint-disable-next-line @azure/azure-sdk/ts-no-window +export const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +export const isWebWorker = typeof self === "object" && + typeof self?.importScripts === "function" && + (self.constructor?.name === "DedicatedWorkerGlobalScope" || + self.constructor?.name === "ServiceWorkerGlobalScope" || + self.constructor?.name === "SharedWorkerGlobalScope"); +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +export const isDeno = typeof Deno !== "undefined" && + typeof Deno.version !== "undefined" && + typeof Deno.version.deno !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +export const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export const isNode = typeof globalThis.process !== "undefined" && + Boolean(globalThis.process.version) && + Boolean(globalThis.process.versions?.node) && + // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions + !isDeno && + !isBun; +/** + * A constant that indicates whether the environment the code is running is in React-Native. 
+ */ +// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js +export const isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; +//# sourceMappingURL=checkEnvironment.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js.map b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js.map new file mode 100644 index 0000000..5b1f90f --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js.map @@ -0,0 +1 @@ +{"version":3,"file":"checkEnvironment.js","sourceRoot":"","sources":["../../src/checkEnvironment.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAmClC;;GAEG;AACH,yDAAyD;AACzD,MAAM,CAAC,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,WAAW,CAAC;AAEjG;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GACtB,OAAO,IAAI,KAAK,QAAQ;IACxB,OAAO,IAAI,EAAE,aAAa,KAAK,UAAU;IACzC,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,4BAA4B;QACtD,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,0BAA0B;QACrD,IAAI,CAAC,WAAW,EAAE,IAAI,KAAK,yBAAyB,CAAC,CAAC;AAE1D;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GACjB,OAAO,IAAI,KAAK,WAAW;IAC3B,OAAO,IAAI,CAAC,OAAO,KAAK,WAAW;IACnC,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,WAAW,CAAC;AAE3C;;GAEG;AACH,MAAM,CAAC,MAAM,KAAK,GAAG,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,WAAW,CAAC;AAEtF;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GACjB,OAAO,UAAU,CAAC,OAAO,KAAK,WAAW;IACzC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC;IACnC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC;IAC1C,+HAA+H;IAC/H,CAAC,MAAM;IACP,CAAC,KAAK,CAAC;AAET;;GAEG;AACH,4GAA4G;AAC5G,MAAM,CAAC,MAAM,aAAa,GACxB,OAAO,SAAS,KAAK,WAAW,IAAI,SAAS,EAAE,OAAO,KAAK,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ninterface Window {\n document: unknown;\n}\n\ninterface DedicatedWorkerGlobalScope {\n constructor: {\n name: string;\n };\n\n importScripts: (...paths: string[]) => void;\n}\n\ninterface Navigator {\n product: string;\n}\n\ninterface DenoGlobal {\n version: {\n deno: string;\n };\n}\n\ninterface BunGlobal {\n version: string;\n}\n\n// eslint-disable-next-line @azure/azure-sdk/ts-no-window\ndeclare const window: Window;\ndeclare const self: DedicatedWorkerGlobalScope;\ndeclare const Deno: DenoGlobal;\ndeclare const Bun: BunGlobal;\ndeclare const navigator: Navigator;\n\n/**\n * A constant that indicates whether the environment the code is running is a Web Browser.\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-no-window\nexport const isBrowser = typeof window !== \"undefined\" && typeof window.document !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is a Web Worker.\n */\nexport const isWebWorker =\n typeof self === \"object\" &&\n typeof self?.importScripts === \"function\" &&\n (self.constructor?.name === \"DedicatedWorkerGlobalScope\" ||\n self.constructor?.name === \"ServiceWorkerGlobalScope\" ||\n self.constructor?.name === \"SharedWorkerGlobalScope\");\n\n/**\n * A constant that indicates whether the environment the code is running is Deno.\n */\nexport const isDeno =\n typeof Deno !== \"undefined\" &&\n typeof Deno.version !== \"undefined\" &&\n typeof Deno.version.deno !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is Bun.sh.\n */\nexport const isBun = typeof Bun !== \"undefined\" && typeof Bun.version !== \"undefined\";\n\n/**\n * A constant that indicates whether the environment the code is running is 
Node.JS.\n */\nexport const isNode =\n typeof globalThis.process !== \"undefined\" &&\n Boolean(globalThis.process.version) &&\n Boolean(globalThis.process.versions?.node) &&\n // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions\n !isDeno &&\n !isBun;\n\n/**\n * A constant that indicates whether the environment the code is running is in React-Native.\n */\n// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js\nexport const isReactNative =\n typeof navigator !== \"undefined\" && navigator?.product === \"ReactNative\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.d.ts b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.d.ts new file mode 100644 index 0000000..183c705 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.d.ts @@ -0,0 +1,16 @@ +import type { AbortOptions } from "./aborterUtils.js"; +/** + * Options for the createAbortablePromise function. + */ +export interface CreateAbortablePromiseOptions extends AbortOptions { + /** A function to be called if the promise was aborted */ + cleanupBeforeAbort?: () => void; +} +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +export declare function createAbortablePromise(buildPromise: (resolve: (value: T | PromiseLike) => void, reject: (reason?: any) => void) => void, options?: CreateAbortablePromiseOptions): Promise; +//# sourceMappingURL=createAbortablePromise.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.d.ts.map b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.d.ts.map new file mode 100644 index 0000000..705d2e9 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"createAbortablePromise.d.ts","sourceRoot":"","sources":["../../src/createAbortablePromise.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAEtD;;GAEG;AACH,MAAM,WAAW,6BAA8B,SAAQ,YAAY;IACjE,yDAAyD;IACzD,kBAAkB,CAAC,EAAE,MAAM,IAAI,CAAC;CACjC;AAED;;;;;GAKG;AACH,wBAAgB,sBAAsB,CAAC,CAAC,EACtC,YAAY,EAAE,CACZ,OAAO,EAAE,CAAC,KAAK,EAAE,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,KAAK,IAAI,EAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,IAAI,KAC3B,IAAI,EACT,OAAO,CAAC,EAAE,6BAA6B,GACtC,OAAO,CAAC,CAAC,CAAC,CAiCZ"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js new file mode 100644 index 0000000..fc54e7c --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +export function createAbortablePromise(buildPromise, options) { + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? 
{}; + return new Promise((resolve, reject) => { + function rejectOnAbort() { + reject(new AbortError(abortErrorMsg ?? "The operation was aborted.")); + } + function removeListeners() { + abortSignal?.removeEventListener("abort", onAbort); + } + function onAbort() { + cleanupBeforeAbort?.(); + removeListeners(); + rejectOnAbort(); + } + if (abortSignal?.aborted) { + return rejectOnAbort(); + } + try { + buildPromise((x) => { + removeListeners(); + resolve(x); + }, (x) => { + removeListeners(); + reject(x); + }); + } + catch (err) { + reject(err); + } + abortSignal?.addEventListener("abort", onAbort); + }); +} +//# sourceMappingURL=createAbortablePromise.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js.map b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js.map new file mode 100644 index 0000000..6f928a9 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createAbortablePromise.js","sourceRoot":"","sources":["../../src/createAbortablePromise.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAWrD;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,YAGS,EACT,OAAuC;IAEvC,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,aAAa,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACzE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,SAAS,aAAa;YACpB,MAAM,CAAC,IAAI,UAAU,CAAC,aAAa,IAAI,4BAA4B,CAAC,CAAC,CAAC;QACxE,CAAC;QACD,SAAS,eAAe;YACtB,WAAW,EAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACrD,CAAC;QACD,SAAS,OAAO;YACd,kBAAkB,EAAE,EAAE,CAAC;YACvB,eAAe,EAAE,CAAC;YAClB,aAAa,EAAE,CAAC;QAClB,CAAC;QACD,IAAI,WAAW,EAAE,OAAO,EAAE,CAAC;YACzB,OAAO,aAAa,EAAE,CAAC;QACzB,CAAC;QACD,IAAI,CAAC;YACH,YAAY,CACV,CAAC,CAAC,EAAE,EAAE;gBACJ,eAAe,EAAE,CAAC;gBAClB,OAAO,CAAC,CAAC,CAAC,CAAC;YACb,CAAC,EACD,CAAC,CAAC,EAAE,EAAE;gBACJ,eAAe,EAAE,CAAC;gBAClB,MAAM,CAAC,CAAC,CAAC,CAAC;YACZ,CAAC,CACF,CAAC;QACJ,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC;QACD,WAAW,EAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAClD,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from \"@azure/abort-controller\";\nimport type { AbortOptions } from \"./aborterUtils.js\";\n\n/**\n * Options for the createAbortablePromise function.\n */\nexport interface CreateAbortablePromiseOptions extends AbortOptions {\n /** A function to be called if the promise was aborted */\n cleanupBeforeAbort?: () => void;\n}\n\n/**\n * Creates an abortable promise.\n * @param buildPromise - A function that takes the resolve and reject functions as parameters.\n * @param options - The options for the abortable promise.\n * @returns A promise that can be aborted.\n */\nexport function createAbortablePromise(\n buildPromise: (\n resolve: (value: T | PromiseLike) => void,\n reject: (reason?: any) => void,\n ) => void,\n options?: CreateAbortablePromiseOptions,\n): Promise {\n const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {};\n return new Promise((resolve, reject) => {\n function rejectOnAbort(): void {\n reject(new AbortError(abortErrorMsg ?? 
\"The operation was aborted.\"));\n }\n function removeListeners(): void {\n abortSignal?.removeEventListener(\"abort\", onAbort);\n }\n function onAbort(): void {\n cleanupBeforeAbort?.();\n removeListeners();\n rejectOnAbort();\n }\n if (abortSignal?.aborted) {\n return rejectOnAbort();\n }\n try {\n buildPromise(\n (x) => {\n removeListeners();\n resolve(x);\n },\n (x) => {\n removeListeners();\n reject(x);\n },\n );\n } catch (err) {\n reject(err);\n }\n abortSignal?.addEventListener(\"abort\", onAbort);\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/delay.d.ts b/node_modules/@azure/core-util/dist/react-native/delay.d.ts new file mode 100644 index 0000000..9dc4b5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/delay.d.ts @@ -0,0 +1,14 @@ +import type { AbortOptions } from "./aborterUtils.js"; +/** + * Options for support abort functionality for the delay method + */ +export interface DelayOptions extends AbortOptions { +} +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +export declare function delay(timeInMs: number, options?: DelayOptions): Promise; +//# sourceMappingURL=delay.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/delay.d.ts.map b/node_modules/@azure/core-util/dist/react-native/delay.d.ts.map new file mode 100644 index 0000000..8d2d31b --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/delay.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.d.ts","sourceRoot":"","sources":["../../src/delay.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAKtD;;GAEG;AACH,MAAM,WAAW,YAAa,SAAQ,YAAY;CAAG;AAErD;;;;;GAKG;AACH,wBAAgB,KAAK,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,CAa7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/delay.js b/node_modules/@azure/core-util/dist/react-native/delay.js new file mode 100644 index 0000000..3e12ea5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/delay.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createAbortablePromise } from "./createAbortablePromise.js"; +const StandardAbortMessage = "The delay was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +export function delay(timeInMs, options) { + let token; + const { abortSignal, abortErrorMsg } = options ?? {}; + return createAbortablePromise((resolve) => { + token = setTimeout(resolve, timeInMs); + }, { + cleanupBeforeAbort: () => clearTimeout(token), + abortSignal, + abortErrorMsg: abortErrorMsg ?? 
StandardAbortMessage, + }); +} +//# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/delay.js.map b/node_modules/@azure/core-util/dist/react-native/delay.js.map new file mode 100644 index 0000000..b02f90c --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/delay.js.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.js","sourceRoot":"","sources":["../../src/delay.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AAErE,MAAM,oBAAoB,GAAG,wBAAwB,CAAC;AAOtD;;;;;GAKG;AACH,MAAM,UAAU,KAAK,CAAC,QAAgB,EAAE,OAAsB;IAC5D,IAAI,KAAoC,CAAC;IACzC,MAAM,EAAE,WAAW,EAAE,aAAa,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACrD,OAAO,sBAAsB,CAC3B,CAAC,OAAO,EAAE,EAAE;QACV,KAAK,GAAG,UAAU,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;IACxC,CAAC,EACD;QACE,kBAAkB,EAAE,GAAG,EAAE,CAAC,YAAY,CAAC,KAAK,CAAC;QAC7C,WAAW;QACX,aAAa,EAAE,aAAa,IAAI,oBAAoB;KACrD,CACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport type { AbortOptions } from \"./aborterUtils.js\";\nimport { createAbortablePromise } from \"./createAbortablePromise.js\";\n\nconst StandardAbortMessage = \"The delay was aborted.\";\n\n/**\n * Options for support abort functionality for the delay method\n */\nexport interface DelayOptions extends AbortOptions {}\n\n/**\n * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds.\n * @param timeInMs - The number of milliseconds to be delayed.\n * @param options - The options for delay - currently abort options\n * @returns Promise that is resolved after timeInMs\n */\nexport function delay(timeInMs: number, options?: DelayOptions): Promise {\n let token: ReturnType;\n const { abortSignal, abortErrorMsg } = options ?? {};\n return createAbortablePromise(\n (resolve) => {\n token = setTimeout(resolve, timeInMs);\n },\n {\n cleanupBeforeAbort: () => clearTimeout(token),\n abortSignal,\n abortErrorMsg: abortErrorMsg ?? StandardAbortMessage,\n },\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/error.d.ts b/node_modules/@azure/core-util/dist/react-native/error.d.ts new file mode 100644 index 0000000..5027f12 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/error.d.ts @@ -0,0 +1,13 @@ +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +export declare function isError(e: unknown): e is Error; +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. 
+ * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +export declare function getErrorMessage(e: unknown): string; +//# sourceMappingURL=error.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/error.d.ts.map b/node_modules/@azure/core-util/dist/react-native/error.d.ts.map new file mode 100644 index 0000000..d56797d --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/error.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"error.d.ts","sourceRoot":"","sources":["../../src/error.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,wBAAgB,OAAO,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,IAAI,KAAK,CAO9C;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,CAAC,EAAE,OAAO,GAAG,MAAM,CAgBlD"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/error.js b/node_modules/@azure/core-util/dist/react-native/error.js new file mode 100644 index 0000000..8d107eb --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/error.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObject } from "./object.js"; +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +export function isError(e) { + if (isObject(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; + } + return false; +} +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. + * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +export function getErrorMessage(e) { + if (isError(e)) { + return e.message; + } + else { + let stringified; + try { + if (typeof e === "object" && e) { + stringified = JSON.stringify(e); + } + else { + stringified = String(e); + } + } + catch (err) { + stringified = "[unable to stringify input]"; + } + return `Unknown error ${stringified}`; + } +} +//# sourceMappingURL=error.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/error.js.map b/node_modules/@azure/core-util/dist/react-native/error.js.map new file mode 100644 index 0000000..3e33e10 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"error.js","sourceRoot":"","sources":["../../src/error.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC;;;GAGG;AACH,MAAM,UAAU,OAAO,CAAC,CAAU;IAChC,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC;QAChB,MAAM,OAAO,GAAG,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC;QAC3C,MAAM,UAAU,GAAG,OAAO,CAAC,CAAC,OAAO,KAAK,QAAQ,CAAC;QACjD,OAAO,OAAO,IAAI,UAAU,CAAC;IAC/B,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,CAAU;IACxC,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACf,OAAO,CAAC,CAAC,OAAO,CAAC;IACnB,CAAC;SAAM,CAAC;QACN,IAAI,WAAmB,CAAC;QACxB,IAAI,CAAC;YACH,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,EAAE,CAAC;gBAC/B,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;iBAAM,CAAC;gBACN,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAC1B,CAAC;QACH,CAAC;QAAC,OAAO,GAAQ,EAAE,CAAC;YAClB,WAAW,GAAG,6BAA6B,CAAC;QAC9C,CAAC;QACD,OAAO,iBAAiB,WAAW,EAAE,CAAC;IACxC,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObject } from \"./object.js\";\n\n/**\n * Typeguard for 
an error object shape (has name and message)\n * @param e - Something caught by a catch clause.\n */\nexport function isError(e: unknown): e is Error {\n if (isObject(e)) {\n const hasName = typeof e.name === \"string\";\n const hasMessage = typeof e.message === \"string\";\n return hasName && hasMessage;\n }\n return false;\n}\n\n/**\n * Given what is thought to be an error object, return the message if possible.\n * If the message is missing, returns a stringified version of the input.\n * @param e - Something thrown from a try block\n * @returns The error message or a string of the input\n */\nexport function getErrorMessage(e: unknown): string {\n if (isError(e)) {\n return e.message;\n } else {\n let stringified: string;\n try {\n if (typeof e === \"object\" && e) {\n stringified = JSON.stringify(e);\n } else {\n stringified = String(e);\n }\n } catch (err: any) {\n stringified = \"[unable to stringify input]\";\n }\n return `Unknown error ${stringified}`;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/index.d.ts b/node_modules/@azure/core-util/dist/react-native/index.d.ts new file mode 100644 index 0000000..14fada5 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/index.d.ts @@ -0,0 +1,12 @@ +export { delay, type DelayOptions } from "./delay.js"; +export { type AbortOptions, cancelablePromiseRace, type AbortablePromiseBuilder, } from "./aborterUtils.js"; +export { createAbortablePromise, type CreateAbortablePromiseOptions, } from "./createAbortablePromise.js"; +export { getRandomIntegerInclusive } from "./random.js"; +export { isObject, type UnknownObject } from "./object.js"; +export { isError, getErrorMessage } from "./error.js"; +export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; +export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; +export { randomUUID } from "./uuidUtils.js"; +export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { uint8ArrayToString, stringToUint8Array, type EncodingType } from "./bytesEncoding.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/index.d.ts.map b/node_modules/@azure/core-util/dist/react-native/index.d.ts.map new file mode 100644 index 0000000..77255da --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,KAAK,EAAE,KAAK,YAAY,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EACL,KAAK,YAAY,EACjB,qBAAqB,EACrB,KAAK,uBAAuB,GAC7B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,sBAAsB,EACtB,KAAK,6BAA6B,GACnC,MAAM,6BAA6B,CAAC;AACrC,OAAO,EAAE,yBAAyB,EAAE,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAE,KAAK,aAAa,EAAE,MAAM,aAAa,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAAE,SAAS,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACvF,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EACL,SAAS,EACT,KAAK,EACL,MAAM,EACN,MAAM,EACN,aAAa,EACb,WAAW,GACZ,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAE,KAAK,YAAY,EAAE,MAAM,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/index.js b/node_modules/@azure/core-util/dist/react-native/index.js new file mode 100644 index 0000000..64b7b80 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/index.js 
@@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { delay } from "./delay.js"; +export { cancelablePromiseRace, } from "./aborterUtils.js"; +export { createAbortablePromise, } from "./createAbortablePromise.js"; +export { getRandomIntegerInclusive } from "./random.js"; +export { isObject } from "./object.js"; +export { isError, getErrorMessage } from "./error.js"; +export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; +export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; +export { randomUUID } from "./uuidUtils.js"; +export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { uint8ArrayToString, stringToUint8Array } from "./bytesEncoding.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/index.js.map b/node_modules/@azure/core-util/dist/react-native/index.js.map new file mode 100644 index 0000000..3d3e618 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,KAAK,EAAqB,MAAM,YAAY,CAAC;AACtD,OAAO,EAEL,qBAAqB,GAEtB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,sBAAsB,GAEvB,MAAM,6BAA6B,CAAC;AACrC,OAAO,EAAE,yBAAyB,EAAE,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAsB,MAAM,aAAa,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AACtD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAAE,SAAS,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACvF,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EACL,SAAS,EACT,KAAK,EACL,MAAM,EACN,MAAM,EACN,aAAa,EACb,WAAW,GACZ,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAqB,MAAM,oBAAoB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { delay, type DelayOptions } from \"./delay.js\";\nexport {\n type AbortOptions,\n cancelablePromiseRace,\n type AbortablePromiseBuilder,\n} from \"./aborterUtils.js\";\nexport {\n createAbortablePromise,\n type CreateAbortablePromiseOptions,\n} from \"./createAbortablePromise.js\";\nexport { getRandomIntegerInclusive } from \"./random.js\";\nexport { isObject, type UnknownObject } from \"./object.js\";\nexport { isError, getErrorMessage } from \"./error.js\";\nexport { computeSha256Hash, computeSha256Hmac } from \"./sha256.js\";\nexport { isDefined, isObjectWithProperties, objectHasProperty } from \"./typeGuards.js\";\nexport { randomUUID } from \"./uuidUtils.js\";\nexport {\n isBrowser,\n isBun,\n isNode,\n isDeno,\n isReactNative,\n isWebWorker,\n} from \"./checkEnvironment.js\";\nexport { uint8ArrayToString, stringToUint8Array, type EncodingType } from \"./bytesEncoding.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/object.d.ts b/node_modules/@azure/core-util/dist/react-native/object.d.ts new file mode 100644 index 0000000..fc3f33a --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/object.d.ts @@ -0,0 +1,12 @@ +/** + * A generic shape for a plain JS object. + */ +export type UnknownObject = { + [s: string]: unknown; +}; +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. 
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=object.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/object.d.ts.map b/node_modules/@azure/core-util/dist/react-native/object.d.ts.map new file mode 100644 index 0000000..66b39bd --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/object.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"object.d.ts","sourceRoot":"","sources":["../../src/object.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,OAAO,CAAA;CAAE,CAAC;AAErD;;;GAGG;AACH,wBAAgB,QAAQ,CAAC,KAAK,EAAE,OAAO,GAAG,KAAK,IAAI,aAAa,CAQ/D"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/object.js b/node_modules/@azure/core-util/dist/react-native/object.js new file mode 100644 index 0000000..ff04af8 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/object.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +export function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +//# sourceMappingURL=object.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/object.js.map b/node_modules/@azure/core-util/dist/react-native/object.js.map new file mode 100644 index 0000000..3a5f096 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/object.js.map @@ -0,0 +1 @@ +{"version":3,"file":"object.js","sourceRoot":"","sources":["../../src/object.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC;;;GAGG;AACH,MAAM,UAAU,QAAQ,CAAC,KAAc;IACrC,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;QACzB,KAAK,KAAK,IAAI;QACd,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QACrB,CAAC,CAAC,KAAK,YAAY,MAAM,CAAC;QAC1B,CAAC,CAAC,KAAK,YAAY,IAAI,CAAC,CACzB,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A generic shape for a plain JS object.\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * Helper to determine when an input is a generic JS object.\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/package.json b/node_modules/@azure/core-util/dist/react-native/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-util/dist/react-native/random.d.ts b/node_modules/@azure/core-util/dist/react-native/random.d.ts new file mode 100644 index 0000000..9e9631a --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/random.d.ts @@ -0,0 +1,10 @@ +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. 
If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +export declare function getRandomIntegerInclusive(min: number, max: number): number; +//# sourceMappingURL=random.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/random.d.ts.map b/node_modules/@azure/core-util/dist/react-native/random.d.ts.map new file mode 100644 index 0000000..8a91316 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/random.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"random.d.ts","sourceRoot":"","sources":["../../src/random.ts"],"names":[],"mappings":"AAGA;;;;;;;GAOG;AACH,wBAAgB,yBAAyB,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,MAAM,CAS1E"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/random.js b/node_modules/@azure/core-util/dist/react-native/random.js new file mode 100644 index 0000000..aef3288 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/random.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +export function getRandomIntegerInclusive(min, max) { + // Make sure inputs are integers. + min = Math.ceil(min); + max = Math.floor(max); + // Pick a random offset from zero to the size of the range. + // Since Math.random() can never return 1, we have to make the range one larger + // in order to be inclusive of the maximum value after we take the floor. + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; +} +//# sourceMappingURL=random.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/random.js.map b/node_modules/@azure/core-util/dist/react-native/random.js.map new file mode 100644 index 0000000..93d4d74 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/random.js.map @@ -0,0 +1 @@ +{"version":3,"file":"random.js","sourceRoot":"","sources":["../../src/random.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;GAOG;AACH,MAAM,UAAU,yBAAyB,CAAC,GAAW,EAAE,GAAW;IAChE,iCAAiC;IACjC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACrB,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACtB,2DAA2D;IAC3D,+EAA+E;IAC/E,yEAAyE;IACzE,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;IAC3D,OAAO,MAAM,GAAG,GAAG,CAAC;AACtB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Returns a random integer value between a lower and upper bound,\n * inclusive of both bounds.\n * Note that this uses Math.random and isn't secure. 
If you need to use\n * this for any kind of security purpose, find a better source of random.\n * @param min - The smallest integer value allowed.\n * @param max - The largest integer value allowed.\n */\nexport function getRandomIntegerInclusive(min: number, max: number): number {\n // Make sure inputs are integers.\n min = Math.ceil(min);\n max = Math.floor(max);\n // Pick a random offset from zero to the size of the range.\n // Since Math.random() can never return 1, we have to make the range one larger\n // in order to be inclusive of the maximum value after we take the floor.\n const offset = Math.floor(Math.random() * (max - min + 1));\n return offset + min;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/sha256.d.ts b/node_modules/@azure/core-util/dist/react-native/sha256.d.ts new file mode 100644 index 0000000..a4b7b98 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/sha256.d.ts @@ -0,0 +1,14 @@ +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +export declare function computeSha256Hmac(key: string, stringToSign: string, encoding: "base64" | "hex"): Promise; +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. + */ +export declare function computeSha256Hash(content: string, encoding: "base64" | "hex"): Promise; +//# sourceMappingURL=sha256.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/sha256.d.ts.map b/node_modules/@azure/core-util/dist/react-native/sha256.d.ts.map new file mode 100644 index 0000000..fffa8fc --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/sha256.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"sha256.d.ts","sourceRoot":"","sources":["../../src/sha256.ts"],"names":[],"mappings":"AAKA;;;;;GAKG;AACH,wBAAsB,iBAAiB,CACrC,GAAG,EAAE,MAAM,EACX,YAAY,EAAE,MAAM,EACpB,QAAQ,EAAE,QAAQ,GAAG,KAAK,GACzB,OAAO,CAAC,MAAM,CAAC,CAIjB;AAED;;;;GAIG;AACH,wBAAsB,iBAAiB,CACrC,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE,QAAQ,GAAG,KAAK,GACzB,OAAO,CAAC,MAAM,CAAC,CAEjB"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/sha256.js b/node_modules/@azure/core-util/dist/react-native/sha256.js new file mode 100644 index 0000000..6704d38 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/sha256.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHash, createHmac } from "crypto"; +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +export async function computeSha256Hmac(key, stringToSign, encoding) { + const decodedKey = Buffer.from(key, "base64"); + return createHmac("sha256", decodedKey).update(stringToSign).digest(encoding); +} +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. 
+ */ +export async function computeSha256Hash(content, encoding) { + return createHash("sha256").update(content).digest(encoding); +} +//# sourceMappingURL=sha256.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/sha256.js.map b/node_modules/@azure/core-util/dist/react-native/sha256.js.map new file mode 100644 index 0000000..2f3932e --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/sha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"sha256.js","sourceRoot":"","sources":["../../src/sha256.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEhD;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,GAAW,EACX,YAAoB,EACpB,QAA0B;IAE1B,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;IAE9C,OAAO,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;AAChF,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,OAAe,EACf,QAA0B;IAE1B,OAAO,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;AAC/D,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHash, createHmac } from \"crypto\";\n\n/**\n * Generates a SHA-256 HMAC signature.\n * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash.\n * @param stringToSign - The data to be signed.\n * @param encoding - The textual encoding to use for the returned HMAC digest.\n */\nexport async function computeSha256Hmac(\n key: string,\n stringToSign: string,\n encoding: \"base64\" | \"hex\",\n): Promise {\n const decodedKey = Buffer.from(key, \"base64\");\n\n return createHmac(\"sha256\", decodedKey).update(stringToSign).digest(encoding);\n}\n\n/**\n * Generates a SHA-256 hash.\n * @param content - The data to be included in the hash.\n * @param encoding - The textual encoding to use for the returned hash.\n */\nexport async function computeSha256Hash(\n content: string,\n encoding: \"base64\" | \"hex\",\n): Promise {\n return createHash(\"sha256\").update(content).digest(encoding);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/typeGuards.d.ts b/node_modules/@azure/core-util/dist/react-native/typeGuards.d.ts new file mode 100644 index 0000000..50ccc5d --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/typeGuards.d.ts @@ -0,0 +1,18 @@ +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +export declare function isDefined(thing: T | undefined | null): thing is T; +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +export declare function isObjectWithProperties(thing: Thing, properties: PropertyName[]): thing is Thing & Record; +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. 
+ */ +export declare function objectHasProperty(thing: Thing, property: PropertyName): thing is Thing & Record; +//# sourceMappingURL=typeGuards.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/typeGuards.d.ts.map b/node_modules/@azure/core-util/dist/react-native/typeGuards.d.ts.map new file mode 100644 index 0000000..f20f041 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/typeGuards.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"typeGuards.d.ts","sourceRoot":"","sources":["../../src/typeGuards.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,SAAS,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,GAAG,SAAS,GAAG,IAAI,GAAG,KAAK,IAAI,CAAC,CAEpE;AAED;;;;GAIG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,YAAY,SAAS,MAAM,EACvE,KAAK,EAAE,KAAK,EACZ,UAAU,EAAE,YAAY,EAAE,GACzB,KAAK,IAAI,KAAK,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,CAYhD;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,YAAY,SAAS,MAAM,EAClE,KAAK,EAAE,KAAK,EACZ,QAAQ,EAAE,YAAY,GACrB,KAAK,IAAI,KAAK,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,CAIhD"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/typeGuards.js b/node_modules/@azure/core-util/dist/react-native/typeGuards.js new file mode 100644 index 0000000..f45852d --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/typeGuards.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +export function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +export function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. 
+ */ +export function objectHasProperty(thing, property) { + return (isDefined(thing) && typeof thing === "object" && property in thing); +} +//# sourceMappingURL=typeGuards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/typeGuards.js.map b/node_modules/@azure/core-util/dist/react-native/typeGuards.js.map new file mode 100644 index 0000000..f6586c3 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/typeGuards.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typeGuards.js","sourceRoot":"","sources":["../../src/typeGuards.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,SAAS,CAAI,KAA2B;IACtD,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,sBAAsB,CACpC,KAAY,EACZ,UAA0B;IAE1B,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,CAAC;QACnD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,UAAU,EAAE,CAAC;QAClC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC;YACxC,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAC/B,KAAY,EACZ,QAAsB;IAEtB,OAAO,CACL,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAK,KAAiC,CAChG,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n */\nexport function isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n */\nexport function isObjectWithProperties(\n thing: Thing,\n properties: PropertyName[],\n): thing is Thing & Record {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n\n for (const property of properties) {\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n */\nexport function objectHasProperty(\n thing: Thing,\n property: PropertyName,\n): thing is Thing & Record {\n return (\n isDefined(thing) && typeof thing === \"object\" && property in (thing as Record)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/uuidUtils-react-native.d.mts.map b/node_modules/@azure/core-util/dist/react-native/uuidUtils-react-native.d.mts.map new file mode 100644 index 0000000..edee035 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/uuidUtils-react-native.d.mts.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils-react-native.d.mts","sourceRoot":"","sources":["../../src/uuidUtils-react-native.mts"],"names":[],"mappings":"AAGA,OAAO,EAAE,UAAU,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/uuidUtils-react-native.mjs.map b/node_modules/@azure/core-util/dist/react-native/uuidUtils-react-native.mjs.map new file mode 100644 index 0000000..8a5debf --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/uuidUtils-react-native.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"uuidUtils-react-native.mjs","sourceRoot":"","sources":["../../src/uuidUtils-react-native.mts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,uBAAuB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { randomUUID } from \"./uuidUtils.common.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.d.ts b/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.d.ts new file mode 100644 index 0000000..8f1c9ba --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.d.ts @@ -0,0 +1,13 @@ +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUUID(): string; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export declare function randomUUID(): string; +//# sourceMappingURL=uuidUtils.common.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.d.ts.map b/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.d.ts.map new file mode 100644 index 0000000..0d19e4a --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.common.d.ts","sourceRoot":"","sources":["../../src/uuidUtils.common.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAqBrC;AAED;;;;GAIG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAEnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.js b/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.js new file mode 100644 index 0000000..8484013 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +export function generateUUID() { + let uuid = ""; + for (let i = 0; i < 32; i++) { + // Generate a random number between 0 and 15 + const randomNumber = Math.floor(Math.random() * 16); + // Set the UUID version to 4 in the 13th position + if (i === 12) { + uuid += "4"; + } + else if (i === 16) { + // Set the UUID variant to "10" in the 17th position + uuid += (randomNumber & 0x3) | 0x8; + } + else { + // Add a random hexadecimal digit to the UUID string + uuid += randomNumber.toString(16); + } + // Add hyphens to the UUID string at the appropriate positions + if (i === 7 || i === 11 || i === 15 || i === 19) { + uuid += "-"; + } + } + return uuid; +} +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. 
+ */ +export function randomUUID() { + return generateUUID(); +} +//# sourceMappingURL=uuidUtils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.js.map b/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.js.map new file mode 100644 index 0000000..78e1c16 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/uuidUtils.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uuidUtils.common.js","sourceRoot":"","sources":["../../src/uuidUtils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,IAAI,IAAI,GAAG,EAAE,CAAC;IACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5B,4CAA4C;QAC5C,MAAM,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC;QACpD,iDAAiD;QACjD,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YACb,IAAI,IAAI,GAAG,CAAC;QACd,CAAC;aAAM,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YACpB,oDAAoD;YACpD,IAAI,IAAI,CAAC,YAAY,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC;QACrC,CAAC;aAAM,CAAC;YACN,oDAAoD;YACpD,IAAI,IAAI,YAAY,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;QACpC,CAAC;QACD,8DAA8D;QAC9D,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;YAChD,IAAI,IAAI,GAAG,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU;IACxB,OAAO,YAAY,EAAE,CAAC;AACxB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUUID(): string {\n let uuid = \"\";\n for (let i = 0; i < 32; i++) {\n // Generate a random number between 0 and 15\n const randomNumber = Math.floor(Math.random() * 16);\n // Set the UUID version to 4 in the 13th position\n if (i === 12) {\n uuid += \"4\";\n } else if (i === 16) {\n // Set the UUID variant to \"10\" in the 17th position\n uuid += (randomNumber & 0x3) | 0x8;\n } else {\n // Add a random hexadecimal digit to the UUID string\n uuid += randomNumber.toString(16);\n }\n // Add hyphens to the UUID string at the appropriate positions\n if (i === 7 || i === 11 || i === 15 || i === 19) {\n uuid += \"-\";\n }\n }\n return uuid;\n}\n\n/**\n * Generated Universally Unique Identifier\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function randomUUID(): string {\n return generateUUID();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/uuidUtils.d.ts b/node_modules/@azure/core-util/dist/react-native/uuidUtils.d.ts new file mode 100644 index 0000000..e954fc2 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/uuidUtils.d.ts @@ -0,0 +1,2 @@ +export { randomUUID } from "./uuidUtils.common.js"; +//# sourceMappingURL=uuidUtils-react-native.d.mts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/uuidUtils.js b/node_modules/@azure/core-util/dist/react-native/uuidUtils.js new file mode 100644 index 0000000..2207c73 --- /dev/null +++ b/node_modules/@azure/core-util/dist/react-native/uuidUtils.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { randomUUID } from "./uuidUtils.common.js"; +//# sourceMappingURL=uuidUtils-react-native.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/LICENSE b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/README.md b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/README.md new file mode 100644 index 0000000..2c8a197 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/README.md @@ -0,0 +1,69 @@ +# Azure Abort Controller client library for JavaScript + +The `@azure/abort-controller` package provides `AbortSignalLike` interface and +`AbortError` classes to make it easier to work with the +[AbortController](https://developer.mozilla.org/docs/Web/API/AbortController) + and the `AbortSignal` used by +[fetch](https://developer.mozilla.org/docs/Web/API/Fetch_API) built into modern JavaScript platforms. + +Customers of Azure SDK for JavaScript in general do not need to use this library. Instead they +use `AbortController` and `AbortSignal` provided by their platforms and pass the abort signals to Azure SDK operations. + +Key links: + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller) +- [Package (npm)](https://www.npmjs.com/package/@azure/abort-controller) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/overview/azure/abort-controller-readme) + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/abort-controller +``` + +## Key Concepts + +Use `AbortController` to create an `AbortSignal` which can then be passed to Azure SDK operations to cancel +pending work. The `AbortSignal` can be accessed via the `signal` property on an instantiated `AbortController`. +An `AbortSignal` can also be returned directly from a static method, e.g. `AbortSignal.timeout(100)`. +that is cancelled after 100 milliseconds. + +## Examples + +The below examples assume that `doAsyncWork` is a function that takes a bag of properties, one of which is +of the abort signal. 
+ +### Example 1 - basic usage + +```js +const controller = new AbortController(); +doAsyncWork({ abortSignal: controller.signal }); + +// at some point later +controller.abort(); +``` + +### Example 2 - Aborting with timeout + +```js +const signal = AbortSignal.timeout(1000); +doAsyncWork({ abortSignal: signal }); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fabort-controller%2FREADME.png) diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/abort-controller.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/abort-controller.d.ts new file mode 100644 index 0000000..902fd4d --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/abort-controller.d.ts @@ -0,0 +1,42 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} + +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export declare interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} + +export { } diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null 
+++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/package.json b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js new file mode 100644 index 0000000..c7a0749 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js @@ -0,0 +1,31 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map new file mode 100644 index 0000000..0514343 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAa,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF;AALD,gCAKC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js new file mode 100644 index 0000000..aa54840 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map new file mode 100644 index 0000000..5fea0c4 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { 
+ } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js new file mode 100644 index 0000000..b589ef0 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AbortError = void 0; +var AbortError_js_1 = require("./AbortError.js"); +Object.defineProperty(exports, "AbortError", { enumerable: true, get: function () { return AbortError_js_1.AbortError; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js.map new file mode 100644 index 0000000..48ef08c --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;AAMlC,iDAA6C;AAApC,2GAAA,UAAU,OAAA","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/package.json b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 0000000..22735db --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. 
+{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.42.3" + } + ] +} diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts new file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. 
+ */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# 
sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/package.json b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts new file mode 100644 index 0000000..8bb3125 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts @@ -0,0 +1,22 @@ +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +//# sourceMappingURL=AbortError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map new file mode 100644 index 0000000..fe8c207 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.d.ts","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.js new file mode 100644 index 0000000..3ae9745 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +//# sourceMappingURL=AbortError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map new file mode 100644 index 0000000..3fb9661 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortError.js","sourceRoot":"","sources":["../../src/AbortError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts new 
file mode 100644 index 0000000..8a19586 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts @@ -0,0 +1,19 @@ +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +//# sourceMappingURL=AbortSignalLike.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map new file mode 100644 index 0000000..512275d --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.d.ts","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js new file mode 100644 index 0000000..b6d9f47 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=AbortSignalLike.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map new file mode 100644 index 0000000..8a12c70 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/AbortSignalLike.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignalLike.js","sourceRoot":"","sources":["../../src/AbortSignalLike.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any,\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.d.ts b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.d.ts new file mode 100644 index 0000000..1998130 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.d.ts @@ -0,0 +1,7 @@ +declare global { + interface Event { + } +} +export { AbortError } from "./AbortError.js"; +export { AbortSignalLike } from "./AbortSignalLike.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map new file mode 100644 index 0000000..ff92652 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,KAAK;KAAG;CACnB;AAED,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.js b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.js new file mode 100644 index 0000000..68c507c --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AbortError } from "./AbortError.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.js.map b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.js.map new file mode 100644 index 0000000..1604023 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\ndeclare global {\n interface Event {}\n}\n\nexport { AbortError } from \"./AbortError.js\";\nexport { AbortSignalLike } from \"./AbortSignalLike.js\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/package.json b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-util/node_modules/@azure/abort-controller/package.json b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/package.json new file mode 100644 index 0000000..d8375e5 --- /dev/null +++ b/node_modules/@azure/core-util/node_modules/@azure/abort-controller/package.json @@ -0,0 +1,116 @@ +{ + "name": "@azure/abort-controller", + "sdk-type": "client", + "version": "2.1.1", + "description": "Microsoft Azure SDK for JavaScript - Aborter", + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "sideEffects": false, + "type": "module", + "main": "./dist/commonjs/index.js", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=18.0.0" + }, + "keywords": [ + "azure", + "aborter", + "abortsignal", + "cancellation", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,mjs,cjs,json}\"", + "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && 
api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,mjs,cjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.11.1", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-util/package.json b/node_modules/@azure/core-util/package.json new file mode 100644 index 0000000..7edd696 --- /dev/null +++ b/node_modules/@azure/core-util/package.json @@ -0,0 +1,112 @@ +{ + "name": "@azure/core-util", + "version": "1.8.1", + "description": "Core library for shared utility methods", + "sdk-type": "client", + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=18.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-util/", + "sideEffects": false, + "prettier": "@azure/eslint-plugin-azure-sdk/prettier.json", + "scripts": { + "build:samples": "echo Obsolete", + "build:test": 
"npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist dist-* temp *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure-tools/vite-plugin-browser-test-map": "^1.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.11.1", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/logger/LICENSE b/node_modules/@azure/logger/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/logger/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/logger/README.md b/node_modules/@azure/logger/README.md new file mode 100644 index 0000000..058b5db --- /dev/null +++ b/node_modules/@azure/logger/README.md @@ -0,0 +1,132 @@ +# Azure Logger client library for JavaScript + +The `@azure/logger` package can be used to enable logging in the Azure SDKs for JavaScript. + +Logging can be enabled for the Azure SDK in the following ways: + +- Setting the AZURE_LOG_LEVEL environment variable +- Calling setLogLevel imported from "@azure/logger" +- Calling enable() on specific loggers +- Using the `DEBUG` environment variable. + +Note that AZURE_LOG_LEVEL, if set, takes precedence over DEBUG. Only use DEBUG without specifying AZURE_LOG_LEVEL or calling setLogLevel. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/logger +``` + +## Key Concepts + +The `@azure/logger` package supports the following log levels +specified in order of most verbose to least verbose: + +- verbose +- info +- warning +- error + +When setting a log level, either programmatically or via the `AZURE_LOG_LEVEL` environment variable, +any logs that are written using a log level equal to or less than the one you choose +will be emitted. + +For example, setting the log level to `warning` will cause all logs that have the log +level `warning` or `error` to be emitted. + + +**NOTE**: When logging requests and responses, we sanitize these objects to make sure things like `Authorization` headers that contain secrets are not logged. + +Request and response bodies are never logged. 
Headers are redacted by default, unless present in the following list or explicitly allowed by the client SDK: +- "x-ms-client-request-id", +- "x-ms-return-client-request-id", +- "x-ms-useragent", +- "x-ms-correlation-request-id", +- "x-ms-request-id", +- "client-request-id", +- "ms-cv", +- "return-client-request-id", +- "traceparent", +- "Access-Control-Allow-Credentials", +- "Access-Control-Allow-Headers", +- "Access-Control-Allow-Methods", +- "Access-Control-Allow-Origin", +- "Access-Control-Expose-Headers", +- "Access-Control-Max-Age", +- "Access-Control-Request-Headers", +- "Access-Control-Request-Method", +- "Origin", +- "Accept", +- "Accept-Encoding", +- "Cache-Control", +- "Connection", +- "Content-Length", +- "Content-Type", +- "Date", +- "ETag", +- "Expires", +- "If-Match", +- "If-Modified-Since", +- "If-None-Match", +- "If-Unmodified-Since", +- "Last-Modified", +- "Pragma", +- "Request-Id", +- "Retry-After", +- "Server", +- "Transfer-Encoding", +- "User-Agent", +- "WWW-Authenticate", + +## Examples + +### Example 1 - basic usage + +```js +const { EventHubClient } = require('@azure/event-hubs'); + +const logger = require('@azure/logger'); +logger.setLogLevel('info'); + +// operations will now emit info, warning, and error logs +const client = new EventHubClient(/* params */); +client.getPartitionIds() + .then(ids => { /* do work */ }) + .catch(e => { /* do work */ }); +``` + +### Example 2 - redirect log output + +```js +const { AzureLogger, setLogLevel } = require("@azure/logger"); + +setLogLevel("verbose"); + +// override logging to output to console.log (default location is stderr) +AzureLogger.log = (...args) => { + console.log(...args); +}; +``` + +Using `AzureLogger`, it is possible to redirect the logging output from the Azure SDKs by +overriding the `AzureLogger.log` method. This may be useful if you want to redirect logs to +a location other than stderr. + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Flogger%2FREADME.png) diff --git a/node_modules/@azure/logger/dist/browser/debug.d.ts b/node_modules/@azure/logger/dist/browser/debug.d.ts new file mode 100644 index 0000000..6b02b18 --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/debug.d.ts @@ -0,0 +1,63 @@ +/** + * A simple mechanism for enabling logging. + * Intended to mimic the publicly available `debug` package. + */ +export interface Debug { + /** + * Creates a new logger with the given namespace. + */ + (namespace: string): Debugger; + /** + * The default log method (defaults to console) + */ + log: (...args: any[]) => void; + /** + * Enables a particular set of namespaces. + * To enable multiple separate them with commas, e.g. "info,debug". + * Supports wildcards, e.g. "azure:*" + * Supports skip syntax, e.g. "azure:*,-azure:storage:*" will enable + * everything under azure except for things under azure:storage. 
+ */ + enable: (namespaces: string) => void; + /** + * Checks if a particular namespace is enabled. + */ + enabled: (namespace: string) => boolean; + /** + * Disables all logging, returns what was previously enabled. + */ + disable: () => string; +} +/** + * A log function that can be dynamically enabled and redirected. + */ +export interface Debugger { + /** + * Logs the given arguments to the `log` method. + */ + (...args: any[]): void; + /** + * True if this logger is active and logging. + */ + enabled: boolean; + /** + * Used to cleanup/remove this logger. + */ + destroy: () => boolean; + /** + * The current log method. Can be overridden to redirect output. + */ + log: (...args: any[]) => void; + /** + * The namespace of this logger. + */ + namespace: string; + /** + * Extends this logger with a child namespace. + * Namespaces are separated with a ':' character. + */ + extend: (namespace: string) => Debugger; +} +declare const debugObj: Debug; +export default debugObj; +//# sourceMappingURL=debug.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/debug.d.ts.map b/node_modules/@azure/logger/dist/browser/debug.d.ts.map new file mode 100644 index 0000000..9de8e8c --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/debug.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"debug.d.ts","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,MAAM,WAAW,KAAK;IACpB;;OAEG;IACH,CAAC,SAAS,EAAE,MAAM,GAAG,QAAQ,CAAC;IAC9B;;OAEG;IACH,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,CAAC;IAC9B;;;;;;OAMG;IACH,MAAM,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC;;OAEG;IACH,OAAO,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC;IACxC;;OAEG;IACH,OAAO,EAAE,MAAM,MAAM,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,QAAQ;IACvB;;OAEG;IACH,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;IACvB;;OAEG;IACH,OAAO,EAAE,OAAO,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,OAAO,CAAC;IACvB;;OAEG;IACH,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,CAAC;IAC9B;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;;OAGG;IACH,MAAM,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,QAAQ,CAAC;CACzC;AAcD,QAAA,MAAM,QAAQ,EAAE,KAUf,CAAC;AAmFF,eAAe,QAAQ,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/debug.js b/node_modules/@azure/logger/dist/browser/debug.js new file mode 100644 index 0000000..cf49a67 --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/debug.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { log } from "./log.js"; +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log, +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend, + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} +export default debugObj; +//# sourceMappingURL=debug.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/debug.js.map b/node_modules/@azure/logger/dist/browser/debug.js.map new file mode 100644 index 0000000..68d85aa --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/debug.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"debug.js","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC;AAgE/B,MAAM,gBAAgB,GACpB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,SAAS,CAAC;AAEpF,IAAI,aAAiC,CAAC;AACtC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,MAAM,SAAS,GAAe,EAAE,CAAC;AAEjC,IAAI,gBAAgB,EAAE,CAAC;IACrB,MAAM,CAAC,gBAAgB,CAAC,CAAC;AAC3B,CAAC;AAED,MAAM,QAAQ,GAAU,MAAM,CAAC,MAAM,CACnC,CAAC,SAAiB,EAAY,EAAE;IAC9B,OAAO,cAAc,CAAC,SAAS,CAAC,CAAC;AACnC,CAAC,EACD;IACE,MAAM;IACN,OAAO;IACP,OAAO;IACP,GAAG;CACJ,CACF,CAAC;AAEF,SAAS,MAAM,CAAC,UAAkB;IAChC,aAAa,GAAG,UAAU,CAAC;IAC3B,iBAAiB,GAAG,EAAE,CAAC;IACvB,iBAAiB,GAAG,EAAE,CAAC;IACvB,MAAM,QAAQ,GAAG,KAAK,CAAC;IACvB,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IAC5F,KAAK,MAAM,EAAE,IAAI,aAAa,EAAE,CAAC;QAC/B,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACvB,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QAC1D,CAAC;aAAM,CAAC;YACN,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;QAChD,CAAC;IACH,CAAC;IACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,QAAQ,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;IACjD,CAAC;AACH,CAAC;AAED,SAAS,OAAO,CAAC,SAAiB;IAChC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QAC5B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE,CAAC;QACxC,IAAI,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YAC5B,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IACD,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE,CAAC;QACjD,IAAI,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YACrC,OAAO,IAAI,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,OAAO;IACd,MAAM,MAAM,GAAG,aAAa,IAAI,EAAE,CAAC;IACnC,MAAM,CAAC,EAAE,CAAC,CAAC;IACX,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,WAAW,GAAa,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;QACjD,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC;QAC3B,OAAO;QACP,GAAG,EAAE,QAAQ,CAAC,GAAG;QACjB,SAAS;QACT,MAAM;KACP,CAAC,CAAC;IAEH,SAAS,KAAK,CAAC,GAAG,IAAW;QAC3B,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;YACzB,OAAO;QACT,CAAC;QACD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACpB,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QACD,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IAC3B,CAAC;IAED,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;IAE5B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,OAAO;IACd,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,IAAI,CAAC,EAAE,CAAC;QACf,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAiB,SAAiB;IAC/C,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,IAAI,CAAC,SAAS,IAAI,SAAS,EAAE,CAAC,CAAC;IACrE,WAAW,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;IAC3B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,eAAe,QAAQ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { log } from \"./log.js\";\n\n/**\n * A simple mechanism for enabling logging.\n * Intended to mimic the publicly available `debug` package.\n */\nexport interface Debug {\n /**\n * Creates a new logger with the given namespace.\n */\n (namespace: string): Debugger;\n /**\n * The default log method (defaults to console)\n */\n log: (...args: any[]) => void;\n /**\n * Enables a particular set of namespaces.\n * To enable multiple separate them with commas, e.g. \"info,debug\".\n * Supports wildcards, e.g. \"azure:*\"\n * Supports skip syntax, e.g. 
\"azure:*,-azure:storage:*\" will enable\n * everything under azure except for things under azure:storage.\n */\n enable: (namespaces: string) => void;\n /**\n * Checks if a particular namespace is enabled.\n */\n enabled: (namespace: string) => boolean;\n /**\n * Disables all logging, returns what was previously enabled.\n */\n disable: () => string;\n}\n\n/**\n * A log function that can be dynamically enabled and redirected.\n */\nexport interface Debugger {\n /**\n * Logs the given arguments to the `log` method.\n */\n (...args: any[]): void;\n /**\n * True if this logger is active and logging.\n */\n enabled: boolean;\n /**\n * Used to cleanup/remove this logger.\n */\n destroy: () => boolean;\n /**\n * The current log method. Can be overridden to redirect output.\n */\n log: (...args: any[]) => void;\n /**\n * The namespace of this logger.\n */\n namespace: string;\n /**\n * Extends this logger with a child namespace.\n * Namespaces are separated with a ':' character.\n */\n extend: (namespace: string) => Debugger;\n}\n\nconst debugEnvVariable =\n (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\n\nlet enabledString: string | undefined;\nlet enabledNamespaces: RegExp[] = [];\nlet skippedNamespaces: RegExp[] = [];\nconst debuggers: Debugger[] = [];\n\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\n\nconst debugObj: Debug = Object.assign(\n (namespace: string): Debugger => {\n return createDebugger(namespace);\n },\n {\n enable,\n enabled,\n disable,\n log,\n },\n);\n\nfunction enable(namespaces: string): void {\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n const wildcard = /\\*/g;\n const namespaceList = namespaces.split(\",\").map((ns) => ns.trim().replace(wildcard, \".*?\"));\n for (const ns of namespaceList) {\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));\n } else {\n enabledNamespaces.push(new RegExp(`^${ns}$`));\n }\n }\n for (const instance of debuggers) {\n instance.enabled = enabled(instance.namespace);\n }\n}\n\nfunction enabled(namespace: string): boolean {\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n\n for (const skipped of skippedNamespaces) {\n if (skipped.test(namespace)) {\n return false;\n }\n }\n for (const enabledNamespace of enabledNamespaces) {\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n return false;\n}\n\nfunction disable(): string {\n const result = enabledString || \"\";\n enable(\"\");\n return result;\n}\n\nfunction createDebugger(namespace: string): Debugger {\n const newDebugger: Debugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy,\n log: debugObj.log,\n namespace,\n extend,\n });\n\n function debug(...args: any[]): void {\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = `${namespace} ${args[0]}`;\n }\n newDebugger.log(...args);\n }\n\n debuggers.push(newDebugger);\n\n return newDebugger;\n}\n\nfunction destroy(this: Debugger): boolean {\n const index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\n\nfunction extend(this: Debugger, namespace: string): Debugger {\n const newDebugger = createDebugger(`${this.namespace}:${namespace}`);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\nexport default debugObj;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/index.d.ts b/node_modules/@azure/logger/dist/browser/index.d.ts new file mode 
100644 index 0000000..1899271 --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/index.d.ts @@ -0,0 +1,68 @@ +import { type Debugger } from "./debug.js"; +export type { Debugger } from "./debug.js"; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export declare const AzureLogger: AzureClientLogger; +/** + * The log levels supported by the logger. + * The log levels in order of most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export type AzureLogLevel = "verbose" | "info" | "warning" | "error"; +/** + * An AzureClientLogger is a function that can log to an appropriate severity level. + */ +export type AzureClientLogger = Debugger; +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare function setLogLevel(level?: AzureLogLevel): void; +/** + * Retrieves the currently specified log level. + */ +export declare function getLogLevel(): AzureLogLevel | undefined; +/** + * Defines the methods available on the SDK-facing logger. + */ +export interface AzureLogger { + /** + * Used for failures the program is unlikely to recover from, + * such as Out of Memory. + */ + error: Debugger; + /** + * Used when a function fails to perform its intended task. + * Usually this means the function will throw an exception. + * Not used for self-healing events (e.g. automatic retry) + */ + warning: Debugger; + /** + * Used when a function operates normally. + */ + info: Debugger; + /** + * Used for detailed troubleshooting scenarios. This is + * intended for use by developers / system administrators + * for diagnosing specific failures. + */ + verbose: Debugger; +} +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export declare function createClientLogger(namespace: string): AzureLogger; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/index.d.ts.map b/node_modules/@azure/logger/dist/browser/index.d.ts.map new file mode 100644 index 0000000..405477e --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAc,EAAE,KAAK,QAAQ,EAAE,MAAM,YAAY,CAAC;AAClD,YAAY,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAQ3C;;;;GAIG;AACH,eAAO,MAAM,WAAW,EAAE,iBAAkC,CAAC;AAK7D;;;;;;;GAOG;AACH,MAAM,MAAM,aAAa,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS,GAAG,OAAO,CAAC;AAKrE;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,QAAQ,CAAC;AAezC;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CAAC,KAAK,CAAC,EAAE,aAAa,GAAG,IAAI,CAgBvD;AAED;;GAEG;AACH,wBAAgB,WAAW,IAAI,aAAa,GAAG,SAAS,CAEvD;AASD;;GAEG;AAEH,MAAM,WAAW,WAAW;IAC1B;;;OAGG;IACH,KAAK,EAAE,QAAQ,CAAC;IAChB;;;;OAIG;IACH,OAAO,EAAE,QAAQ,CAAC;IAClB;;OAEG;IACH,IAAI,EAAE,QAAQ,CAAC;IACf;;;;OAIG;IACH,OAAO,EAAE,QAAQ,CAAC;CACnB;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,SAAS,EAAE,MAAM,GAAG,WAAW,CASjE"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/index.js b/node_modules/@azure/logger/dist/browser/index.js new file mode 100644 index 0000000..6e9ca26 --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/index.js @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import debug from "./debug.js"; +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export const AzureLogger = debug("azure"); +AzureLogger.log = (...args) => { + debug.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +export function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100, +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export function createClientLogger(namespace) { + const clientRootLogger = AzureLogger.extend(namespace); + patchLogMethod(AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose"), + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level, + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug.disable(); + debug.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/index.js.map b/node_modules/@azure/logger/dist/browser/index.js.map new file mode 100644 index 0000000..1c399d6 --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAwB,MAAM,YAAY,CAAC;AAGlD,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAiB,CAAC;AACnD,MAAM,eAAe,GACnB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,SAAS,CAAC;AAE9F,IAAI,aAAwC,CAAC;AAE7C;;;;GAIG;AACH,MAAM,CAAC,MAAM,WAAW,GAAsB,KAAK,CAAC,OAAO,CAAC,CAAC;AAC7D,WAAW,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;IAC5B,KAAK,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC;AAWF,MAAM,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;AASjE,IAAI,eAAe,EAAE,CAAC;IACpB,0FAA0F;IAC1F,IAAI,eAAe,CAAC,eAAe,CAAC,EAAE,CAAC;QACrC,WAAW,CAAC,eAAe,CAAC,CAAC;IAC/B,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,KAAK,CACX,6CAA6C,eAAe,iDAAiD,gBAAgB,CAAC,IAAI,CAChI,IAAI,CACL,GAAG,CACL,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CAAC,KAAqB;IAC/C,IAAI,KAAK,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CACb,sBAAsB,KAAK,yBAAyB,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CACjF,CAAC;IACJ,CAAC;IACD,aAAa,GAAG,KAAK,CAAC;IAEtB,MAAM,iBAAiB,GAAG,EAAE,CAAC;IAC7B,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE,CAAC;QACvC,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC;YACzB,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAC3C,CAAC;IACH,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,OAAO,aAAa,CAAC;AACvB,CAAC;AAED,MAAM,QAAQ,GAAG;IACf,OAAO,EAAE,GAAG;IACZ,IAAI,EAAE,GAAG;IACT,OAAO,EAAE,GAAG;IACZ,KAAK,EAAE,GAAG;CACX,CAAC;AA8BF;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,SAAiB;IAClD,MAAM,gBAAgB,GAAsB,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC1E,cAAc,CAAC,WAAW,EAAE,gBAAgB,CAAC,CAAC;IAC9C,OAAO;QACL,KAAK,EAAE,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC;QAC9C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;QAClD,IAAI,EAAE,YAAY,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAC5C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;KACnD,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,MAAyB,EAAE,KAAwC;IACzF,KAAK,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;QACtB,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IACtB,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,MAAyB,EAAE,KAAoB;IACnE,MAAM,MAAM,GAAkB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QAChE,KAAK;KACN,CAAC,CAAC;IAEH,c
AAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/B,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC;QACzB,MAAM,iBAAiB,GAAG,KAAK,CAAC,OAAO,EAAE,CAAC;QAC1C,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3D,CAAC;IAED,iBAAiB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAE9B,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,YAAY,CAAC,MAAqB;IACzC,OAAO,OAAO,CAAC,aAAa,IAAI,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC;AACrF,CAAC;AAED,SAAS,eAAe,CAAC,QAAgB;IACvC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;AACpD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport debug, { type Debugger } from \"./debug.js\";\nexport type { Debugger } from \"./debug.js\";\n\nconst registeredLoggers = new Set();\nconst logLevelFromEnv =\n (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\n\nlet azureLogLevel: AzureLogLevel | undefined;\n\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nexport const AzureLogger: AzureClientLogger = debug(\"azure\");\nAzureLogger.log = (...args) => {\n debug.log(...args);\n};\n\n/**\n * The log levels supported by the logger.\n * The log levels in order of most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport type AzureLogLevel = \"verbose\" | \"info\" | \"warning\" | \"error\";\nconst AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\n\ntype AzureDebugger = Debugger & { level: AzureLogLevel };\n\n/**\n * An AzureClientLogger is a function that can log to an appropriate severity level.\n */\nexport type AzureClientLogger = Debugger;\n\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n } else {\n console.error(\n `AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(\n \", \",\n )}.`,\n );\n }\n}\n\n/**\n * Immediately enables logging at the specified log level. If no level is specified, logging is disabled.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport function setLogLevel(level?: AzureLogLevel): void {\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\n `Unknown log level '${level}'. 
Acceptable values: ${AZURE_LOG_LEVELS.join(\",\")}`,\n );\n }\n azureLogLevel = level;\n\n const enabledNamespaces = [];\n for (const logger of registeredLoggers) {\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n\n debug.enable(enabledNamespaces.join(\",\"));\n}\n\n/**\n * Retrieves the currently specified log level.\n */\nexport function getLogLevel(): AzureLogLevel | undefined {\n return azureLogLevel;\n}\n\nconst levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100,\n};\n\n/**\n * Defines the methods available on the SDK-facing logger.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface AzureLogger {\n /**\n * Used for failures the program is unlikely to recover from,\n * such as Out of Memory.\n */\n error: Debugger;\n /**\n * Used when a function fails to perform its intended task.\n * Usually this means the function will throw an exception.\n * Not used for self-healing events (e.g. automatic retry)\n */\n warning: Debugger;\n /**\n * Used when a function operates normally.\n */\n info: Debugger;\n /**\n * Used for detailed troubleshooting scenarios. This is\n * intended for use by developers / system administrators\n * for diagnosing specific failures.\n */\n verbose: Debugger;\n}\n\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nexport function createClientLogger(namespace: string): AzureLogger {\n const clientRootLogger: AzureClientLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\"),\n };\n}\n\nfunction patchLogMethod(parent: AzureClientLogger, child: AzureClientLogger | AzureDebugger): void {\n child.log = (...args) => {\n parent.log(...args);\n };\n}\n\nfunction createLogger(parent: AzureClientLogger, level: AzureLogLevel): AzureDebugger {\n const logger: AzureDebugger = Object.assign(parent.extend(level), {\n level,\n });\n\n patchLogMethod(parent, logger);\n\n if (shouldEnable(logger)) {\n const enabledNamespaces = debug.disable();\n debug.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n\n registeredLoggers.add(logger);\n\n return logger;\n}\n\nfunction shouldEnable(logger: AzureDebugger): boolean {\n return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]);\n}\n\nfunction isAzureLogLevel(logLevel: string): logLevel is AzureLogLevel {\n return AZURE_LOG_LEVELS.includes(logLevel as any);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/log-browser.d.mts.map b/node_modules/@azure/logger/dist/browser/log-browser.d.mts.map new file mode 100644 index 0000000..563b0d8 --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/log-browser.d.mts.map @@ -0,0 +1 @@ +{"version":3,"file":"log-browser.d.mts","sourceRoot":"","sources":["../../src/log-browser.mts"],"names":[],"mappings":"AAGA,wBAAgB,GAAG,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAexC"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/log-browser.mjs.map b/node_modules/@azure/logger/dist/browser/log-browser.mjs.map new file mode 100644 index 0000000..5564b7e --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/log-browser.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"log-browser.mjs","sourceRoot":"","sources":["../../src/log-browser.mts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,UAAU,GAAG,CAAC,GAAG,IAAW;IAChC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACpB,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;YAChC,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QACzB,CAAC;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;QACxB,CAAC;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;YACtC,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;QACxB,CAAC;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC;YACzC,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QACzB,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QACzB,CAAC;IACH,CAAC;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport function log(...args: any[]): void {\n if (args.length > 0) {\n const firstArg = String(args[0]);\n if (firstArg.includes(\":error\")) {\n console.error(...args);\n } else if (firstArg.includes(\":warning\")) {\n console.warn(...args);\n } else if (firstArg.includes(\":info\")) {\n console.info(...args);\n } else if (firstArg.includes(\":verbose\")) {\n console.debug(...args);\n } else {\n console.debug(...args);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/log.d.ts b/node_modules/@azure/logger/dist/browser/log.d.ts new file mode 100644 index 0000000..9d43c0f --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/log.d.ts @@ -0,0 +1,2 @@ +export declare function log(...args: any[]): void; +//# sourceMappingURL=log-browser.d.mts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/log.js b/node_modules/@azure/logger/dist/browser/log.js new file mode 100644 index 0000000..46c4950 --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/log.js @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export function log(...args) { + if (args.length > 0) { + const firstArg = String(args[0]); + if (firstArg.includes(":error")) { + console.error(...args); + } + else if (firstArg.includes(":warning")) { + console.warn(...args); + } + else if (firstArg.includes(":info")) { + console.info(...args); + } + else if (firstArg.includes(":verbose")) { + console.debug(...args); + } + else { + console.debug(...args); + } + } +} +//# sourceMappingURL=log-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/browser/package.json b/node_modules/@azure/logger/dist/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/logger/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/logger/dist/commonjs/debug.d.ts b/node_modules/@azure/logger/dist/commonjs/debug.d.ts new file mode 100644 index 0000000..6b02b18 --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/debug.d.ts @@ -0,0 +1,63 @@ +/** + * A simple mechanism for enabling logging. + * Intended to mimic the publicly available `debug` package. + */ +export interface Debug { + /** + * Creates a new logger with the given namespace. + */ + (namespace: string): Debugger; + /** + * The default log method (defaults to console) + */ + log: (...args: any[]) => void; + /** + * Enables a particular set of namespaces. + * To enable multiple separate them with commas, e.g. "info,debug". + * Supports wildcards, e.g. 
"azure:*" + * Supports skip syntax, e.g. "azure:*,-azure:storage:*" will enable + * everything under azure except for things under azure:storage. + */ + enable: (namespaces: string) => void; + /** + * Checks if a particular namespace is enabled. + */ + enabled: (namespace: string) => boolean; + /** + * Disables all logging, returns what was previously enabled. + */ + disable: () => string; +} +/** + * A log function that can be dynamically enabled and redirected. + */ +export interface Debugger { + /** + * Logs the given arguments to the `log` method. + */ + (...args: any[]): void; + /** + * True if this logger is active and logging. + */ + enabled: boolean; + /** + * Used to cleanup/remove this logger. + */ + destroy: () => boolean; + /** + * The current log method. Can be overridden to redirect output. + */ + log: (...args: any[]) => void; + /** + * The namespace of this logger. + */ + namespace: string; + /** + * Extends this logger with a child namespace. + * Namespaces are separated with a ':' character. + */ + extend: (namespace: string) => Debugger; +} +declare const debugObj: Debug; +export default debugObj; +//# sourceMappingURL=debug.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/debug.d.ts.map b/node_modules/@azure/logger/dist/commonjs/debug.d.ts.map new file mode 100644 index 0000000..9de8e8c --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/debug.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"debug.d.ts","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,MAAM,WAAW,KAAK;IACpB;;OAEG;IACH,CAAC,SAAS,EAAE,MAAM,GAAG,QAAQ,CAAC;IAC9B;;OAEG;IACH,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,CAAC;IAC9B;;;;;;OAMG;IACH,MAAM,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC;;OAEG;IACH,OAAO,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC;IACxC;;OAEG;IACH,OAAO,EAAE,MAAM,MAAM,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,QAAQ;IACvB;;OAEG;IACH,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;IACvB;;OAEG;IACH,OAAO,EAAE,OAAO,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,OAAO,CAAC;IACvB;;OAEG;IACH,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,CAAC;IAC9B;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;;OAGG;IACH,MAAM,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,QAAQ,CAAC;CACzC;AAcD,QAAA,MAAM,QAAQ,EAAE,KAUf,CAAC;AAmFF,eAAe,QAAQ,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/debug.js b/node_modules/@azure/logger/dist/commonjs/debug.js new file mode 100644 index 0000000..d43a88d --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/debug.js @@ -0,0 +1,95 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +const log_js_1 = require("./log.js"); +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log: log_js_1.log, +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend, + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} +exports.default = debugObj; +//# sourceMappingURL=debug.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/debug.js.map b/node_modules/@azure/logger/dist/commonjs/debug.js.map new file mode 100644 index 0000000..373ad7c --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/debug.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"debug.js","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;AAElC,qCAA+B;AAgE/B,MAAM,gBAAgB,GACpB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,SAAS,CAAC;AAEpF,IAAI,aAAiC,CAAC;AACtC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,MAAM,SAAS,GAAe,EAAE,CAAC;AAEjC,IAAI,gBAAgB,EAAE,CAAC;IACrB,MAAM,CAAC,gBAAgB,CAAC,CAAC;AAC3B,CAAC;AAED,MAAM,QAAQ,GAAU,MAAM,CAAC,MAAM,CACnC,CAAC,SAAiB,EAAY,EAAE;IAC9B,OAAO,cAAc,CAAC,SAAS,CAAC,CAAC;AACnC,CAAC,EACD;IACE,MAAM;IACN,OAAO;IACP,OAAO;IACP,GAAG,EAAH,YAAG;CACJ,CACF,CAAC;AAEF,SAAS,MAAM,CAAC,UAAkB;IAChC,aAAa,GAAG,UAAU,CAAC;IAC3B,iBAAiB,GAAG,EAAE,CAAC;IACvB,iBAAiB,GAAG,EAAE,CAAC;IACvB,MAAM,QAAQ,GAAG,KAAK,CAAC;IACvB,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IAC5F,KAAK,MAAM,EAAE,IAAI,aAAa,EAAE,CAAC;QAC/B,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACvB,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QAC1D,CAAC;aAAM,CAAC;YACN,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;QAChD,CAAC;IACH,CAAC;IACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,QAAQ,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;IACjD,CAAC;AACH,CAAC;AAED,SAAS,OAAO,CAAC,SAAiB;IAChC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QAC5B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE,CAAC;QACxC,IAAI,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YAC5B,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IACD,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE,CAAC;QACjD,IAAI,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YACrC,OAAO,IAAI,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,OAAO;IACd,MAAM,MAAM,GAAG,aAAa,IAAI,EAAE,CAAC;IACnC,MAAM,CAAC,EAAE,CAAC,CAAC;IACX,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,WAAW,GAAa,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;QACjD,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC;QAC3B,OAAO;QACP,GAAG,EAAE,QAAQ,CAAC,GAAG;QACjB,SAAS;QACT,MAAM;KACP,CAAC,CAAC;IAEH,SAAS,KAAK,CAAC,GAAG,IAAW;QAC3B,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;YACzB,OAAO;QACT,CAAC;QACD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACpB,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QACD,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IAC3B,CAAC;IAED,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;IAE5B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,OAAO;IACd,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,IAAI,CAAC,EAAE,CAAC;QACf,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAiB,SAAiB;IAC/C,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,IAAI,CAAC,SAAS,IAAI,SAAS,EAAE,CAAC,CAAC;IACrE,WAAW,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;IAC3B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,kBAAe,QAAQ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { log } from \"./log.js\";\n\n/**\n * A simple mechanism for enabling logging.\n * Intended to mimic the publicly available `debug` package.\n */\nexport interface Debug {\n /**\n * Creates a new logger with the given namespace.\n */\n (namespace: string): Debugger;\n /**\n * The default log method (defaults to console)\n */\n log: (...args: any[]) => void;\n /**\n * Enables a particular set of namespaces.\n * To enable multiple separate them with commas, e.g. \"info,debug\".\n * Supports wildcards, e.g. \"azure:*\"\n * Supports skip syntax, e.g. 
\"azure:*,-azure:storage:*\" will enable\n * everything under azure except for things under azure:storage.\n */\n enable: (namespaces: string) => void;\n /**\n * Checks if a particular namespace is enabled.\n */\n enabled: (namespace: string) => boolean;\n /**\n * Disables all logging, returns what was previously enabled.\n */\n disable: () => string;\n}\n\n/**\n * A log function that can be dynamically enabled and redirected.\n */\nexport interface Debugger {\n /**\n * Logs the given arguments to the `log` method.\n */\n (...args: any[]): void;\n /**\n * True if this logger is active and logging.\n */\n enabled: boolean;\n /**\n * Used to cleanup/remove this logger.\n */\n destroy: () => boolean;\n /**\n * The current log method. Can be overridden to redirect output.\n */\n log: (...args: any[]) => void;\n /**\n * The namespace of this logger.\n */\n namespace: string;\n /**\n * Extends this logger with a child namespace.\n * Namespaces are separated with a ':' character.\n */\n extend: (namespace: string) => Debugger;\n}\n\nconst debugEnvVariable =\n (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\n\nlet enabledString: string | undefined;\nlet enabledNamespaces: RegExp[] = [];\nlet skippedNamespaces: RegExp[] = [];\nconst debuggers: Debugger[] = [];\n\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\n\nconst debugObj: Debug = Object.assign(\n (namespace: string): Debugger => {\n return createDebugger(namespace);\n },\n {\n enable,\n enabled,\n disable,\n log,\n },\n);\n\nfunction enable(namespaces: string): void {\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n const wildcard = /\\*/g;\n const namespaceList = namespaces.split(\",\").map((ns) => ns.trim().replace(wildcard, \".*?\"));\n for (const ns of namespaceList) {\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));\n } else {\n enabledNamespaces.push(new RegExp(`^${ns}$`));\n }\n }\n for (const instance of debuggers) {\n instance.enabled = enabled(instance.namespace);\n }\n}\n\nfunction enabled(namespace: string): boolean {\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n\n for (const skipped of skippedNamespaces) {\n if (skipped.test(namespace)) {\n return false;\n }\n }\n for (const enabledNamespace of enabledNamespaces) {\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n return false;\n}\n\nfunction disable(): string {\n const result = enabledString || \"\";\n enable(\"\");\n return result;\n}\n\nfunction createDebugger(namespace: string): Debugger {\n const newDebugger: Debugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy,\n log: debugObj.log,\n namespace,\n extend,\n });\n\n function debug(...args: any[]): void {\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = `${namespace} ${args[0]}`;\n }\n newDebugger.log(...args);\n }\n\n debuggers.push(newDebugger);\n\n return newDebugger;\n}\n\nfunction destroy(this: Debugger): boolean {\n const index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\n\nfunction extend(this: Debugger, namespace: string): Debugger {\n const newDebugger = createDebugger(`${this.namespace}:${namespace}`);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\nexport default debugObj;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/index.d.ts b/node_modules/@azure/logger/dist/commonjs/index.d.ts new file mode 
100644 index 0000000..1899271 --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/index.d.ts @@ -0,0 +1,68 @@ +import { type Debugger } from "./debug.js"; +export type { Debugger } from "./debug.js"; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export declare const AzureLogger: AzureClientLogger; +/** + * The log levels supported by the logger. + * The log levels in order of most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export type AzureLogLevel = "verbose" | "info" | "warning" | "error"; +/** + * An AzureClientLogger is a function that can log to an appropriate severity level. + */ +export type AzureClientLogger = Debugger; +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare function setLogLevel(level?: AzureLogLevel): void; +/** + * Retrieves the currently specified log level. + */ +export declare function getLogLevel(): AzureLogLevel | undefined; +/** + * Defines the methods available on the SDK-facing logger. + */ +export interface AzureLogger { + /** + * Used for failures the program is unlikely to recover from, + * such as Out of Memory. + */ + error: Debugger; + /** + * Used when a function fails to perform its intended task. + * Usually this means the function will throw an exception. + * Not used for self-healing events (e.g. automatic retry) + */ + warning: Debugger; + /** + * Used when a function operates normally. + */ + info: Debugger; + /** + * Used for detailed troubleshooting scenarios. This is + * intended for use by developers / system administrators + * for diagnosing specific failures. + */ + verbose: Debugger; +} +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export declare function createClientLogger(namespace: string): AzureLogger; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/index.d.ts.map b/node_modules/@azure/logger/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000..405477e --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAc,EAAE,KAAK,QAAQ,EAAE,MAAM,YAAY,CAAC;AAClD,YAAY,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAQ3C;;;;GAIG;AACH,eAAO,MAAM,WAAW,EAAE,iBAAkC,CAAC;AAK7D;;;;;;;GAOG;AACH,MAAM,MAAM,aAAa,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS,GAAG,OAAO,CAAC;AAKrE;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,QAAQ,CAAC;AAezC;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CAAC,KAAK,CAAC,EAAE,aAAa,GAAG,IAAI,CAgBvD;AAED;;GAEG;AACH,wBAAgB,WAAW,IAAI,aAAa,GAAG,SAAS,CAEvD;AASD;;GAEG;AAEH,MAAM,WAAW,WAAW;IAC1B;;;OAGG;IACH,KAAK,EAAE,QAAQ,CAAC;IAChB;;;;OAIG;IACH,OAAO,EAAE,QAAQ,CAAC;IAClB;;OAEG;IACH,IAAI,EAAE,QAAQ,CAAC;IACf;;;;OAIG;IACH,OAAO,EAAE,QAAQ,CAAC;CACnB;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,SAAS,EAAE,MAAM,GAAG,WAAW,CASjE"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/index.js b/node_modules/@azure/logger/dist/commonjs/index.js new file mode 100644 index 0000000..45db7e3 --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/index.js @@ -0,0 +1,105 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createClientLogger = exports.getLogLevel = exports.setLogLevel = exports.AzureLogger = void 0; +const tslib_1 = require("tslib"); +const debug_js_1 = tslib_1.__importDefault(require("./debug.js")); +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +exports.AzureLogger = (0, debug_js_1.default)("azure"); +exports.AzureLogger.log = (...args) => { + debug_js_1.default.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. 
Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug_js_1.default.enable(enabledNamespaces.join(",")); +} +exports.setLogLevel = setLogLevel; +/** + * Retrieves the currently specified log level. + */ +function getLogLevel() { + return azureLogLevel; +} +exports.getLogLevel = getLogLevel; +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100, +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. + * @hidden + */ +function createClientLogger(namespace) { + const clientRootLogger = exports.AzureLogger.extend(namespace); + patchLogMethod(exports.AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose"), + }; +} +exports.createClientLogger = createClientLogger; +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level, + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug_js_1.default.disable(); + debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/index.js.map b/node_modules/@azure/logger/dist/commonjs/index.js.map new file mode 100644 index 0000000..0dcb175 --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;;AAElC,kEAAkD;AAGlD,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAiB,CAAC;AACnD,MAAM,eAAe,GACnB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,SAAS,CAAC;AAE9F,IAAI,aAAwC,CAAC;AAE7C;;;;GAIG;AACU,QAAA,WAAW,GAAsB,IAAA,kBAAK,EAAC,OAAO,CAAC,CAAC;AAC7D,mBAAW,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;IAC5B,kBAAK,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC;AAWF,MAAM,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;AASjE,IAAI,eAAe,EAAE,CAAC;IACpB,0FAA0F;IAC1F,IAAI,eAAe,CAAC,eAAe,CAAC,EAAE,CAAC;QACrC,WAAW,CAAC,eAAe,CAAC,CAAC;IAC/B,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,KAAK,CACX,6CAA6C,eAAe,iDAAiD,gBAAgB,CAAC,IAAI,CAChI,IAAI,CACL,GAAG,CACL,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;;;;;;GAQG;AACH,SAAgB,WAAW,CAAC,KAAqB;IAC/C,IAAI,KAAK,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CACb,sBAAsB,KAAK,yBAAyB,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CACjF,CAAC;IACJ,CAAC;IACD,aAAa,GAAG,KAAK,CAAC;IAEtB,MAAM,iBAAiB,GAAG,EAAE,CAAC;IAC7B,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE,CAAC;QACvC,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC;YACzB,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAC3C,CAAC;IACH,CAAC;IAED,kBAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,CAAC;AAhBD,kCAgBC;AAED;;GAEG;AACH,SAAgB,WAAW;IACzB,OAAO,aAAa,CAAC;AACvB,CAAC;AAFD,kCAEC;AAED,MAAM,QAAQ,GAAG;IACf,OAAO,EAAE,GAAG;IACZ,IAAI,EAAE,GAAG;IACT,OAAO,EAAE,GAAG;IACZ,KAAK,EAAE,GAAG;CACX,CAAC;AA8BF;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,SAAiB;IAClD,MAAM,gBAAgB,GAAsB,mBAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC1E,cAAc,CAAC,mBAAW,EAAE,gBAAgB,CAAC,CAAC;IAC9C,OAAO;QACL,KAAK,EAAE,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC;QAC9C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;QAClD,IAAI,EAAE,YAAY,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAC5C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;KACnD,CAAC;AACJ,CAAC;AATD,gDASC;AAED,SAAS,cAAc,CAAC,MAAyB,EAAE,KAAwC;IACzF,KAAK,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;QACtB,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IACtB,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,MAAyB,EAAE,KAAoB;IACnE,MAAM,MAAM,GAAkB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QAChE,KAAK;KACN,CAAC,CAAC;IAEH,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/B,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC;QACzB,MAAM,iBAAiB,GAAG,kBAAK,CAAC,OAAO,EAAE,CAAC;QAC1C,kBAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3D,CAAC;IAED,iBAAiB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAE9B,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,YAAY,CAAC,MAAqB;IACzC,OAAO,OAAO,CAAC,aAAa,IAAI,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC;AACrF,CAAC;AAED,SAAS,eAAe,CAAC,QAAgB;IACvC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;AACpD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport debug, { type Debugger } from \"./debug.js\";\nexport type { Debugger } from \"./debug.js\";\n\nconst registeredLoggers = new Set();\nconst logLevelFromEnv =\n (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\n\nlet azureLogLevel: AzureLogLevel | undefined;\n\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nexport const AzureLogger: AzureClientLogger = debug(\"azure\");\nAzureLogger.log = (...args) => {\n debug.log(...args);\n};\n\n/**\n * The log levels supported by the logger.\n * The log levels in order of most verbose to least verbose 
are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport type AzureLogLevel = \"verbose\" | \"info\" | \"warning\" | \"error\";\nconst AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\n\ntype AzureDebugger = Debugger & { level: AzureLogLevel };\n\n/**\n * An AzureClientLogger is a function that can log to an appropriate severity level.\n */\nexport type AzureClientLogger = Debugger;\n\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n } else {\n console.error(\n `AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(\n \", \",\n )}.`,\n );\n }\n}\n\n/**\n * Immediately enables logging at the specified log level. If no level is specified, logging is disabled.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport function setLogLevel(level?: AzureLogLevel): void {\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\n `Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(\",\")}`,\n );\n }\n azureLogLevel = level;\n\n const enabledNamespaces = [];\n for (const logger of registeredLoggers) {\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n\n debug.enable(enabledNamespaces.join(\",\"));\n}\n\n/**\n * Retrieves the currently specified log level.\n */\nexport function getLogLevel(): AzureLogLevel | undefined {\n return azureLogLevel;\n}\n\nconst levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100,\n};\n\n/**\n * Defines the methods available on the SDK-facing logger.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface AzureLogger {\n /**\n * Used for failures the program is unlikely to recover from,\n * such as Out of Memory.\n */\n error: Debugger;\n /**\n * Used when a function fails to perform its intended task.\n * Usually this means the function will throw an exception.\n * Not used for self-healing events (e.g. automatic retry)\n */\n warning: Debugger;\n /**\n * Used when a function operates normally.\n */\n info: Debugger;\n /**\n * Used for detailed troubleshooting scenarios. 
This is\n * intended for use by developers / system administrators\n * for diagnosing specific failures.\n */\n verbose: Debugger;\n}\n\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nexport function createClientLogger(namespace: string): AzureLogger {\n const clientRootLogger: AzureClientLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\"),\n };\n}\n\nfunction patchLogMethod(parent: AzureClientLogger, child: AzureClientLogger | AzureDebugger): void {\n child.log = (...args) => {\n parent.log(...args);\n };\n}\n\nfunction createLogger(parent: AzureClientLogger, level: AzureLogLevel): AzureDebugger {\n const logger: AzureDebugger = Object.assign(parent.extend(level), {\n level,\n });\n\n patchLogMethod(parent, logger);\n\n if (shouldEnable(logger)) {\n const enabledNamespaces = debug.disable();\n debug.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n\n registeredLoggers.add(logger);\n\n return logger;\n}\n\nfunction shouldEnable(logger: AzureDebugger): boolean {\n return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]);\n}\n\nfunction isAzureLogLevel(logLevel: string): logLevel is AzureLogLevel {\n return AZURE_LOG_LEVELS.includes(logLevel as any);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/log.d.ts b/node_modules/@azure/logger/dist/commonjs/log.d.ts new file mode 100644 index 0000000..d835a2c --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/log.d.ts @@ -0,0 +1,2 @@ +export declare function log(message: unknown, ...args: any[]): void; +//# sourceMappingURL=log.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/log.d.ts.map b/node_modules/@azure/logger/dist/commonjs/log.d.ts.map new file mode 100644 index 0000000..470aea2 --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/log.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"log.d.ts","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAOA,wBAAgB,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAE1D"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/log.js b/node_modules/@azure/logger/dist/commonjs/log.js new file mode 100644 index 0000000..581081c --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/log.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.log = void 0; +const tslib_1 = require("tslib"); +const node_os_1 = require("node:os"); +const node_util_1 = tslib_1.__importDefault(require("node:util")); +const process = tslib_1.__importStar(require("node:process")); +function log(message, ...args) { + process.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`); +} +exports.log = log; +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/log.js.map b/node_modules/@azure/logger/dist/commonjs/log.js.map new file mode 100644 index 0000000..e59337d --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;;;;AAElC,qCAA8B;AAC9B,kEAA6B;AAC7B,8DAAwC;AAExC,SAAgB,GAAG,CAAC,OAAgB,EAAE,GAAG,IAAW;IAClD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,mBAAI,CAAC,MAAM,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,GAAG,aAAG,EAAE,CAAC,CAAC;AACjE,CAAC;AAFD,kBAEC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EOL } from \"node:os\";\nimport util from \"node:util\";\nimport * as process from \"node:process\";\n\nexport function log(message: unknown, ...args: any[]): void {\n process.stderr.write(`${util.format(message, ...args)}${EOL}`);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/commonjs/package.json b/node_modules/@azure/logger/dist/commonjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/logger/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/logger/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 0000000..22735db --- /dev/null +++ b/node_modules/@azure/logger/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. +{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.42.3" + } + ] +} diff --git a/node_modules/@azure/logger/dist/esm/debug.d.ts b/node_modules/@azure/logger/dist/esm/debug.d.ts new file mode 100644 index 0000000..6b02b18 --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/debug.d.ts @@ -0,0 +1,63 @@ +/** + * A simple mechanism for enabling logging. + * Intended to mimic the publicly available `debug` package. + */ +export interface Debug { + /** + * Creates a new logger with the given namespace. + */ + (namespace: string): Debugger; + /** + * The default log method (defaults to console) + */ + log: (...args: any[]) => void; + /** + * Enables a particular set of namespaces. + * To enable multiple separate them with commas, e.g. "info,debug". + * Supports wildcards, e.g. "azure:*" + * Supports skip syntax, e.g. "azure:*,-azure:storage:*" will enable + * everything under azure except for things under azure:storage. + */ + enable: (namespaces: string) => void; + /** + * Checks if a particular namespace is enabled. + */ + enabled: (namespace: string) => boolean; + /** + * Disables all logging, returns what was previously enabled. + */ + disable: () => string; +} +/** + * A log function that can be dynamically enabled and redirected. 
+ */ +export interface Debugger { + /** + * Logs the given arguments to the `log` method. + */ + (...args: any[]): void; + /** + * True if this logger is active and logging. + */ + enabled: boolean; + /** + * Used to cleanup/remove this logger. + */ + destroy: () => boolean; + /** + * The current log method. Can be overridden to redirect output. + */ + log: (...args: any[]) => void; + /** + * The namespace of this logger. + */ + namespace: string; + /** + * Extends this logger with a child namespace. + * Namespaces are separated with a ':' character. + */ + extend: (namespace: string) => Debugger; +} +declare const debugObj: Debug; +export default debugObj; +//# sourceMappingURL=debug.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/debug.d.ts.map b/node_modules/@azure/logger/dist/esm/debug.d.ts.map new file mode 100644 index 0000000..9de8e8c --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/debug.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"debug.d.ts","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,MAAM,WAAW,KAAK;IACpB;;OAEG;IACH,CAAC,SAAS,EAAE,MAAM,GAAG,QAAQ,CAAC;IAC9B;;OAEG;IACH,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,CAAC;IAC9B;;;;;;OAMG;IACH,MAAM,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC;;OAEG;IACH,OAAO,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC;IACxC;;OAEG;IACH,OAAO,EAAE,MAAM,MAAM,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,QAAQ;IACvB;;OAEG;IACH,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;IACvB;;OAEG;IACH,OAAO,EAAE,OAAO,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,OAAO,CAAC;IACvB;;OAEG;IACH,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,CAAC;IAC9B;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;;OAGG;IACH,MAAM,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,QAAQ,CAAC;CACzC;AAcD,QAAA,MAAM,QAAQ,EAAE,KAUf,CAAC;AAmFF,eAAe,QAAQ,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/debug.js b/node_modules/@azure/logger/dist/esm/debug.js new file mode 100644 index 0000000..cf49a67 --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/debug.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { log } from "./log.js"; +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log, +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend, + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} +export default debugObj; +//# sourceMappingURL=debug.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/debug.js.map b/node_modules/@azure/logger/dist/esm/debug.js.map new file mode 100644 index 0000000..68d85aa --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/debug.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"debug.js","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC;AAgE/B,MAAM,gBAAgB,GACpB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,SAAS,CAAC;AAEpF,IAAI,aAAiC,CAAC;AACtC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,MAAM,SAAS,GAAe,EAAE,CAAC;AAEjC,IAAI,gBAAgB,EAAE,CAAC;IACrB,MAAM,CAAC,gBAAgB,CAAC,CAAC;AAC3B,CAAC;AAED,MAAM,QAAQ,GAAU,MAAM,CAAC,MAAM,CACnC,CAAC,SAAiB,EAAY,EAAE;IAC9B,OAAO,cAAc,CAAC,SAAS,CAAC,CAAC;AACnC,CAAC,EACD;IACE,MAAM;IACN,OAAO;IACP,OAAO;IACP,GAAG;CACJ,CACF,CAAC;AAEF,SAAS,MAAM,CAAC,UAAkB;IAChC,aAAa,GAAG,UAAU,CAAC;IAC3B,iBAAiB,GAAG,EAAE,CAAC;IACvB,iBAAiB,GAAG,EAAE,CAAC;IACvB,MAAM,QAAQ,GAAG,KAAK,CAAC;IACvB,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IAC5F,KAAK,MAAM,EAAE,IAAI,aAAa,EAAE,CAAC;QAC/B,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACvB,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QAC1D,CAAC;aAAM,CAAC;YACN,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;QAChD,CAAC;IACH,CAAC;IACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,QAAQ,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;IACjD,CAAC;AACH,CAAC;AAED,SAAS,OAAO,CAAC,SAAiB;IAChC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QAC5B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE,CAAC;QACxC,IAAI,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YAC5B,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IACD,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE,CAAC;QACjD,IAAI,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YACrC,OAAO,IAAI,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,OAAO;IACd,MAAM,MAAM,GAAG,aAAa,IAAI,EAAE,CAAC;IACnC,MAAM,CAAC,EAAE,CAAC,CAAC;IACX,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,WAAW,GAAa,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;QACjD,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC;QAC3B,OAAO;QACP,GAAG,EAAE,QAAQ,CAAC,GAAG;QACjB,SAAS;QACT,MAAM;KACP,CAAC,CAAC;IAEH,SAAS,KAAK,CAAC,GAAG,IAAW;QAC3B,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;YACzB,OAAO;QACT,CAAC;QACD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACpB,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QACD,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IAC3B,CAAC;IAED,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;IAE5B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,OAAO;IACd,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,IAAI,CAAC,EAAE,CAAC;QACf,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAiB,SAAiB;IAC/C,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,IAAI,CAAC,SAAS,IAAI,SAAS,EAAE,CAAC,CAAC;IACrE,WAAW,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;IAC3B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,eAAe,QAAQ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { log } from \"./log.js\";\n\n/**\n * A simple mechanism for enabling logging.\n * Intended to mimic the publicly available `debug` package.\n */\nexport interface Debug {\n /**\n * Creates a new logger with the given namespace.\n */\n (namespace: string): Debugger;\n /**\n * The default log method (defaults to console)\n */\n log: (...args: any[]) => void;\n /**\n * Enables a particular set of namespaces.\n * To enable multiple separate them with commas, e.g. \"info,debug\".\n * Supports wildcards, e.g. \"azure:*\"\n * Supports skip syntax, e.g. 
\"azure:*,-azure:storage:*\" will enable\n * everything under azure except for things under azure:storage.\n */\n enable: (namespaces: string) => void;\n /**\n * Checks if a particular namespace is enabled.\n */\n enabled: (namespace: string) => boolean;\n /**\n * Disables all logging, returns what was previously enabled.\n */\n disable: () => string;\n}\n\n/**\n * A log function that can be dynamically enabled and redirected.\n */\nexport interface Debugger {\n /**\n * Logs the given arguments to the `log` method.\n */\n (...args: any[]): void;\n /**\n * True if this logger is active and logging.\n */\n enabled: boolean;\n /**\n * Used to cleanup/remove this logger.\n */\n destroy: () => boolean;\n /**\n * The current log method. Can be overridden to redirect output.\n */\n log: (...args: any[]) => void;\n /**\n * The namespace of this logger.\n */\n namespace: string;\n /**\n * Extends this logger with a child namespace.\n * Namespaces are separated with a ':' character.\n */\n extend: (namespace: string) => Debugger;\n}\n\nconst debugEnvVariable =\n (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\n\nlet enabledString: string | undefined;\nlet enabledNamespaces: RegExp[] = [];\nlet skippedNamespaces: RegExp[] = [];\nconst debuggers: Debugger[] = [];\n\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\n\nconst debugObj: Debug = Object.assign(\n (namespace: string): Debugger => {\n return createDebugger(namespace);\n },\n {\n enable,\n enabled,\n disable,\n log,\n },\n);\n\nfunction enable(namespaces: string): void {\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n const wildcard = /\\*/g;\n const namespaceList = namespaces.split(\",\").map((ns) => ns.trim().replace(wildcard, \".*?\"));\n for (const ns of namespaceList) {\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));\n } else {\n enabledNamespaces.push(new RegExp(`^${ns}$`));\n }\n }\n for (const instance of debuggers) {\n instance.enabled = enabled(instance.namespace);\n }\n}\n\nfunction enabled(namespace: string): boolean {\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n\n for (const skipped of skippedNamespaces) {\n if (skipped.test(namespace)) {\n return false;\n }\n }\n for (const enabledNamespace of enabledNamespaces) {\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n return false;\n}\n\nfunction disable(): string {\n const result = enabledString || \"\";\n enable(\"\");\n return result;\n}\n\nfunction createDebugger(namespace: string): Debugger {\n const newDebugger: Debugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy,\n log: debugObj.log,\n namespace,\n extend,\n });\n\n function debug(...args: any[]): void {\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = `${namespace} ${args[0]}`;\n }\n newDebugger.log(...args);\n }\n\n debuggers.push(newDebugger);\n\n return newDebugger;\n}\n\nfunction destroy(this: Debugger): boolean {\n const index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\n\nfunction extend(this: Debugger, namespace: string): Debugger {\n const newDebugger = createDebugger(`${this.namespace}:${namespace}`);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\nexport default debugObj;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/index.d.ts b/node_modules/@azure/logger/dist/esm/index.d.ts new file mode 100644 index 
0000000..1899271 --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/index.d.ts @@ -0,0 +1,68 @@ +import { type Debugger } from "./debug.js"; +export type { Debugger } from "./debug.js"; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export declare const AzureLogger: AzureClientLogger; +/** + * The log levels supported by the logger. + * The log levels in order of most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export type AzureLogLevel = "verbose" | "info" | "warning" | "error"; +/** + * An AzureClientLogger is a function that can log to an appropriate severity level. + */ +export type AzureClientLogger = Debugger; +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare function setLogLevel(level?: AzureLogLevel): void; +/** + * Retrieves the currently specified log level. + */ +export declare function getLogLevel(): AzureLogLevel | undefined; +/** + * Defines the methods available on the SDK-facing logger. + */ +export interface AzureLogger { + /** + * Used for failures the program is unlikely to recover from, + * such as Out of Memory. + */ + error: Debugger; + /** + * Used when a function fails to perform its intended task. + * Usually this means the function will throw an exception. + * Not used for self-healing events (e.g. automatic retry) + */ + warning: Debugger; + /** + * Used when a function operates normally. + */ + info: Debugger; + /** + * Used for detailed troubleshooting scenarios. This is + * intended for use by developers / system administrators + * for diagnosing specific failures. + */ + verbose: Debugger; +} +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export declare function createClientLogger(namespace: string): AzureLogger; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/index.d.ts.map b/node_modules/@azure/logger/dist/esm/index.d.ts.map new file mode 100644 index 0000000..405477e --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAc,EAAE,KAAK,QAAQ,EAAE,MAAM,YAAY,CAAC;AAClD,YAAY,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAQ3C;;;;GAIG;AACH,eAAO,MAAM,WAAW,EAAE,iBAAkC,CAAC;AAK7D;;;;;;;GAOG;AACH,MAAM,MAAM,aAAa,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS,GAAG,OAAO,CAAC;AAKrE;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,QAAQ,CAAC;AAezC;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CAAC,KAAK,CAAC,EAAE,aAAa,GAAG,IAAI,CAgBvD;AAED;;GAEG;AACH,wBAAgB,WAAW,IAAI,aAAa,GAAG,SAAS,CAEvD;AASD;;GAEG;AAEH,MAAM,WAAW,WAAW;IAC1B;;;OAGG;IACH,KAAK,EAAE,QAAQ,CAAC;IAChB;;;;OAIG;IACH,OAAO,EAAE,QAAQ,CAAC;IAClB;;OAEG;IACH,IAAI,EAAE,QAAQ,CAAC;IACf;;;;OAIG;IACH,OAAO,EAAE,QAAQ,CAAC;CACnB;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,SAAS,EAAE,MAAM,GAAG,WAAW,CASjE"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/index.js b/node_modules/@azure/logger/dist/esm/index.js new file mode 100644 index 0000000..6e9ca26 --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/index.js @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import debug from "./debug.js"; +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export const AzureLogger = debug("azure"); +AzureLogger.log = (...args) => { + debug.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +export function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100, +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export function createClientLogger(namespace) { + const clientRootLogger = AzureLogger.extend(namespace); + patchLogMethod(AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose"), + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level, + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug.disable(); + debug.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/index.js.map b/node_modules/@azure/logger/dist/esm/index.js.map new file mode 100644 index 0000000..1c399d6 --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAwB,MAAM,YAAY,CAAC;AAGlD,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAiB,CAAC;AACnD,MAAM,eAAe,GACnB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,SAAS,CAAC;AAE9F,IAAI,aAAwC,CAAC;AAE7C;;;;GAIG;AACH,MAAM,CAAC,MAAM,WAAW,GAAsB,KAAK,CAAC,OAAO,CAAC,CAAC;AAC7D,WAAW,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;IAC5B,KAAK,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC;AAWF,MAAM,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;AASjE,IAAI,eAAe,EAAE,CAAC;IACpB,0FAA0F;IAC1F,IAAI,eAAe,CAAC,eAAe,CAAC,EAAE,CAAC;QACrC,WAAW,CAAC,eAAe,CAAC,CAAC;IAC/B,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,KAAK,CACX,6CAA6C,eAAe,iDAAiD,gBAAgB,CAAC,IAAI,CAChI,IAAI,CACL,GAAG,CACL,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CAAC,KAAqB;IAC/C,IAAI,KAAK,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CACb,sBAAsB,KAAK,yBAAyB,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CACjF,CAAC;IACJ,CAAC;IACD,aAAa,GAAG,KAAK,CAAC;IAEtB,MAAM,iBAAiB,GAAG,EAAE,CAAC;IAC7B,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE,CAAC;QACvC,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC;YACzB,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAC3C,CAAC;IACH,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,OAAO,aAAa,CAAC;AACvB,CAAC;AAED,MAAM,QAAQ,GAAG;IACf,OAAO,EAAE,GAAG;IACZ,IAAI,EAAE,GAAG;IACT,OAAO,EAAE,GAAG;IACZ,KAAK,EAAE,GAAG;CACX,CAAC;AA8BF;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,SAAiB;IAClD,MAAM,gBAAgB,GAAsB,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC1E,cAAc,CAAC,WAAW,EAAE,gBAAgB,CAAC,CAAC;IAC9C,OAAO;QACL,KAAK,EAAE,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC;QAC9C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;QAClD,IAAI,EAAE,YAAY,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAC5C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;KACnD,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,MAAyB,EAAE,KAAwC;IACzF,KAAK,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;QACtB,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IACtB,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,MAAyB,EAAE,KAAoB;IACnE,MAAM,MAAM,GAAkB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QAChE,KAAK;KACN,CAAC,CAAC;IAEH,cAAc,CAAC,MAA
M,EAAE,MAAM,CAAC,CAAC;IAE/B,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC;QACzB,MAAM,iBAAiB,GAAG,KAAK,CAAC,OAAO,EAAE,CAAC;QAC1C,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3D,CAAC;IAED,iBAAiB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAE9B,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,YAAY,CAAC,MAAqB;IACzC,OAAO,OAAO,CAAC,aAAa,IAAI,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC;AACrF,CAAC;AAED,SAAS,eAAe,CAAC,QAAgB;IACvC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;AACpD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport debug, { type Debugger } from \"./debug.js\";\nexport type { Debugger } from \"./debug.js\";\n\nconst registeredLoggers = new Set();\nconst logLevelFromEnv =\n (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\n\nlet azureLogLevel: AzureLogLevel | undefined;\n\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nexport const AzureLogger: AzureClientLogger = debug(\"azure\");\nAzureLogger.log = (...args) => {\n debug.log(...args);\n};\n\n/**\n * The log levels supported by the logger.\n * The log levels in order of most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport type AzureLogLevel = \"verbose\" | \"info\" | \"warning\" | \"error\";\nconst AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\n\ntype AzureDebugger = Debugger & { level: AzureLogLevel };\n\n/**\n * An AzureClientLogger is a function that can log to an appropriate severity level.\n */\nexport type AzureClientLogger = Debugger;\n\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n } else {\n console.error(\n `AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(\n \", \",\n )}.`,\n );\n }\n}\n\n/**\n * Immediately enables logging at the specified log level. If no level is specified, logging is disabled.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport function setLogLevel(level?: AzureLogLevel): void {\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\n `Unknown log level '${level}'. 
Acceptable values: ${AZURE_LOG_LEVELS.join(\",\")}`,\n );\n }\n azureLogLevel = level;\n\n const enabledNamespaces = [];\n for (const logger of registeredLoggers) {\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n\n debug.enable(enabledNamespaces.join(\",\"));\n}\n\n/**\n * Retrieves the currently specified log level.\n */\nexport function getLogLevel(): AzureLogLevel | undefined {\n return azureLogLevel;\n}\n\nconst levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100,\n};\n\n/**\n * Defines the methods available on the SDK-facing logger.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface AzureLogger {\n /**\n * Used for failures the program is unlikely to recover from,\n * such as Out of Memory.\n */\n error: Debugger;\n /**\n * Used when a function fails to perform its intended task.\n * Usually this means the function will throw an exception.\n * Not used for self-healing events (e.g. automatic retry)\n */\n warning: Debugger;\n /**\n * Used when a function operates normally.\n */\n info: Debugger;\n /**\n * Used for detailed troubleshooting scenarios. This is\n * intended for use by developers / system administrators\n * for diagnosing specific failures.\n */\n verbose: Debugger;\n}\n\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nexport function createClientLogger(namespace: string): AzureLogger {\n const clientRootLogger: AzureClientLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\"),\n };\n}\n\nfunction patchLogMethod(parent: AzureClientLogger, child: AzureClientLogger | AzureDebugger): void {\n child.log = (...args) => {\n parent.log(...args);\n };\n}\n\nfunction createLogger(parent: AzureClientLogger, level: AzureLogLevel): AzureDebugger {\n const logger: AzureDebugger = Object.assign(parent.extend(level), {\n level,\n });\n\n patchLogMethod(parent, logger);\n\n if (shouldEnable(logger)) {\n const enabledNamespaces = debug.disable();\n debug.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n\n registeredLoggers.add(logger);\n\n return logger;\n}\n\nfunction shouldEnable(logger: AzureDebugger): boolean {\n return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]);\n}\n\nfunction isAzureLogLevel(logLevel: string): logLevel is AzureLogLevel {\n return AZURE_LOG_LEVELS.includes(logLevel as any);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/log.d.ts b/node_modules/@azure/logger/dist/esm/log.d.ts new file mode 100644 index 0000000..d835a2c --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/log.d.ts @@ -0,0 +1,2 @@ +export declare function log(message: unknown, ...args: any[]): void; +//# sourceMappingURL=log.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/log.d.ts.map b/node_modules/@azure/logger/dist/esm/log.d.ts.map new file mode 100644 index 0000000..470aea2 --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/log.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"log.d.ts","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAOA,wBAAgB,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAE1D"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/log.js b/node_modules/@azure/logger/dist/esm/log.js new file mode 100644 index 0000000..ec432c9 --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/log.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { EOL } from "node:os"; +import util from "node:util"; +import * as process from "node:process"; +export function log(message, ...args) { + process.stderr.write(`${util.format(message, ...args)}${EOL}`); +} +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/log.js.map b/node_modules/@azure/logger/dist/esm/log.js.map new file mode 100644 index 0000000..1c64e18 --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,GAAG,EAAE,MAAM,SAAS,CAAC;AAC9B,OAAO,IAAI,MAAM,WAAW,CAAC;AAC7B,OAAO,KAAK,OAAO,MAAM,cAAc,CAAC;AAExC,MAAM,UAAU,GAAG,CAAC,OAAgB,EAAE,GAAG,IAAW;IAClD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC;AACjE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EOL } from \"node:os\";\nimport util from \"node:util\";\nimport * as process from \"node:process\";\n\nexport function log(message: unknown, ...args: any[]): void {\n process.stderr.write(`${util.format(message, ...args)}${EOL}`);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/esm/package.json b/node_modules/@azure/logger/dist/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/logger/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/logger/dist/logger.d.ts b/node_modules/@azure/logger/dist/logger.d.ts new file mode 100644 index 0000000..68f3e8f --- /dev/null +++ b/node_modules/@azure/logger/dist/logger.d.ts @@ -0,0 +1,104 @@ +/** + * An AzureClientLogger is a function that can log to an appropriate severity level. + */ +export declare type AzureClientLogger = Debugger; + +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export declare const AzureLogger: AzureClientLogger; + +/** + * Defines the methods available on the SDK-facing logger. + */ +export declare interface AzureLogger { + /** + * Used for failures the program is unlikely to recover from, + * such as Out of Memory. + */ + error: Debugger; + /** + * Used when a function fails to perform its intended task. + * Usually this means the function will throw an exception. + * Not used for self-healing events (e.g. automatic retry) + */ + warning: Debugger; + /** + * Used when a function operates normally. + */ + info: Debugger; + /** + * Used for detailed troubleshooting scenarios. This is + * intended for use by developers / system administrators + * for diagnosing specific failures. + */ + verbose: Debugger; +} + +/** + * The log levels supported by the logger. 
+ * The log levels in order of most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare type AzureLogLevel = "verbose" | "info" | "warning" | "error"; + +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. + * @hidden + */ +export declare function createClientLogger(namespace: string): AzureLogger; + +/** + * A log function that can be dynamically enabled and redirected. + */ +export declare interface Debugger { + /** + * Logs the given arguments to the `log` method. + */ + (...args: any[]): void; + /** + * True if this logger is active and logging. + */ + enabled: boolean; + /** + * Used to cleanup/remove this logger. + */ + destroy: () => boolean; + /** + * The current log method. Can be overridden to redirect output. + */ + log: (...args: any[]) => void; + /** + * The namespace of this logger. + */ + namespace: string; + /** + * Extends this logger with a child namespace. + * Namespaces are separated with a ':' character. + */ + extend: (namespace: string) => Debugger; +} + +/** + * Retrieves the currently specified log level. + */ +export declare function getLogLevel(): AzureLogLevel | undefined; + +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare function setLogLevel(level?: AzureLogLevel): void; + +export { } diff --git a/node_modules/@azure/logger/dist/react-native/debug.d.ts b/node_modules/@azure/logger/dist/react-native/debug.d.ts new file mode 100644 index 0000000..6b02b18 --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/debug.d.ts @@ -0,0 +1,63 @@ +/** + * A simple mechanism for enabling logging. + * Intended to mimic the publicly available `debug` package. + */ +export interface Debug { + /** + * Creates a new logger with the given namespace. + */ + (namespace: string): Debugger; + /** + * The default log method (defaults to console) + */ + log: (...args: any[]) => void; + /** + * Enables a particular set of namespaces. + * To enable multiple separate them with commas, e.g. "info,debug". + * Supports wildcards, e.g. "azure:*" + * Supports skip syntax, e.g. "azure:*,-azure:storage:*" will enable + * everything under azure except for things under azure:storage. + */ + enable: (namespaces: string) => void; + /** + * Checks if a particular namespace is enabled. + */ + enabled: (namespace: string) => boolean; + /** + * Disables all logging, returns what was previously enabled. + */ + disable: () => string; +} +/** + * A log function that can be dynamically enabled and redirected. + */ +export interface Debugger { + /** + * Logs the given arguments to the `log` method. + */ + (...args: any[]): void; + /** + * True if this logger is active and logging. + */ + enabled: boolean; + /** + * Used to cleanup/remove this logger. + */ + destroy: () => boolean; + /** + * The current log method. Can be overridden to redirect output. + */ + log: (...args: any[]) => void; + /** + * The namespace of this logger. + */ + namespace: string; + /** + * Extends this logger with a child namespace. + * Namespaces are separated with a ':' character. 
+ */ + extend: (namespace: string) => Debugger; +} +declare const debugObj: Debug; +export default debugObj; +//# sourceMappingURL=debug.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/debug.d.ts.map b/node_modules/@azure/logger/dist/react-native/debug.d.ts.map new file mode 100644 index 0000000..9de8e8c --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/debug.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"debug.d.ts","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,MAAM,WAAW,KAAK;IACpB;;OAEG;IACH,CAAC,SAAS,EAAE,MAAM,GAAG,QAAQ,CAAC;IAC9B;;OAEG;IACH,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,CAAC;IAC9B;;;;;;OAMG;IACH,MAAM,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC;;OAEG;IACH,OAAO,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC;IACxC;;OAEG;IACH,OAAO,EAAE,MAAM,MAAM,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,QAAQ;IACvB;;OAEG;IACH,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;IACvB;;OAEG;IACH,OAAO,EAAE,OAAO,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,OAAO,CAAC;IACvB;;OAEG;IACH,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,CAAC;IAC9B;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;;OAGG;IACH,MAAM,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,QAAQ,CAAC;CACzC;AAcD,QAAA,MAAM,QAAQ,EAAE,KAUf,CAAC;AAmFF,eAAe,QAAQ,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/debug.js b/node_modules/@azure/logger/dist/react-native/debug.js new file mode 100644 index 0000000..cf49a67 --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/debug.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { log } from "./log.js"; +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log, +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend, + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return 
true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} +export default debugObj; +//# sourceMappingURL=debug.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/debug.js.map b/node_modules/@azure/logger/dist/react-native/debug.js.map new file mode 100644 index 0000000..68d85aa --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/debug.js.map @@ -0,0 +1 @@ +{"version":3,"file":"debug.js","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC;AAgE/B,MAAM,gBAAgB,GACpB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,SAAS,CAAC;AAEpF,IAAI,aAAiC,CAAC;AACtC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,MAAM,SAAS,GAAe,EAAE,CAAC;AAEjC,IAAI,gBAAgB,EAAE,CAAC;IACrB,MAAM,CAAC,gBAAgB,CAAC,CAAC;AAC3B,CAAC;AAED,MAAM,QAAQ,GAAU,MAAM,CAAC,MAAM,CACnC,CAAC,SAAiB,EAAY,EAAE;IAC9B,OAAO,cAAc,CAAC,SAAS,CAAC,CAAC;AACnC,CAAC,EACD;IACE,MAAM;IACN,OAAO;IACP,OAAO;IACP,GAAG;CACJ,CACF,CAAC;AAEF,SAAS,MAAM,CAAC,UAAkB;IAChC,aAAa,GAAG,UAAU,CAAC;IAC3B,iBAAiB,GAAG,EAAE,CAAC;IACvB,iBAAiB,GAAG,EAAE,CAAC;IACvB,MAAM,QAAQ,GAAG,KAAK,CAAC;IACvB,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IAC5F,KAAK,MAAM,EAAE,IAAI,aAAa,EAAE,CAAC;QAC/B,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACvB,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QAC1D,CAAC;aAAM,CAAC;YACN,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;QAChD,CAAC;IACH,CAAC;IACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,QAAQ,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;IACjD,CAAC;AACH,CAAC;AAED,SAAS,OAAO,CAAC,SAAiB;IAChC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QAC5B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE,CAAC;QACxC,IAAI,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YAC5B,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IACD,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE,CAAC;QACjD,IAAI,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YACrC,OAAO,IAAI,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,OAAO;IACd,MAAM,MAAM,GAAG,aAAa,IAAI,EAAE,CAAC;IACnC,MAAM,CAAC,EAAE,CAAC,CAAC;IACX,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,WAAW,GAAa,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;QACjD,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC;QAC3B,OAAO;QACP,GAAG,EAAE,QAAQ,CAAC,GAAG;QACjB,SAAS;QACT,MAAM;KACP,CAAC,CAAC;IAEH,SAAS,KAAK,CAAC,GAAG,IAAW;QAC3B,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;YACzB,OAAO;QACT,CAAC;QACD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACpB,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QACD,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IAC3B,CAAC;IAED,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;IAE5B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,OAAO;IACd,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,IAAI,CAAC,EAAE,CAAC;QACf,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAiB,SAAiB;IAC/C,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,IAAI,CAAC,SAAS,IAAI,SAAS,EAAE,CAAC,CAAC;IACrE,WAAW,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;IAC3B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,eAAe,QAAQ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { log } from \"./log.js\";\n\n/**\n * A simple 
mechanism for enabling logging.\n * Intended to mimic the publicly available `debug` package.\n */\nexport interface Debug {\n /**\n * Creates a new logger with the given namespace.\n */\n (namespace: string): Debugger;\n /**\n * The default log method (defaults to console)\n */\n log: (...args: any[]) => void;\n /**\n * Enables a particular set of namespaces.\n * To enable multiple separate them with commas, e.g. \"info,debug\".\n * Supports wildcards, e.g. \"azure:*\"\n * Supports skip syntax, e.g. \"azure:*,-azure:storage:*\" will enable\n * everything under azure except for things under azure:storage.\n */\n enable: (namespaces: string) => void;\n /**\n * Checks if a particular namespace is enabled.\n */\n enabled: (namespace: string) => boolean;\n /**\n * Disables all logging, returns what was previously enabled.\n */\n disable: () => string;\n}\n\n/**\n * A log function that can be dynamically enabled and redirected.\n */\nexport interface Debugger {\n /**\n * Logs the given arguments to the `log` method.\n */\n (...args: any[]): void;\n /**\n * True if this logger is active and logging.\n */\n enabled: boolean;\n /**\n * Used to cleanup/remove this logger.\n */\n destroy: () => boolean;\n /**\n * The current log method. Can be overridden to redirect output.\n */\n log: (...args: any[]) => void;\n /**\n * The namespace of this logger.\n */\n namespace: string;\n /**\n * Extends this logger with a child namespace.\n * Namespaces are separated with a ':' character.\n */\n extend: (namespace: string) => Debugger;\n}\n\nconst debugEnvVariable =\n (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\n\nlet enabledString: string | undefined;\nlet enabledNamespaces: RegExp[] = [];\nlet skippedNamespaces: RegExp[] = [];\nconst debuggers: Debugger[] = [];\n\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\n\nconst debugObj: Debug = Object.assign(\n (namespace: string): Debugger => {\n return createDebugger(namespace);\n },\n {\n enable,\n enabled,\n disable,\n log,\n },\n);\n\nfunction enable(namespaces: string): void {\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n const wildcard = /\\*/g;\n const namespaceList = namespaces.split(\",\").map((ns) => ns.trim().replace(wildcard, \".*?\"));\n for (const ns of namespaceList) {\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));\n } else {\n enabledNamespaces.push(new RegExp(`^${ns}$`));\n }\n }\n for (const instance of debuggers) {\n instance.enabled = enabled(instance.namespace);\n }\n}\n\nfunction enabled(namespace: string): boolean {\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n\n for (const skipped of skippedNamespaces) {\n if (skipped.test(namespace)) {\n return false;\n }\n }\n for (const enabledNamespace of enabledNamespaces) {\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n return false;\n}\n\nfunction disable(): string {\n const result = enabledString || \"\";\n enable(\"\");\n return result;\n}\n\nfunction createDebugger(namespace: string): Debugger {\n const newDebugger: Debugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy,\n log: debugObj.log,\n namespace,\n extend,\n });\n\n function debug(...args: any[]): void {\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = `${namespace} ${args[0]}`;\n }\n newDebugger.log(...args);\n }\n\n debuggers.push(newDebugger);\n\n return newDebugger;\n}\n\nfunction destroy(this: Debugger): boolean {\n const index 
= debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\n\nfunction extend(this: Debugger, namespace: string): Debugger {\n const newDebugger = createDebugger(`${this.namespace}:${namespace}`);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\nexport default debugObj;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/index.d.ts b/node_modules/@azure/logger/dist/react-native/index.d.ts new file mode 100644 index 0000000..1899271 --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/index.d.ts @@ -0,0 +1,68 @@ +import { type Debugger } from "./debug.js"; +export type { Debugger } from "./debug.js"; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export declare const AzureLogger: AzureClientLogger; +/** + * The log levels supported by the logger. + * The log levels in order of most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export type AzureLogLevel = "verbose" | "info" | "warning" | "error"; +/** + * An AzureClientLogger is a function that can log to an appropriate severity level. + */ +export type AzureClientLogger = Debugger; +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare function setLogLevel(level?: AzureLogLevel): void; +/** + * Retrieves the currently specified log level. + */ +export declare function getLogLevel(): AzureLogLevel | undefined; +/** + * Defines the methods available on the SDK-facing logger. + */ +export interface AzureLogger { + /** + * Used for failures the program is unlikely to recover from, + * such as Out of Memory. + */ + error: Debugger; + /** + * Used when a function fails to perform its intended task. + * Usually this means the function will throw an exception. + * Not used for self-healing events (e.g. automatic retry) + */ + warning: Debugger; + /** + * Used when a function operates normally. + */ + info: Debugger; + /** + * Used for detailed troubleshooting scenarios. This is + * intended for use by developers / system administrators + * for diagnosing specific failures. + */ + verbose: Debugger; +} +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export declare function createClientLogger(namespace: string): AzureLogger; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/index.d.ts.map b/node_modules/@azure/logger/dist/react-native/index.d.ts.map new file mode 100644 index 0000000..405477e --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,OAAc,EAAE,KAAK,QAAQ,EAAE,MAAM,YAAY,CAAC;AAClD,YAAY,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAQ3C;;;;GAIG;AACH,eAAO,MAAM,WAAW,EAAE,iBAAkC,CAAC;AAK7D;;;;;;;GAOG;AACH,MAAM,MAAM,aAAa,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS,GAAG,OAAO,CAAC;AAKrE;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,QAAQ,CAAC;AAezC;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CAAC,KAAK,CAAC,EAAE,aAAa,GAAG,IAAI,CAgBvD;AAED;;GAEG;AACH,wBAAgB,WAAW,IAAI,aAAa,GAAG,SAAS,CAEvD;AASD;;GAEG;AAEH,MAAM,WAAW,WAAW;IAC1B;;;OAGG;IACH,KAAK,EAAE,QAAQ,CAAC;IAChB;;;;OAIG;IACH,OAAO,EAAE,QAAQ,CAAC;IAClB;;OAEG;IACH,IAAI,EAAE,QAAQ,CAAC;IACf;;;;OAIG;IACH,OAAO,EAAE,QAAQ,CAAC;CACnB;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,SAAS,EAAE,MAAM,GAAG,WAAW,CASjE"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/index.js b/node_modules/@azure/logger/dist/react-native/index.js new file mode 100644 index 0000000..6e9ca26 --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/index.js @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import debug from "./debug.js"; +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export const AzureLogger = debug("azure"); +AzureLogger.log = (...args) => { + debug.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +export function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100, +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export function createClientLogger(namespace) { + const clientRootLogger = AzureLogger.extend(namespace); + patchLogMethod(AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose"), + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level, + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug.disable(); + debug.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/index.js.map b/node_modules/@azure/logger/dist/react-native/index.js.map new file mode 100644 index 0000000..1c399d6 --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAwB,MAAM,YAAY,CAAC;AAGlD,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAiB,CAAC;AACnD,MAAM,eAAe,GACnB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,SAAS,CAAC;AAE9F,IAAI,aAAwC,CAAC;AAE7C;;;;GAIG;AACH,MAAM,CAAC,MAAM,WAAW,GAAsB,KAAK,CAAC,OAAO,CAAC,CAAC;AAC7D,WAAW,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;IAC5B,KAAK,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC;AAWF,MAAM,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;AASjE,IAAI,eAAe,EAAE,CAAC;IACpB,0FAA0F;IAC1F,IAAI,eAAe,CAAC,eAAe,CAAC,EAAE,CAAC;QACrC,WAAW,CAAC,eAAe,CAAC,CAAC;IAC/B,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,KAAK,CACX,6CAA6C,eAAe,iDAAiD,gBAAgB,CAAC,IAAI,CAChI,IAAI,CACL,GAAG,CACL,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CAAC,KAAqB;IAC/C,IAAI,KAAK,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CACb,sBAAsB,KAAK,yBAAyB,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CACjF,CAAC;IACJ,CAAC;IACD,aAAa,GAAG,KAAK,CAAC;IAEtB,MAAM,iBAAiB,GAAG,EAAE,CAAC;IAC7B,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE,CAAC;QACvC,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC;YACzB,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAC3C,CAAC;IACH,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,OAAO,aAAa,CAAC;AACvB,CAAC;AAED,MAAM,QAAQ,GAAG;IACf,OAAO,EAAE,GAAG;IACZ,IAAI,EAAE,GAAG;IACT,OAAO,EAAE,GAAG;IACZ,KAAK,EAAE,GAAG;CACX,CAAC;AA8BF;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,SAAiB;IAClD,MAAM,gBAAgB,GAAsB,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC1E,cAAc,CAAC,WAAW,EAAE,gBAAgB,CAAC,CAAC;IAC9C,OAAO;QACL,KAAK,EAAE,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC;QAC9C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;QAClD,IAAI,EAAE,YAAY,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAC5C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;KACnD,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,MAAyB,EAAE,KAAwC;IACzF,KAAK,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;QACtB,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IACtB,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,MAAyB,EAAE,KAAoB;IACnE,MAAM,MAAM,GAAkB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QAChE,KAAK;KACN,C
AAC,CAAC;IAEH,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/B,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC;QACzB,MAAM,iBAAiB,GAAG,KAAK,CAAC,OAAO,EAAE,CAAC;QAC1C,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3D,CAAC;IAED,iBAAiB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAE9B,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,YAAY,CAAC,MAAqB;IACzC,OAAO,OAAO,CAAC,aAAa,IAAI,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC;AACrF,CAAC;AAED,SAAS,eAAe,CAAC,QAAgB;IACvC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;AACpD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport debug, { type Debugger } from \"./debug.js\";\nexport type { Debugger } from \"./debug.js\";\n\nconst registeredLoggers = new Set();\nconst logLevelFromEnv =\n (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\n\nlet azureLogLevel: AzureLogLevel | undefined;\n\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nexport const AzureLogger: AzureClientLogger = debug(\"azure\");\nAzureLogger.log = (...args) => {\n debug.log(...args);\n};\n\n/**\n * The log levels supported by the logger.\n * The log levels in order of most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport type AzureLogLevel = \"verbose\" | \"info\" | \"warning\" | \"error\";\nconst AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\n\ntype AzureDebugger = Debugger & { level: AzureLogLevel };\n\n/**\n * An AzureClientLogger is a function that can log to an appropriate severity level.\n */\nexport type AzureClientLogger = Debugger;\n\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n } else {\n console.error(\n `AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(\n \", \",\n )}.`,\n );\n }\n}\n\n/**\n * Immediately enables logging at the specified log level. If no level is specified, logging is disabled.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport function setLogLevel(level?: AzureLogLevel): void {\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\n `Unknown log level '${level}'. 
Acceptable values: ${AZURE_LOG_LEVELS.join(\",\")}`,\n );\n }\n azureLogLevel = level;\n\n const enabledNamespaces = [];\n for (const logger of registeredLoggers) {\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n\n debug.enable(enabledNamespaces.join(\",\"));\n}\n\n/**\n * Retrieves the currently specified log level.\n */\nexport function getLogLevel(): AzureLogLevel | undefined {\n return azureLogLevel;\n}\n\nconst levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100,\n};\n\n/**\n * Defines the methods available on the SDK-facing logger.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface AzureLogger {\n /**\n * Used for failures the program is unlikely to recover from,\n * such as Out of Memory.\n */\n error: Debugger;\n /**\n * Used when a function fails to perform its intended task.\n * Usually this means the function will throw an exception.\n * Not used for self-healing events (e.g. automatic retry)\n */\n warning: Debugger;\n /**\n * Used when a function operates normally.\n */\n info: Debugger;\n /**\n * Used for detailed troubleshooting scenarios. This is\n * intended for use by developers / system administrators\n * for diagnosing specific failures.\n */\n verbose: Debugger;\n}\n\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nexport function createClientLogger(namespace: string): AzureLogger {\n const clientRootLogger: AzureClientLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\"),\n };\n}\n\nfunction patchLogMethod(parent: AzureClientLogger, child: AzureClientLogger | AzureDebugger): void {\n child.log = (...args) => {\n parent.log(...args);\n };\n}\n\nfunction createLogger(parent: AzureClientLogger, level: AzureLogLevel): AzureDebugger {\n const logger: AzureDebugger = Object.assign(parent.extend(level), {\n level,\n });\n\n patchLogMethod(parent, logger);\n\n if (shouldEnable(logger)) {\n const enabledNamespaces = debug.disable();\n debug.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n\n registeredLoggers.add(logger);\n\n return logger;\n}\n\nfunction shouldEnable(logger: AzureDebugger): boolean {\n return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]);\n}\n\nfunction isAzureLogLevel(logLevel: string): logLevel is AzureLogLevel {\n return AZURE_LOG_LEVELS.includes(logLevel as any);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/log.d.ts b/node_modules/@azure/logger/dist/react-native/log.d.ts new file mode 100644 index 0000000..d835a2c --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/log.d.ts @@ -0,0 +1,2 @@ +export declare function log(message: unknown, ...args: any[]): void; +//# sourceMappingURL=log.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/log.d.ts.map b/node_modules/@azure/logger/dist/react-native/log.d.ts.map new file mode 100644 index 0000000..470aea2 --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/log.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"log.d.ts","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAOA,wBAAgB,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAE1D"} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/log.js b/node_modules/@azure/logger/dist/react-native/log.js new file mode 100644 index 0000000..ec432c9 --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/log.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { EOL } from "node:os"; +import util from "node:util"; +import * as process from "node:process"; +export function log(message, ...args) { + process.stderr.write(`${util.format(message, ...args)}${EOL}`); +} +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/log.js.map b/node_modules/@azure/logger/dist/react-native/log.js.map new file mode 100644 index 0000000..1c64e18 --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,GAAG,EAAE,MAAM,SAAS,CAAC;AAC9B,OAAO,IAAI,MAAM,WAAW,CAAC;AAC7B,OAAO,KAAK,OAAO,MAAM,cAAc,CAAC;AAExC,MAAM,UAAU,GAAG,CAAC,OAAgB,EAAE,GAAG,IAAW;IAClD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC;AACjE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EOL } from \"node:os\";\nimport util from \"node:util\";\nimport * as process from \"node:process\";\n\nexport function log(message: unknown, ...args: any[]): void {\n process.stderr.write(`${util.format(message, ...args)}${EOL}`);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/react-native/package.json b/node_modules/@azure/logger/dist/react-native/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/@azure/logger/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/logger/package.json b/node_modules/@azure/logger/package.json new file mode 100644 index 0000000..8a6059b --- /dev/null +++ b/node_modules/@azure/logger/package.json @@ -0,0 +1,118 @@ +{ + "name": "@azure/logger", + "sdk-type": "client", + "version": "1.1.1", + "description": "Microsoft Azure SDK for JavaScript - Logger", + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "engines": { + "node": ">=18.0.0" + }, + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "log", + "logger", + "logging", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": 
"https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger/README.md", + "sideEffects": false, + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist dist-* temp *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "pretest": "npm run build:test", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "dotenv": "^16.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.11.1", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/ms-rest-js/LICENSE b/node_modules/@azure/ms-rest-js/LICENSE new file mode 100644 index 0000000..2107107 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@azure/ms-rest-js/README.md b/node_modules/@azure/ms-rest-js/README.md new file mode 100644 index 0000000..7beda9c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/README.md @@ -0,0 +1,41 @@ +# ms-rest-js + +[![Build Status](https://dev.azure.com/azure-public/azsdk/_apis/build/status/public.Azure.ms-rest-js%20-%20CI)](https://dev.azure.com/azure-public/azsdk/_build/latest?definitionId=39) + +Runtime for isomorphic javascript libraries (that work in the browser and node.js environment) generated via [Autorest](https://github.com/Azure/Autorest). + +## Requirements +- Node.js version > 6.x +- `npm install -g typescript` + +## Installation +- After cloning the repo, execute `npm install` + +## Execution + +### Node.js +- Set the subscriptionId and token as instructed in `samples/node-samples.ts` +- Run `npx ts-node samples/node-sample.js` + +### In the browser +- Run `npm run build` +- Set the subscriptionId and token then +- Open index.html file in the browser. It should show the response from GET request on the storage account. From Chrome type Ctrl + Shift + I and you can see the logs in console. + +## Architecture Overview + +You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/ms-rest-js/blob/master/docs/architectureOverview.md). + +# Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a +Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us +the rights to use your contribution. For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide +a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions +provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
diff --git a/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt b/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt new file mode 100644 index 0000000..8c622df --- /dev/null +++ b/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt @@ -0,0 +1,35 @@ +Third Party Notices for ms-rest-js + +This project incorporates material from the project(s) listed below (collectively, Third Party Code). +Microsoft, Inc. Microsoft is not the original author of the Third Party Code. +The original copyright notice and license, under which Microsoft received such Third Party Code, +are set out below. This Third Party Code is licensed to you under their original license terms set forth below. +Microsoft reserves all other rights not expressly granted, whether by implication, estoppel or otherwise. + +1. uuid (https://github.com/kelektiv/node-uuid) + +%% uuid NOTICES AND INFORMATION BEGIN HERE +========================================= +The MIT License (MIT) + +Copyright (c) 2010-2016 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +========================================= +END OF uuid NOTICES AND INFORMATION \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.js b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js new file mode 100644 index 0000000..01442a6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js @@ -0,0 +1,4425 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = global || self, factory(global.msRest = {})); +}(this, (function (exports) { 'use strict'; + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + /** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ + function getHeaderKey(headerName) { + return headerName.toLowerCase(); + } + function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; + } + /** + * A collection of HTTP header key/value pairs. + */ + var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. 
+ */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + /** + * Encodes a string in base64 format. + * @param value the string to encode + */ + function encodeString(value) { + return btoa(value); + } + /** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ + function encodeByteArray(value) { + var str = ""; + for (var i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); + } + /** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ + function decodeString(value) { + var byteString = atob(value); + var arr = new Uint8Array(byteString.length); + for (var i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } + + // Unique ID creation requires a high quality random # generator. In the browser we therefore + // require the crypto API and do not support built-in fallback to lower quality random number + // generators (like Math.random()). + var getRandomValues; + var rnds8 = new Uint8Array(16); + function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); + } + + var REGEX = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; + + function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); + } + + /** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + + var byteToHex = []; + + for (var i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); + } + + function stringify(arr) { + var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; + } + + function v4(options, buf, offset) { + options = options || {}; + var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.7.0", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, + }; + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A constant that indicates whether the environment is node.js or browser based. + */ + var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; + /** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. 
+ */ + function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); + } + /** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ + function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; + } + /** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ + function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; + } + /** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ + function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); + } + /** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ + function generateUuid() { + return v4(); + } + /** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ + function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; + } + /** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ + function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); + } + /** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ + function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; + } + /** + * Converts a Promise to a service callback. 
+ * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ + function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; + } + function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; + } + /** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ + function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); + } + var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; + /** + * Indicates whether the given string is in ISO 8601 format. + * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ + function isDuration(value) { + return validateISODuration.test(value); + } + /** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ + function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); + } + /** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ + function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; + }()); + function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); + } + function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); + } + function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); + } + function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; + } + function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); + } + function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); + } + function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; + } + function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; + } + function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = encodeByteArray(value); + } + return value; + } + function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; + } + function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; + } + function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; + } + function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; + } + /** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ + function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; + } + function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; + } + function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); + } + function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. 
+ for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; + } + function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, 
responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; + } + function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; + } + function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; + } + function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); + } + function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); + } + // TODO: why is this here? + function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; + } + /** + * Utility function to create a K:V from a list of strings + */ + function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; + } + var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", + ]); + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; + } + /** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ + var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. 
Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; + }()); + + /*! ***************************************************************************** + Copyright (c) Microsoft Corporation. + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. + ***************************************************************************** */ + /* global Reflect, Promise */ + + var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + + function __extends(d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + } + + var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); + }; + + function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + } + + function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? 
y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + } + + function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + var RestError = /** @class */ (function (_super) { + __extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; + }(Error)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ + var XhrHttpClient = /** @class */ (function () { + function XhrHttpClient() { + } + XhrHttpClient.prototype.sendRequest = function (request) { + var xhr = new XMLHttpRequest(); + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + var abortSignal = request.abortSignal; + if (abortSignal) { + var listener_1 = function () { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener_1); + xhr.addEventListener("readystatechange", function () { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener_1); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + var formData = request.formData; + var requestForm_1 = new FormData(); + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (var _i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + var formKey = _a[_i]; + var formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm_1; + request.formData = undefined; + var contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (var _b = 0, _c = request.headers.headersArray(); _b < _c.length; _b++) { + var header = _c[_b]; + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? 
null : request.body); + if (request.streamResponseBody) { + return new Promise(function (resolve, reject) { + xhr.addEventListener("readystatechange", function () { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + var blobBody = new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody: blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + return resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + }; + return XhrHttpClient; + }()); + function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", function (rawEvent) { + return listener({ + loadedBytes: rawEvent.loaded, + }); + }); + } + } + // exported locally for testing + function parseHeaders(xhr) { + var responseHeaders = new HttpHeaders(); + var headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (var _i = 0, headerLines_1 = headerLines; _i < headerLines_1.length; _i++) { + var line = headerLines_1[_i]; + var index = line.indexOf(":"); + var headerName = line.slice(0, index); + var headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; + } + function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", function () { + return reject(new RestError("Failed to send request to " + request.url, RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + xhr.addEventListener("abort", function () { + return reject(new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request)); + }); + xhr.addEventListener("timeout", function () { + return reject(new RestError("timeout of " + xhr.timeout + "ms exceeded", RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + (function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + })(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + + // Copyright (c) Microsoft Corporation. + // Licensed under the MIT license. + /** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ + function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + /** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ + function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); + } + function getPathStringFromParameterPath(parameterPath, mapper) { + var result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function isStreamOperation(operationSpec) { + var result = false; + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result = true; + break; + } + } + return result; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + var _a, _b; + var parser = new DOMParser(); + // Policy to make our code Trusted Types compliant. + // https://github.com/w3c/webappsec-trusted-types + // We are calling DOMParser.parseFromString() to parse XML payload from Azure services. + // The parsed DOM object is not exposed to outside. Scripts are disabled when parsing + // according to the spec. There are no HTML/XSS security concerns on the usage of + // parseFromString() here. + var ttPolicy; + try { + if (typeof self.trustedTypes !== "undefined") { + ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", { + createHTML: function (s) { return s; }, + }); + } + } + catch (e) { + console.warn('Could not create trusted types policy "@azure/ms-rest-js#xml.browser"'); + } + function parseXML(str) { + var _a; + try { + var dom = parser.parseFromString(((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML(str)) !== null && _a !== void 0 ? _a : str), "application/xml"); + throwIfError(dom); + var obj = domToObject(dom.childNodes[0]); + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } + } + var errorNS = ""; + try { + var invalidXML = ((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML("INVALID")) !== null && _a !== void 0 ? _a : "INVALID"); + errorNS = (_b = parser.parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI) !== null && _b !== void 0 ? _b : ""; + } + catch (ignored) { + // Most browsers will return a document containing , but IE will throw. 
+ } + function throwIfError(dom) { + if (errorNS) { + var parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror"); + if (parserErrors.length) { + throw new Error(parserErrors.item(0).innerHTML); + } + } + } + function isElement(node) { + return !!node.attributes; + } + /** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ + function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ? node : undefined; + } + function domToObject(node) { + var result = {}; + var childNodeCount = node.childNodes.length; + var firstChildNode = node.childNodes[0]; + var onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + var elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result["$"] = {}; + for (var i = 0; i < elementWithAttributes.attributes.length; i++) { + var attr = elementWithAttributes.attributes[i]; + result["$"][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result["_"] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (var i = 0; i < childNodeCount; i++) { + var child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + var childObject = domToObject(child); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; + } + // tslint:disable-next-line:no-null-keyword + var doc = document.implementation.createDocument(null, null, null); + var serializer = new XMLSerializer(); + function stringifyXML(obj, opts) { + var rootName = (opts && opts.rootName) || "root"; + var dom = buildNode(obj, rootName)[0]; + return ('' + serializer.serializeToString(dom)); + } + function buildAttributes(attrs) { + var result = []; + for (var _i = 0, _a = Object.keys(attrs); _i < _a.length; _i++) { + var key = _a[_i]; + var attr = doc.createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; + } + function buildNode(obj, elementName) { + if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { + var elem = doc.createElement(elementName); + elem.textContent = obj.toString(); + return [elem]; + } + else if (Array.isArray(obj)) { + var result = []; + for (var _i = 0, obj_1 = obj; _i < obj_1.length; _i++) { + var arrayElem = obj_1[_i]; + for (var _a = 0, _b = buildNode(arrayElem, elementName); _a < _b.length; _a++) { + var child = _b[_a]; + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + var elem = doc.createElement(elementName); + for (var _c = 0, _d = Object.keys(obj); _c < _d.length; _c++) { + var key = _d[_c]; + if (key === "$") { + for (var _e = 0, _f = buildAttributes(obj[key]); _e < _f.length; _e++) { + var attr = _f[_e]; + elem.attributes.setNamedItem(attr); + } + } + else { + for (var _g = 0, _h = buildNode(obj[key], key); _g < _h.length; _g++) { + var child = _h[_g]; + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error("Illegal value passed to 
buildObject: " + obj); + } + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; + }()); + /** + * Optional properties that can be used when creating a RequestPolicy. + */ + var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== exports.HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ + function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; + } + var defaultJsonContentTypes = ["application/json", "text/json"]; + var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; + /** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. 
+ */ + var DeserializationPolicy = /** @class */ (function (_super) { + __extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; + }(BaseRequestPolicy)); + function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; + } + function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; + } + function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? 
"Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = stripRequest(parsedResponse.request); + error.response = stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = stripRequest(parsedResponse.request); + restError.response = stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); + } + function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; + } + var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + var DEFAULT_CLIENT_RETRY_COUNT = 3; + var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + /** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ + var ExponentialRetryPolicy = /** @class */ (function (_super) { + __extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. 
+ * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; + }(BaseRequestPolicy)); + /** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ + function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; + } + /** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ + function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; + } + function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; + } + var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + __extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + function getDefaultUserAgentKey() { + return "x-ms-command-name"; + } + function getPlatformSpecificData() { + var navigator = self.navigator; + var osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
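The updateRetryData()/retry() pair above grows the wait time as a jittered exponential backoff: each attempt multiplies (2^retryCount - 1) by a random delta of roughly 0.8x-1.2x the base retryInterval and clamps the result to maxRetryInterval. A minimal standalone sketch of that interval calculation (the exampleBackoffInterval name is hypothetical, not part of the bundle):

function exampleBackoffInterval(retryCount, retryInterval, minRetryInterval, maxRetryInterval) {
    // Mirrors updateRetryData(): exponential growth with bounded random jitter.
    var incrementDelta = Math.pow(2, retryCount) - 1;
    var boundedRandDelta = retryInterval * 0.8 +
        Math.floor(Math.random() * (retryInterval * 1.2 - retryInterval * 0.8));
    incrementDelta *= boundedRandDelta;
    return Math.min(minRetryInterval + incrementDelta, maxRetryInterval);
}
// With the defaults above (30s interval, 3s min, 90s max) the third attempt
// already hits the 90s cap: exampleBackoffInterval(3, 30000, 3000, 90000) === 90000.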
+ function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; + } + function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? "" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); + } + var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; + function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; + } + function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; + } + var UserAgentPolicy = /** @class */ (function (_super) { + __extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A class that handles the query portion of a URLBuilder. + */ + var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". 
+ */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. + */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; + }()); + /** + * A class that handles creating, modifying, and parsing URLs. + */ + var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. 
+ */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" 
+ this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; + }()); + var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; + }()); + /** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ + function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); + } + /** + * A class that tokenizes URL strings. + */ + var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; + }()); + /** + * Read the remaining characters from this Tokenizer's character stream. + */ + function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; + } + /** + * Whether or not this URLTokenizer has a current character. 
+ */ + function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; + } + /** + * Get the character in the text string at the current index. + */ + function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; + } + /** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ + function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } + } + /** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ + function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); + } + /** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ + function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; + } + /** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ + function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); + } + /** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
+ */ + function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); + } + function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } + } + function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + } + function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
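URLQuery, URLBuilder and URLTokenizer above are the bundle's internal URL handling: the tokenizer walks the string once, emitting SCHEME, HOST, PORT, PATH and QUERY tokens, and URLBuilder stores each part separately. A minimal usage sketch (illustrative only; the URL shown is hypothetical):

var exampleUrl = URLBuilder.parse("https://management.azure.com:443/subscriptions/xyz?a=1");
exampleUrl.setQueryParameter("api-version", "2016-02-01");
// Each part was tokenized separately and is re-joined by toString(), giving:
// https://management.azure.com:443/subscriptions/xyz?a=1&api-version=2016-02-01
console.log(exampleUrl.toString());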
+ var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, + }; + function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; + } + var RedirectPolicy = /** @class */ (function (_super) { + __extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; + }(BaseRequestPolicy)); + function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); + } + function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; + } + + function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; + } + var RPRegistrationPolicy = /** @class */ (function (_super) { + __extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; + }(BaseRequestPolicy)); + function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = 
checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); + } + /** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ + function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; + } + /** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ + function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; + } + /** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ + function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; + } + /** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. 
+ * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ + function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); + } + /** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ + function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; + } + var SigningPolicy = /** @class */ (function (_super) { + __extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; + } + /** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. 
+ * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ + var SystemErrorRetryPolicy = /** @class */ (function (_super) { + __extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry$1(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; + }(BaseRequestPolicy)); + /** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ + function shouldRetry$1(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; + } + /** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ + function updateRetryData$1(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; + } + function retry$1(policy, request, operationResponse, err, retryData) { + return __awaiter(this, void 0, void 0, function () { + var error_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData$1(policy, retryData, err); + if (!(err && + err.code && + shouldRetry$1(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry$1(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + (function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; + })(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); + function agentPolicy(_agentSettings) { + return { + create: function (_nextPolicy, _options) { + throw agentNotSupportedInBrowser; + }, + }; + } + var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw agentNotSupportedInBrowser; + } + AgentPolicy.prototype.sendRequest = function (_request) { + throw agentNotSupportedInBrowser; + }; + return AgentPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. 
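QueryCollectionFormat above maps each collection format to its separator string; sendOperationRequest further down joins array-valued query parameters with that separator (Multi instead repeats the parameter name once per element). A small sketch of the joining behaviour (the tags array is hypothetical):

var tags = ["alpha", "beta", "gamma"];
tags.join(exports.QueryCollectionFormat.Csv);   // "alpha,beta,gamma"
tags.join(exports.QueryCollectionFormat.Pipes); // "alpha|beta|gamma"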
+ var proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); + function getDefaultProxySettings(_proxyUrl) { + return undefined; + } + function proxyPolicy(_proxySettings) { + return { + create: function (_nextPolicy, _options) { + throw proxyNotSupportedInBrowser; + }, + }; + } + var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw proxyNotSupportedInBrowser; + } + ProxyPolicy.prototype.sendRequest = function (_request) { + throw proxyNotSupportedInBrowser; + }; + return ProxyPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var StatusCodes = Constants.HttpConstants.StatusCodes; + var DEFAULT_RETRY_COUNT = 3; + function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; + } + /** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ + var ThrottlingRetryPolicy = /** @class */ (function (_super) { + __extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return __awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + /** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ + var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", + ]; + /** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ + var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return __awaiter(this, void 0, void 0, function () { + var accessToken, result; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return __awaiter(this, void 0, void 0, function () { + var tokenResponse; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * @class + * Initializes a new instance of the ServiceClient. + */ + var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? 
void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new XhrHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) 
{ return cb(err); }); + } + return result; + }; + return ServiceClient; + }()); + function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } + } + function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; + } + function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; + } + function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = __assign(__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy()); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; + } + function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); + } + function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; + } + function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; + } + function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? _response.parsedBody : []; + var arrayResponse = __spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(__assign(__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; + } + var LogPolicy = /** @class */ (function (_super) { + __extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; + }(BaseRequestPolicy)); + function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ var HeaderConstants = Constants.HeaderConstants; + var DEFAULT_AUTHORIZATION_SCHEME$1 = "Bearer"; + /** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ + var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var HeaderConstants$1 = Constants.HeaderConstants; + var DEFAULT_AUTHORIZATION_SCHEME$2 = "Basic"; + var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants$1.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * Authenticates to a service using an API key. + */ + var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. 
+ */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var TopicCredentials = /** @class */ (function (_super) { + __extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; + }(ApiKeyCredentials)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var DomainCredentials = /** @class */ (function (_super) { + __extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. 
+ * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; + }(ApiKeyCredentials)); + + exports.ApiKeyCredentials = ApiKeyCredentials; + exports.AzureIdentityCredentialAdapter = AzureIdentityCredentialAdapter; + exports.BaseRequestPolicy = BaseRequestPolicy; + exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; + exports.Constants = Constants; + exports.DefaultHttpClient = XhrHttpClient; + exports.DomainCredentials = DomainCredentials; + exports.HttpHeaders = HttpHeaders; + exports.MapperType = MapperType; + exports.RequestPolicyOptions = RequestPolicyOptions; + exports.RestError = RestError; + exports.Serializer = Serializer; + exports.ServiceClient = ServiceClient; + exports.TokenCredentials = TokenCredentials; + exports.TopicCredentials = TopicCredentials; + exports.URLBuilder = URLBuilder; + exports.URLQuery = URLQuery; + exports.WebResource = WebResource; + exports.agentPolicy = agentPolicy; + exports.applyMixins = applyMixins; + exports.delay = delay; + exports.deserializationPolicy = deserializationPolicy; + exports.deserializeResponseBody = deserializeResponseBody; + exports.encodeUri = encodeUri; + exports.executePromisesSequentially = executePromisesSequentially; + exports.exponentialRetryPolicy = exponentialRetryPolicy; + exports.flattenResponse = flattenResponse; + exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; + exports.generateUuid = generateUuid; + exports.getDefaultProxySettings = getDefaultProxySettings; + exports.getDefaultUserAgentValue = getDefaultUserAgentValue; + exports.isDuration = isDuration; + exports.isNode = isNode; + exports.isValidUuid = isValidUuid; + exports.logPolicy = logPolicy; + exports.promiseToCallback = promiseToCallback; + exports.promiseToServiceCallback = promiseToServiceCallback; + exports.proxyPolicy = proxyPolicy; + exports.redirectPolicy = redirectPolicy; + exports.serializeObject = serializeObject; + exports.signingPolicy = signingPolicy; + exports.stripRequest = stripRequest; + exports.stripResponse = stripResponse; + exports.systemErrorRetryPolicy = systemErrorRetryPolicy; + exports.throttlingRetryPolicy = throttlingRetryPolicy; + exports.userAgentPolicy = userAgentPolicy; + + Object.defineProperty(exports, '__esModule', { value: true }); + +}))); +//# sourceMappingURL=msRest.browser.js.map diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map new file mode 100644 index 0000000..3f80e42 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.browser.js","sources":["../lib/httpHeaders.ts","../lib/util/base64.browser.ts","../node_modules/uuid/dist/esm-browser/rng.js","../node_modules/uuid/dist/esm-browser/regex.js","../node_modules/uuid/dist/esm-browser/validate.js","../node_modules/uuid/dist/esm-browser/stringify.js","../node_modules/uuid/dist/esm-browser/v4.js","../lib/util/constants.ts","../lib/util/utils.ts","../lib/serializer.ts","../lib/webResource.ts","../node_modules/tslib/tslib.es6.js","../lib/restError.ts","../lib/xhrHttpClient.ts","../lib/httpPipelineLogLevel.ts","../node_modules/@azure/core-auth/src/tokenCredential.ts","../lib/operationParameter.ts","../lib/operationSpec.ts","../lib/util/xml.browser.ts","../lib/policies/requestPolicy.ts","../lib/policies/deserializationPolicy.ts","../lib/policies/exponentialRetryPolicy.ts","../lib/policies/generateClientRequestIdPolicy.ts","../lib/policies/msRestUserAgentPolicy.browser.ts","../lib/policies/userAgentPolicy.ts","../lib/url.ts","../lib/policies/redirectPolicy.ts","../lib/policies/rpRegistrationPolicy.ts","../lib/policies/signingPolicy.ts","../lib/policies/systemErrorRetryPolicy.ts","../lib/queryCollectionFormat.ts","../lib/policies/agentPolicy.browser.ts","../lib/policies/proxyPolicy.browser.ts","../lib/policies/throttlingRetryPolicy.ts","../lib/credentials/azureIdentityTokenCredentialAdapter.ts","../lib/serviceClient.ts","../lib/policies/logPolicy.ts","../lib/credentials/tokenCredentials.ts","../lib/credentials/basicAuthenticationCredentials.ts","../lib/credentials/apiKeyCredentials.ts","../lib/credentials/topicCredentials.ts","../lib/credentials/domainCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string) {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: any): object is HttpHeadersLike {\n if (!object || typeof object !== \"object\") {\n return false;\n }\n\n if (\n typeof object.rawHeaders === \"function\" &&\n typeof object.clone === \"function\" &&\n typeof object.get === \"function\" &&\n typeof object.set === \"function\" &&\n typeof object.contains === \"function\" &&\n typeof object.remove === \"function\" &&\n typeof object.headersArray === \"function\" &&\n typeof object.headerValues === \"function\" &&\n typeof object.headerNames === \"function\" &&\n typeof object.toJson === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name.toLowerCase()] = header.value;\n }\n return result;\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(): RawHttpHeaders {\n return this.rawHeaders();\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson());\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n return new HttpHeaders(this.rawHeaders());\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * Encodes a string in base64 format.\n * @param value the string to encode\n */\nexport function encodeString(value: string): string {\n return btoa(value);\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value the Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n let str = \"\";\n for (let i = 0; i < value.length; i++) {\n str += String.fromCharCode(value[i]);\n }\n return btoa(str);\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value the base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n}\n","// Unique ID creation requires a high quality random # generator. In the browser we therefore\n// require the crypto API and do not support built-in fallback to lower quality random number\n// generators (like Math.random()).\nvar getRandomValues;\nvar rnds8 = new Uint8Array(16);\nexport default function rng() {\n // lazy load so that environments that need to polyfill have a chance to do so\n if (!getRandomValues) {\n // getRandomValues needs to be invoked in a context where \"this\" is a Crypto implementation. 
Also,\n // find the complete implementation of crypto (msCrypto) on IE11.\n getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);\n\n if (!getRandomValues) {\n throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');\n }\n }\n\n return getRandomValues(rnds8);\n}","export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;","import REGEX from './regex.js';\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && REGEX.test(uuid);\n}\n\nexport default validate;","import validate from './validate.js';\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\n\nvar byteToHex = [];\n\nfor (var i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr) {\n var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!validate(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nexport default stringify;","import rng from './rng.js';\nimport stringify from './stringify.js';\n\nfunction v4(options, buf, offset) {\n options = options || {};\n var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (var i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return stringify(rnds);\n}\n\nexport default v4;","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.7.0\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization 
header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n */\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: 
any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport function __createBinding(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}\r\n\r\nexport function __exportStar(m, exports) {\r\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) exports[p] = m[p];\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n};\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\r\n result.default = mod;\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpClient } from \"./httpClient\";\nimport { HttpHeaders } from \"./httpHeaders\";\nimport { WebResourceLike, TransferProgressEvent } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.agentSettings) {\n throw new Error(\"HTTP agent settings not supported in browser environment\");\n }\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n const listener = () => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n xhr.responseType = request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? 
null : request.body);\n\n if (request.streamResponseBody) {\n return new Promise((resolve, reject) => {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n }\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n) {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest) {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n) {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n xhr.addEventListener(\"abort\", () =>\n reject(\n new RestError(\"The request was aborted\", RestError.REQUEST_ABORTED_ERROR, undefined, request)\n )\n );\n xhr.addEventListener(\"timeout\", () =>\n reject(\n new RestError(\n `timeout of ${xhr.timeout}ms exceeded`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TracingContext } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. 
You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * Tracing Context for the current request.\n */\n tracingContext?: TracingContext;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n\n /**\n * Claim details to perform the Continuous Access Evaluation authentication flow\n */\n claims?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nconst parser = new DOMParser();\n\n// Policy to make our code Trusted Types compliant.\n// https://github.com/w3c/webappsec-trusted-types\n// We are calling DOMParser.parseFromString() to parse XML payload from Azure services.\n// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing\n// according to the spec. There are no HTML/XSS security concerns on the usage of\n// parseFromString() here.\nlet ttPolicy: Pick | undefined;\ntry {\n if (typeof self.trustedTypes !== \"undefined\") {\n ttPolicy = self.trustedTypes.createPolicy(\"@azure/ms-rest-js#xml.browser\", {\n createHTML: (s: any) => s,\n });\n }\n} catch (e) {\n console.warn('Could not create trusted types policy \"@azure/ms-rest-js#xml.browser\"');\n}\n\nexport function parseXML(str: string): Promise {\n try {\n const dom = parser.parseFromString((ttPolicy?.createHTML(str) ?? str) as string, \"application/xml\");\n throwIfError(dom);\n\n const obj = domToObject(dom.childNodes[0]);\n return Promise.resolve(obj);\n } catch (err) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS = \"\";\ntry {\n const invalidXML = (ttPolicy?.createHTML(\"INVALID\") ?? \"INVALID\") as string;\n errorNS =\n parser.parseFromString(invalidXML, \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI! ?? 
\"\";\n} catch (ignored) {\n // Most browsers will return a document containing , but IE will throw.\n}\n\nfunction throwIfError(dom: Document) {\n if (errorNS) {\n const parserErrors = dom.getElementsByTagNameNS(errorNS, \"parsererror\");\n if (parserErrors.length) {\n throw new Error(parserErrors.item(0)!.innerHTML);\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? node : undefined;\n}\n\nfunction domToObject(node: Node): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[\"$\"] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[\"$\"][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[\"_\"] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\n// tslint:disable-next-line:no-null-keyword\nconst doc = document.implementation.createDocument(null, null, null);\nconst serializer = new XMLSerializer();\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const rootName = (opts && opts.rootName) || \"root\";\n const dom = buildNode(obj, rootName)[0];\n return (\n '' + serializer.serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = doc.createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string): Node[] {\n if (typeof obj === \"string\" || typeof obj === \"number\" || typeof obj === \"boolean\") {\n const elem = doc.createElement(elementName);\n elem.textContent = obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = doc.createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === \"$\") {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else {\n for (const child of 
buildNode(obj[key], key)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise<HttpOperationResponse>;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise<HttpOperationResponse>;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new deserialization RequestPolicyFactory that will deserialize HTTP response bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise<HttpOperationResponse> {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = 
request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n * and \"plugins\" section in webpack.testconfig.ts.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-command-name\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? 
`${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. 
If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst agentNotSupportedInBrowser = new Error(\"AgentPolicy is not supported in browser environment\");\n\nexport function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw agentNotSupportedInBrowser;\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw agentNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw agentNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ProxySettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst proxyNotSupportedInBrowser = new Error(\"ProxyPolicy is not supported in browser environment\");\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw proxyNotSupportedInBrowser;\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw proxyNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw proxyNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","HttpPipelineLogLevel","utils.stripRequest","utils.stripResponse","utils\n .delay","utils.generateUuid","utils\n .delay","retry","shouldRetry","updateRetryData","utils.delay","QueryCollectionFormat","MSRestConstants","DefaultHttpClient","utils.prepareXMLRootList","utils.isPrimitiveType","DEFAULT_AUTHORIZATION_SCHEME","HeaderConstants","base64.encodeString"],"mappings":";;;;;;;;;;IAAA;IACA;IAEA;;;IAGA,SAAS,YAAY,CAAC,UAAkB;QACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;IAClC,CAAC;aA4Ee,iBAAiB,CAAC,MAAY;QAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YACzC,OAAO,KAAK,CAAC;SACd;QAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;YACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;YAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;YAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;YAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;YACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;YACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;YACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;YACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;YACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;QAME,qBAAY,UAA2B;YACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;YACtB,IAAI,UAAU,EAAE;gBACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;oBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;iBAC9C;aACF;SACF;;;;;;;QAQM,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;YACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;gBAC3C,IAAI,EAAE,UAAU;gBAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;aAC9B,CAAC;SACH;;;;;;QAOM,yBAAG,GAAV,UAAW,UAAkB;YAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;YACtE,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;SAC3C;;;;QAKM,8BAAQ,GAAf,UAAgB,UAAkB;YAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;SACrD;;;;;;QAOM,4BAAM,GAAb,UAAc,UAAkB;YAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;YAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;YAClD,OAAO,MAAM,CAAC;SACf;;;;QAKM,gCAAU,GAAjB;YACE,IAAM,MAAM,GAAmB,EAAE,CAAC;YAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,
CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aAClD;YACD,OAAO,MAAM,CAAC;SACf;;;;QAKM,kCAAY,GAAnB;YACE,IAAM,OAAO,GAAiB,EAAE,CAAC;YACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;aAC3C;YACD,OAAO,OAAO,CAAC;SAChB;;;;QAKM,iCAAW,GAAlB;YACE,IAAM,WAAW,GAAa,EAAE,CAAC;YACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;YAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;aACnC;YACD,OAAO,WAAW,CAAC;SACpB;;;;QAKM,kCAAY,GAAnB;YACE,IAAM,YAAY,GAAa,EAAE,CAAC;YAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;YAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;aACrC;YACD,OAAO,YAAY,CAAC;SACrB;;;;QAKM,4BAAM,GAAb;YACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;SAC1B;;;;QAKM,8BAAQ,GAAf;YACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;SACtC;;;;QAKM,2BAAK,GAAZ;YACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;SAC3C;QACH,kBAAC;IAAD,CAAC;;ICrOD;IACA;IAEA;;;;AAIA,aAAgB,YAAY,CAAC,KAAa;QACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;IACrB,CAAC;IAED;;;;AAIA,aAAgB,eAAe,CAAC,KAAiB;QAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;QACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;SACtC;QACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;IACnB,CAAC;IAED;;;;AAIA,aAAgB,YAAY,CAAC,KAAa;QACxC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAC/B,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;SACnC;QACD,OAAO,GAAG,CAAC;IACb,CAAC;;IClCD;IACA;IACA;IACA,IAAI,eAAe,CAAC;IACpB,IAAI,KAAK,GAAG,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;AAC/B,IAAe,SAAS,GAAG,GAAG;IAC9B;IACA,EAAE,IAAI,CAAC,eAAe,EAAE;IACxB;IACA;IACA,IAAI,eAAe,GAAG,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,eAAe,IAAI,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,OAAO,QAAQ,KAAK,WAAW,IAAI,OAAO,QAAQ,CAAC,eAAe,KAAK,UAAU,IAAI,QAAQ,CAAC,eAAe,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACrP;IACA,IAAI,IAAI,CAAC,eAAe,EAAE;IAC1B,MAAM,MAAM,IAAI,KAAK,CAAC,0GAA0G,CAAC,CAAC;IAClI,KAAK;IACL,GAAG;AACH;IACA,EAAE,OAAO,eAAe,CAAC,KAAK,CAAC,CAAC;IAChC;;AClBA,gBAAe,qHAAqH;;sIAAC,lICErI,SAAS,QAAQ,CAAC,IAAI,EAAE;IACxB,EAAE,OAAO,OAAO,IAAI,KAAK,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACtD,CAAC;;ICHD;IACA;IACA;IACA;AACA;IACA,IAAI,SAAS,GAAG,EAAE,CAAC;AACnB;IACA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,EAAE,CAAC,EAAE;IAC9B,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;IACrD,CAAC;AACD;IACA,SAAS,SAAS,CAAC,GAAG,EAAE;IACxB,EAAE,IAAI,MAAM,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;IACrF;IACA;IACA,EAAE,IAAI,IAAI,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAA
G,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,EAAE,WAAW,EAAE,CAAC;IACzgB;IACA;IACA;IACA;AACA;IACA,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;IACvB,IAAI,MAAM,SAAS,CAAC,6BAA6B,CAAC,CAAC;IACnD,GAAG;AACH;IACA,EAAE,OAAO,IAAI,CAAC;IACd,CAAC;;ICxBD,SAAS,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE;IAClC,EAAE,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;IAC1B,EAAE,IAAI,IAAI,GAAG,OAAO,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,IAAI,GAAG,GAAG,CAAC;AACtD;IACA,EAAE,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;IAClC,EAAE,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClC;IACA,EAAE,IAAI,GAAG,EAAE;IACX,IAAI,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;AACzB;IACA,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,EAAE,CAAC,EAAE;IACjC,MAAM,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;IAChC,KAAK;AACL;IACA,IAAI,OAAO,GAAG,CAAC;IACf,GAAG;AACH;IACA,EAAE,OAAO,SAAS,CAAC,IAAI,CAAC,CAAC;IACzB,CAAC;;ICrBD;IACA;AAEA,QAAa,SAAS,GAAG;;;;;;QAMvB,aAAa,EAAE,OAAO;;;;;;;QAQtB,IAAI,EAAE,OAAO;;;;;;;QAQb,KAAK,EAAE,QAAQ;;;;;;;QAQf,UAAU,EAAE,YAAY;;;;;;;QAQxB,WAAW,EAAE,aAAa;;;;QAK1B,QAAQ,EAAE,UAAU;;;;QAKpB,SAAS,EAAE,WAAW;QAEtB,aAAa,EAAE;;;;;;;YAOb,SAAS,EAAE;gBACT,GAAG,EAAE,KAAK;gBACV,GAAG,EAAE,KAAK;gBACV,MAAM,EAAE,QAAQ;gBAChB,IAAI,EAAE,MAAM;gBACZ,KAAK,EAAE,OAAO;gBACd,IAAI,EAAE,MAAM;gBACZ,KAAK,EAAE,OAAO;aACf;YAED,WAAW,EAAE;gBACX,eAAe,EAAE,GAAG;aACrB;SACF;;;;QAKD,eAAe,EAAE;;;;;;;YAOf,aAAa,EAAE,eAAe;YAE9B,oBAAoB,EAAE,QAAQ;;;;;;;;;YAU9B,WAAW,EAAE,aAAa;;;;;;;YAQ1B,UAAU,EAAE,YAAY;SACzB;KACF;;IC3GD;AACA,IAQA;;;AAGA,QAAa,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;QAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;QACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;QAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B,IAUA;;;;;;AAMA,aAAgB,SAAS,CAAC,GAAW;QACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;aAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;aACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;aACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;aACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;aACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;;;;AAQA,aAAgB,aAAa,CAAC,QAA+B;QAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;QACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;QAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;QAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;QAC1C,OAAO,gBAAgB,CAAC;IAC1B,CAAC;IAED;;;;;;;;AAQA,aAAgB,YAAY,CAAC,OAAwB;QACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;QACxC,IAAI,eAAe,CAAC,OAAO,EAAE;YAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;SACjD;QACD,OAAO,eAAe,CAAC;IACzB,CAAC;IAED;;;;;;;AAOA,aAAgB,WAAW,CAAC,IAAY;QACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;QACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACnC,CAAC;AAED,IA4BA;;;;;AAKA,aAAgB,YAAY;QAC1B,OAAOA,EAAM,EAAE,CAAC;IAClB,CAAC;IAED;;;;;;;;;;;AAWA,aAAgB,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;QACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;YACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;SACtC,CAAC,CAAC;QACH,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,IAeA;;;;;;AAMA,aAAgB,KAAK,CAAI,CAAS,EAAE,KAAS;QAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,GAAA,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;IACvE,CAAC;IAqBD;;;;;;AAMA,aAAgB,iBAAiB,CAAC,OAAqB;QACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;YACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;SACzD;QACD,OAAO,UAAC,EAAY;YAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;gBACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;aACrB,EACD,UAAC,GAAU;gBACT,EAAE,CAAC,GAAG,CAAC,CAAC;aACT,CACF,CAAC;SACH,CAAC;IACJ,CAAC;IAED;;;;;AAKA,aAAgB,wBAAwB,CAAI,OAAuC;QACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,
UAAU,EAAE;YACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;SACzD;QACD,OAAO,UAAC,EAAsB;YAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;gBAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;aAC3E,EACD,UAAC,GAAU;gBACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;aAC3B,CACF,CAAC;SACH,CAAC;IACJ,CAAC;AAED,aAAgB,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;QAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACb;QACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;IAChC,CAAC;IAED;;;;;AAKA,aAAgB,WAAW,CAAC,UAAe,EAAE,WAAkB;QAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;YAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;gBAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;aAC1D,CAAC,CAAC;SACJ,CAAC,CAAC;IACL,CAAC;IAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;IAElM;;;;;AAKA,aAAgB,UAAU,CAAC,KAAa;QACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACzC,CAAC;IAED;;;;;;;AAOA,aAAgB,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;QAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;IAC5F,CAAC;IAED;;;;;;AAMA,aAAgB,eAAe,CAAC,KAAU;QACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,KAAK,KAAK,KAAK,IAAI,CAAC;IACtF,CAAC;;IC/RD;AACA;QAME,oBACkB,YAAyC,EACzC,KAAe;YADf,6BAAA,EAAA,iBAAyC;YAAzC,iBAAY,GAAZ,YAAY,CAA6B;YACzC,UAAK,GAAL,KAAK,CAAU;SAC7B;QAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;YAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;gBACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;aACH,CAAC;YACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;gBACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;gBACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;oBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;oBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;oBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;oBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;oBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;iBACtC;gBACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;oBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;iBACxC;gBACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;oBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;iBACtC;gBACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;oBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;iBACxC;gBACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;oBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC1C;gBACD,IAAI,OAAO,EAAE;oBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;oBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;wBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;qBACpC;iBACF;gBACD,IACE,WAAW;oBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAA,CAAC,EAC5E;oBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;iBAC5C;aACF;SACF;;;;;;;;;;;;QAaD,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;YACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;YACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;YAC9C,IAAI,CAAC,UAAU,EAAE;gBACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;aACrC;YACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAC7C,OAAO,GAAG,EAAE,CAAC;aACd;YAED,IAAI,MAAM,C
AAC,UAAU,EAAE;gBACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;aAC9B;;;;;;;;;;YAYO,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;YAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;gBAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;aACvD;YACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;gBAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;aAC/D;YACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;gBACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;aAClD;YAED,IAAI,MAAM,IAAI,SAAS,EAAE;gBACvB,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM;;gBAEL,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;gBACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;oBACxC,OAAO,GAAG,MAAM,CAAC;iBAClB;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;oBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;iBAC/D;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;oBAChD,IAAM,UAAU,GAAe,MAAoB,CAAC;oBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;iBAChF;qBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;oBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBAC9D;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;iBACtD;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;iBACtD;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACrF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;oBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACzF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACvF;aACF;YACD,OAAO,OAAO,CAAC;SAChB;;;;;;;;;;;;QAaD,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;YAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;;;oBAIzE,YAAY,GAAG,EAAE,CAAC;iBACnB;;gBAED,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;oBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;iBACpC;gBACD,OAAO,YAAY,CAAC;aACrB;YAED,IAAI,OAAY,CAAC;YACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;YACpC,IAAI,CAAC,UAAU,EAAE;gBACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;aACrC;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC/F;iBAAM;gBACL,IAAI,IAAI,CAAC,KAAK,EAAE;;;;;;oBAMd,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;qBAClC;iBACF;gBAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;oBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;oBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;wBAClB,OAAO,GAAG,YAAY,CAAC;qBACxB;iBACF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;oBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;wBAC3B,OAAO,GAAG,IAAI,CAAC;qBAChB;yBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;wBACnC,OAAO,GAAG,KAAK,CAAC;qBACjB;yBAAM;wBACL,OAAO,GAAG,YAAY,CAAC;qBACxB;iBACF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;oBACzF,OAAO,GAAG,YAAY,CAAC;iBACxB;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;oBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;iBAClC;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;iBACxC;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAGC,YAAmB,CAAC,YAAY,CAAC,CAAC;iBAC7C;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;iBAC9C;qBAAM,IAAI,UAAU,CAAC,KAAK,CAA
C,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC7F;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;oBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;iBACH;aACF;YAED,IAAI,MAAM,CAAC,UAAU,EAAE;gBACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC;SAChB;QACH,iBAAC;IAAD,CAAC,IAAA;IAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;QACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;QACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;YAC1C,EAAE,GAAG,CAAC;SACP;QACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;IAC5B,CAAC;IAED,SAAS,iBAAiB,CAAC,MAAW;QACpC,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,SAAS,CAAC;SAClB;QACD,IAAI,EAAE,MAAM,YAAY,UAAU,CAAC,EAAE;YACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;SAC5F;;QAED,IAAM,GAAG,GAAGC,eAAsB,CAAC,MAAM,CAAC,CAAC;;QAE3C,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;IACnE,CAAC;IAED,SAAS,oBAAoB,CAAC,GAAW;QACvC,IAAI,CAAC,GAAG,EAAE;YACR,OAAO,SAAS,CAAC;SAClB;QACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;SACxF;;QAED,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;;QAElD,OAAOD,YAAmB,CAAC,GAAG,CAAC,CAAC;IAClC,CAAC;IAED,SAAS,kBAAkB,CAAC,IAAwB;QAClD,IAAM,OAAO,GAAa,EAAE,CAAC;QAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;QACtB,IAAI,IAAI,EAAE;YACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;gBAAxB,IAAM,IAAI,iBAAA;gBACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;oBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;iBACvD;qBAAM;oBACL,YAAY,IAAI,IAAI,CAAC;oBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oBAC3B,YAAY,GAAG,EAAE,CAAC;iBACnB;aACF;SACF;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,SAAS,cAAc,CAAC,CAAgB;QACtC,IAAI,CAAC,CAAC,EAAE;YACN,OAAO,SAAS,CAAC;SAClB;QAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;SAC3B;QACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;IAClD,CAAC;IAED,SAAS,cAAc,CAAC,CAAS;QAC/B,IAAI,CAAC,CAAC,EAAE;YACN,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC5B,CAAC;IAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;QAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;YACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;oBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;iBAC9E;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;oBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;iBAChF;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAC9C,IAAI,EAAE,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAIE,WAAiB,CAAC,KAAK,CAAC,CAAC,EAAE;oBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;iBACH;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;oBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;iBAC/E;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;gBAChC,IACE,UAAU,KAAK,QAAQ;oBACvB,UAAU,KAAK,UAAU;oBACzB,EAAE,KAAK,YAAY,WAAW,CAAC;oBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;oBAC1B,EAAE,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;oBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;iBACH;aACF;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;QAClF,IAAI,CAAC,aAAa,EAAE
;YAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;SACH;QACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;YACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;aACnD;YACD,OAAO,IAAI,KAAK,KAAK,CAAC;SACvB,CAAC,CAAC;QACH,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;SACH;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;QAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;gBAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;aAC9D;YACD,KAAK,GAAGD,eAAsB,CAAC,KAAK,CAAC,CAAC;SACvC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;QAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;gBAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;aAC9D;YACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;SAClC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;QAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBACvC,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;iBAC5F;gBACD,KAAK;oBACH,KAAK,YAAY,IAAI;0BACjB,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;0BACpC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;aACtD;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;iBAC5F;gBACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;aACrF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;gBACzD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;iBAC7F;gBACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;aACrF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;wBAChF,mDAAmD,CACtD,CAAC;iBACH;gBACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;aAC/B;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IAAI,CAACE,UAAgB,CAAC,KAAK,CAAC,EAAE;oBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;iBACH;gBACD,KAAK,GAAG,KAAK,CAAC;aACf;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;QAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;YAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;SACzD;QACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;QACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;YACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;iBACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;SACH;QACD,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;SACzE;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;QAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;SAC1D;QACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;QACpC,IAAI,CAAC,SAAS,IAAI,OA
AO,SAAS,KAAK,QAAQ,EAAE;YAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;iBACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;SACH;QACD,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;YAAlC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;SAC5F;QACD,OAAO,cAAc,CAAC;IACxB,CAAC;IAED;;;;;IAKA,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;QAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;QAC7C,IAAI,CAAC,UAAU,EAAE;YACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE;gBACd,MAAM,IAAI,KAAK,CACb,4BAAyB,UAAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;aACH;YAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YACvD,IAAI,CAAC,WAAW,EAAE;gBAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;aACnF;YACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;YAC9C,IAAI,CAAC,UAAU,EAAE;gBACf,MAAM,IAAI,KAAK,CACb,qDAAqD;qBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;aACH;SACF;QAED,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;QAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;YAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;SACzE;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;YACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;gBAAtC,IAAM,GAAG,SAAA;gBACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;gBACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;oBAC3B,SAAS;iBACV;gBAED,IAAI,QAAQ,SAAoB,CAAC;gBACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;gBAChC,IAAI,UAAU,CAAC,KAAK,EAAE;oBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;wBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;qBACnC;yBAAM;wBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;qBACpE;iBACF;qBAAM;oBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;oBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;oBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;wBAAzB,IAAM,QAAQ,cAAA;wBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;wBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;4BACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;yBAC7B;wBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;qBACvC;iBACF;gBAED,IAAI,YAAY,IAAI,SAAS,EAAE;oBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;0BAChC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;0BAChD,UAAU,CAAC;oBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;oBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;oBAC5F,IACE,wBAAwB;wBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;wBAC3C,WAAW,IAAI,SAAS,EACxB;wBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;qBACrC;oBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;oBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;wBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;;;;4BAIjC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;4BACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;yBAC5C;6BAAM,IAAI,cAAc,CAAC,YAAY,EAAE;4BACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;yBAChF;6BAAM;4BACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;yBAC1C;qBACF;iBACF;aACF;YAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;YACpE,IAAI,0BAA0B,EAAE;gBAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;wCAC/B,cAAc;oBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,GAAA,CAAC,CAAC;oBAC5E,IAAI,oBAAoB,EAAE;wBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;qBACH;;gBARH,KAAK,IAAM,cAAc,IAAI,MAAM;4BAAxB,cAAc
;iBASxB;aACF;YAED,OAAO,OAAO,CAAC;SAChB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,oBAAoB,CAAC,YAAoB;QAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC;IAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;QAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;YAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;SACnF;QAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;QAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;QAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;YAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;YACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;YACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;gBACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;aACxD;YAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;YAC3F,IAAI,sBAAsB,EAAE;gBAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;gBAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;oBAA9C,IAAM,SAAS,SAAA;oBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;wBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;qBACH;oBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;iBACtC;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;aAC5B;iBAAM,IAAI,UAAU,CAAC,KAAK,EAAE;gBAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;oBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;iBACH;qBAAM;oBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;oBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;oBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;wBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;wBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;wBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;wBAC3D,IAAI,kBAAkB,EAAE;4BACtB,iBAAiB,GAAG,EAAE,CAAC;yBACxB;qBACF;oBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;iBACH;aACF;iBAAM;;gBAEL,IAAI,gBAAgB,SAAA,CAAC;gBACrB,IAAI,GAAG,GAAG,YAAY,CAAC;;gBAEvB,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAArB,IAAM,IAAI,cAAA;oBACb,IAAI,CAAC,GAAG;wBAAE,MAAM;oBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;iBACjB;gBACD,gBAAgB,GAAG,GAAG,CAAC;gBACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;;;;;;;;;;gBAUtE,IACE,wBAAwB;oBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;oBAC3C,gBAAgB,IAAI,SAAS,EAC7B;oBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;iBAC1C;gBAED,IAAI,eAAe,SAAA,CAAC;;gBAEpB,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;oBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;oBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;;;oBAGF,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;wBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;wBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;4BACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;yBAC5B;qBACF;oBACD,QAAQ,GAAG,aAAa,CAAC;iBAC1B;qBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;oBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;oBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;iBACjC;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;gBACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;oBACvC,IAAM,K
AAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;oBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;wBACjC,OAAO,KAAK,CAAC;qBACd;iBACF;gBACD,OAAO,IAAI,CAAC;aACb,CAAC;YAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;gBAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;oBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;iBACH;aACF;SACF;aAAM,IAAI,YAAY,EAAE;YACvB,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAAxC,IAAM,GAAG,SAAA;gBACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;oBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;oBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;oBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBACnC;aACF;SACF;QAED,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;;QAGlB,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;QAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;iBACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;SACH;QACD,IAAI,YAAY,EAAE;YAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;YAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAAxC,IAAM,GAAG,SAAA;gBACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;aACpF;YACD,OAAO,cAAc,CAAC;SACvB;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;;QAGlB,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;QACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;iBACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;SACH;QACD,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;;gBAEhC,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;aAC/B;YAED,IAAM,SAAS,GAAG,EAAE,CAAC;YACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;aACxF;YACD,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;QAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;QAC5F,IAAI,wBAAwB,EAAE;YAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;YAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;gBAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;gBACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;oBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;oBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;0BAC3B,kBAAkB;0BAClB,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;oBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;oBACrF,IAAI,iBAAiB,EAAE;wBACrB,MAAM,GAAG,iBAAiB,CAAC;qBAC5B;iBACF;aACF;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;QAEvB,QACE,MAAM,CAAC,IAAI,CAAC,wBAAwB;YACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;YACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,EACpE;IACJ,CAAC;IAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;QAClF,QACE,QAAQ;YACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;YACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,EAC/D;IACJ,CAAC;IAoHD;AACA,aAAgB,eAAe,CAAC,WAAgB;QAC9C,IAAI,WAAW,IAAI,SAAS;YAAE,OAAO,SAAS,CAAC;QAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;YACrC,WAAW,GAAGF,eAAsB,CAAC,WAAW,CAAC,CAAC;YAClD,OAAO,WAAW,CAAC;SACpB;aAAM,IAAI,WAAW,YAAY,IAAI,EAAE;YACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;SAClC;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;YACrC,IAAM,KAAK,GAAG,EAAE,CAAC;YACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;aAC7C;YACD,OAAO,KAAK,CAAC;SACd;aAAM,IAAI,
OAAO,WAAW,KAAK,QAAQ,EAAE;YAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;YAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;gBAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;aAC/D;YACD,OAAO,UAAU,CAAC;SACnB;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;;IAGA,SAAS,OAAO,CAAmB,CAAW;QAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;QACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;YAAhB,IAAM,GAAG,UAAA;YACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;SACnB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,QAAa,UAAU,GAAG,OAAO,CAAC;QAChC,WAAW;QACX,SAAS;QACT,WAAW;QACX,WAAW;QACX,MAAM;QACN,UAAU;QACV,iBAAiB;QACjB,YAAY;QACZ,MAAM;QACN,QAAQ;QACR,QAAQ;QACR,UAAU;QACV,QAAQ;QACR,QAAQ;QACR,UAAU;QACV,UAAU;KACX,CAAC;;ICtiCF;AACA,aAgKgB,iBAAiB,CAAC,MAAW;QAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,OAAO,KAAK,CAAC;SACd;QACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;YAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;YACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;YAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;YACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;YACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;YACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;;AAQA;QAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;YAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;YACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;YAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;YAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;YACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;YAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;YAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;YAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;YACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;YAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;SACpC;;;;;;QAOD,+CAAyB,GAAzB;YACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;aACjD;SACF;;;;;;QAOD,6BAAO,GAAP,UAAQ,OAA8B;YACpC,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;aAC/C;YAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;aACrD;YAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;aACH;YAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ;iBACvF,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;gBACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;;YAGD,IAAI,OAAO,CAAC,GAAG,EAAE;gBACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;oBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;iBAC1D;gBACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;aACxB;;YAGD,IAAI,OAAO,CAAC,MAAM,EAAE;gBAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;gBAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;wBACrB,OAAO,CAAC,MAAM;wBACd,4CAA4C;wBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;iBACH;aACF;YACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;;YAG1D,IAAI,OAAO,CAAC,YAAY,EAAE;gBAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;gBACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;oBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;iBACnE;gBACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;
oBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;iBAClD;gBACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;gBAChC,IAAI,KAAG,GACL,OAAO;qBACN,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;qBACjC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAY,CAAC,CAAC;gBACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;gBAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;oBAC/B,IAAI,CAAC,gBAAc,EAAE;wBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;qBACH;oBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;wBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;wBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;wBAC5E,IACE,SAAS,KAAK,IAAI;4BAClB,SAAS,KAAK,SAAS;4BACvB,EAAE,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;4BACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;iCAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;iCACJ,8EAA0E,aAAa,kCAA6B,CAAA;iCACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;yBACH;wBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;4BAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;yBACxD;wBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;4BAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;gCACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;6BACH;4BACD,IAAI,SAAS,CAAC,eAAe,EAAE;gCAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;6BAC1C;iCAAM;gCACL,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;6BAC9D;yBACF;qBACF,CAAC,CAAC;iBACJ;gBACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;aAChB;;YAGD,IAAI,OAAO,CAAC,eAAe,EAAE;gBAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;gBAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;oBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;wBAC3E,yFAAqF;wBACrF,mJAA2I,CAC9I,CAAC;iBACH;;gBAED,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;iBACjB;;gBAED,IAAM,WAAW,GAAG,EAAE,CAAC;;gBAEvB,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;gBAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;oBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;oBACxD,IAAI,UAAU,EAAE;wBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;4BACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;yBAC7D;6BAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;gCACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;6BACH;4BACD,IAAI,UAAU,CAAC,eAAe,EAAE;gCAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;gCAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;6BAC/C;iCAAM;gCACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;gCAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;6BACnE;yBACF;qBACF;iBACF;;gBAED,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACnC;;YAGD,IAAI,OAAO,CAAC,OAAO,EAAE;gBACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;gBAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;oBAAlD,IAAM,UAAU,SAAA;oBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;iBACnD;aACF;;YAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;aAC9C;;YAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;gBAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;aAC5D;;YAGD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;gBACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;aACrE;;YAGD,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;YACzB,IAAI,OAA
O,CAAC,IAAI,IAAI,SAAS,EAAE;;gBAE7B,IAAI,OAAO,CAAC,YAAY,EAAE;oBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;wBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;qBAClD;oBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;wBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;qBAC9D;iBACF;qBAAM;oBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;wBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;qBACH;oBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;wBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;qBAC1C;iBACF;aACF;YAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;YACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;YACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;YACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;YAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;YAErD,OAAO,IAAI,CAAC;SACb;;;;;QAMD,2BAAK,GAAL;YACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;YAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;aACjC;YAED,IAAI,IAAI,CAAC,aAAa,EAAE;gBACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;aAC3C;YAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;gBAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;aACnD;YAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;gBAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;aAC/D;YAED,OAAO,MAAM,CAAC;SACf;QACH,kBAAC;IAAD,CAAC;;IChhBD;IACA;AACA;IACA;IACA;AACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;AACA;IACA,IAAI,aAAa,GAAG,SAAS,CAAC,EAAE,CAAC,EAAE;IACnC,IAAI,aAAa,GAAG,MAAM,CAAC,cAAc;IACzC,SAAS,EAAE,SAAS,EAAE,EAAE,EAAE,YAAY,KAAK,IAAI,UAAU,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,CAAC;IACpF,QAAQ,UAAU,CAAC,EAAE,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IACnF,IAAI,OAAO,aAAa,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/B,CAAC,CAAC;AACF;AACA,IAAO,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE;IAChC,IAAI,aAAa,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACxB,IAAI,SAAS,EAAE,GAAG,EAAE,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE;IAC3C,IAAI,CAAC,CAAC,SAAS,GAAG,CAAC,KAAK,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,SAAS,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC;IACzF,CAAC;AACD;AACA,IAAO,IAAI,QAAQ,GAAG,WAAW;IACjC,IAAI,QAAQ,GAAG,MAAM,CAAC,MAAM,IAAI,SAAS,QAAQ,CAAC,CAAC,EAAE;IACrD,QAAQ,KAAK,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;IAC7D,YAAY,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;IAC7B,YAAY,KAAK,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACzF,SAAS;IACT,QAAQ,OAAO,CAAC,CAAC;IACjB,MAAK;IACL,IAAI,OAAO,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;IAC3C,EAAC;AACD,AA2BA;AACA,IAAO,SAAS,SAAS,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC,EAAE,SAAS,EAAE;IAC7D,IAAI,SAAS,KAAK,CAAC,KAAK,EAAE,EAAE,OAAO,KAAK,YAAY,CAAC,GAAG,KAAK,GAAG,IAAI,CAAC,CAAC,UAAU,OAAO,EAAE,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE;IAChH,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,EAAE,UAAU,OAAO,EAAE,MAAM,EAAE;IAC/D,QAAQ,SAAS,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,
CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;IACnG,QAAQ,SAAS,QAAQ,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;IACtG,QAAQ,SAAS,IAAI,CAAC,MAAM,EAAE,EAAE,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,EAAE;IACtH,QAAQ,IAAI,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,KAAK,CAAC,OAAO,EAAE,UAAU,IAAI,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;IAC9E,KAAK,CAAC,CAAC;IACP,CAAC;AACD;AACA,IAAO,SAAS,WAAW,CAAC,OAAO,EAAE,IAAI,EAAE;IAC3C,IAAI,IAAI,CAAC,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;IACrH,IAAI,OAAO,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,WAAW,EAAE,OAAO,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;IAC7J,IAAI,SAAS,IAAI,CAAC,CAAC,EAAE,EAAE,OAAO,UAAU,CAAC,EAAE,EAAE,OAAO,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;IACtE,IAAI,SAAS,IAAI,CAAC,EAAE,EAAE;IACtB,QAAQ,IAAI,CAAC,EAAE,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC;IACtE,QAAQ,OAAO,CAAC,EAAE,IAAI;IACtB,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IACzK,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;IACpD,YAAY,QAAQ,EAAE,CAAC,CAAC,CAAC;IACzB,gBAAgB,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,MAAM;IAC9C,gBAAgB,KAAK,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;IACxE,gBAAgB,KAAK,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS;IACjE,gBAAgB,KAAK,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,SAAS;IACjE,gBAAgB;IAChB,oBAAoB,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,EAAE;IAChI,oBAAoB,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;IAC1G,oBAAoB,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,MAAM,EAAE;IACzF,oBAAoB,IAAI,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE;IACvF,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;IAC1C,oBAAoB,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,SAAS;IAC3C,aAAa;IACb,YAAY,EAAE,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;IACvC,SAAS,CAAC,OAAO,CAAC,EAAE,EAAE,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,
CAAC,EAAE,SAAS,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE;IAClE,QAAQ,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IACzF,KAAK;IACL,CAAC;AACD,AA4CA;AACA,IAAO,SAAS,cAAc,GAAG;IACjC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;IACxF,IAAI,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE;IACpD,QAAQ,KAAK,IAAI,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE;IACzE,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,OAAO,CAAC,CAAC;IACb,CAAC;;IC3JD;AACA;QAK+B,6BAAK;QAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;YANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;YAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;YAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;SAClD;QAzBe,4BAAkB,GAAW,oBAAoB,CAAC;QAClD,+BAAqB,GAAW,uBAAuB,CAAC;QACxD,qBAAW,GAAW,aAAa,CAAC;QAwBtD,gBAAC;KAAA,CA3B8B,KAAK;;ICNpC;AACA,IAQA;;;AAGA;QAAA;SAwGC;QAvGQ,mCAAW,GAAlB,UAAmB,OAAwB;YACzC,IAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;YAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;gBACzB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;aAC7E;YAED,IAAI,OAAO,CAAC,aAAa,EAAE;gBACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;aACvE;YAED,IAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;YACxC,IAAI,WAAW,EAAE;gBACf,IAAM,UAAQ,GAAG;oBACf,GAAG,CAAC,KAAK,EAAE,CAAC;iBACb,CAAC;gBACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;gBAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;oBACvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;wBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;qBACpD;iBACF,CAAC,CAAC;aACJ;YAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;YAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;YAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;gBACpB,IAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;gBAClC,IAAM,aAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;gBACnC,IAAM,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;oBAC9C,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;wBAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;qBACrD;yBAAM;wBACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;qBAChC;iBACF,CAAC;gBACF,KAAsB,UAAqB,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;oBAAxC,IAAM,OAAO,SAAA;oBAChB,IAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;oBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;wBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;4BACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;yBACxC;qBACF;yBAAM;wBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;qBACrC;iBACF;gBAED,OAAO,CAAC,IAAI,GAAG,aAAW,CAAC;gBAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;gBAC7B,IAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;;oBAEpE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iBACxC;aACF;YAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;YACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAC9C,KAAqB,UAA8B,EAA9B,KAAA,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;gBAAhD,IAAM,MAAM,SAAA;gBACf,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;aACjD;YACD,GAAG,CAA
C,YAAY,GAAG,OAAO,CAAC,kBAAkB,GAAG,MAAM,GAAG,MAAM,CAAC;;YAGhE,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;YAE3D,IAAI,OAAO,CAAC,kBAAkB,EAAE;gBAC9B,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;oBACjC,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;;wBAEvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;4BACtD,IAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,UAAC,OAAO,EAAE,MAAM;gCACjD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;oCAC3B,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;iCACvB,CAAC,CAAC;gCACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;6BAC7C,CAAC,CAAC;4BACH,OAAO,CAAC;gCACN,OAAO,SAAA;gCACP,MAAM,EAAE,GAAG,CAAC,MAAM;gCAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;gCAC1B,QAAQ,UAAA;6BACT,CAAC,CAAC;yBACJ;qBACF,CAAC,CAAC;oBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;iBAC7C,CAAC,CAAC;aACJ;iBAAM;gBACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;oBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;wBAC3B,OAAA,OAAO,CAAC;4BACN,OAAO,SAAA;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;4BAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;yBAC7B,CAAC;qBAAA,CACH,CAAC;oBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;iBAC7C,CAAC,CAAC;aACJ;SACF;QACH,oBAAC;IAAD,CAAC,IAAA;IAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;QAEpD,IAAI,QAAQ,EAAE;YACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,UAAC,QAAQ;gBACxC,OAAA,QAAQ,CAAC;oBACP,WAAW,EAAE,QAAQ,CAAC,MAAM;iBAC7B,CAAC;aAAA,CACH,CAAC;SACH;IACH,CAAC;IAED;AACA,aAAgB,YAAY,CAAC,GAAmB;QAC9C,IAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;QAC1C,IAAM,WAAW,GAAG,GAAG;aACpB,qBAAqB,EAAE;aACvB,IAAI,EAAE;aACN,KAAK,CAAC,SAAS,CAAC,CAAC;QACpB,KAAmB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;YAA3B,IAAM,IAAI,oBAAA;YACb,IAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YAChC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;YACxC,IAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;YAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;SAC9C;QACD,OAAO,eAAe,CAAC;IACzB,CAAC;IAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;QAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;YAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,+BAA6B,OAAO,CAAC,GAAK,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;SAAA,CACF,CAAC;QACF,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;YAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CAAC,yBAAyB,EAAE,SAAS,CAAC,qBAAqB,EAAE,SAAS,EAAE,OAAO,CAAC,CAC9F;SAAA,CACF,CAAC;QACF,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE;YAC9B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,gBAAc,GAAG,CAAC,OAAO,gBAAa,EACtC,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;SAAA,CACF,CAAC;IACJ,CAAC;;ICjLD;AACA,IAKA,WAAY,oBAAoB;;;;QAI9B,6DAAG,CAAA;;;;QAKH,iEAAK,CAAA;;;;QAKL,qEAAO,CAAA;;;;QAKP,+DAAI,CAAA;IACN,CAAC,EApBWG,4BAAoB,KAApBA,4BAAoB,QAoB/B;;IC1BD;IACA;IA2EA;;;;;AAKA,aAAgB,iBAAiB,CAAC,UAAmB;;;;;;QAMnD,MAAM,cAAc,GAAG,UAGtB,CAAC;QACF,QACE,cAAc;YACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;aAC5C,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAChF;IACJ,CAAC;;IChGD;IACA;IAkDA;;;;;AAKA,aAAgB,0BAA0B,CAAC,SAA6B;QACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;IACnF,CAAC;AAED,aAAgB,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;QAEd,IAAI,MAAc,CAAC;QACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;YACrC,MAAM,GAAG,aAAa,CAAC;SACxB;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAClC;aAAM;YACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;SACjC;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;;ICzED;AACA,aAmFgB,iBAAiB,CAAC,aAA4B;QAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;QACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;YAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;YACjF,IACE,iBAAiB,CAAC,UAAU;gBAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;gBACA,MAAM,GAAG,IAAI,CAAC;gBACd,MAAM;aACP;SACF;QACD,OAAO,MAAM,
CAAC;IAChB,CAAC;;ICjGD;IACA;;IAEA,IAAM,MAAM,GAAG,IAAI,SAAS,EAAE,CAAC;IAE/B;IACA;IACA;IACA;IACA;IACA;IACA,IAAI,QAA2D,CAAC;IAChE,IAAI;QACF,IAAI,OAAO,IAAI,CAAC,YAAY,KAAK,WAAW,EAAE;YAC5C,QAAQ,GAAG,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,+BAA+B,EAAE;gBACzE,UAAU,EAAE,UAAC,CAAM,IAAK,OAAA,CAAC,GAAA;aAC1B,CAAC,CAAC;SACJ;KACF;IAAC,OAAO,CAAC,EAAE;QACV,OAAO,CAAC,IAAI,CAAC,uEAAuE,CAAC,CAAC;KACvF;AAED,aAAgB,QAAQ,CAAC,GAAW;;QAClC,IAAI;YACF,IAAM,GAAG,GAAG,MAAM,CAAC,eAAe,QAAE,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,GAAG,oCAAK,GAAG,GAAa,iBAAiB,CAAC,CAAC;YACpG,YAAY,CAAC,GAAG,CAAC,CAAC;YAElB,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;YAC3C,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;SAC7B;QAAC,OAAO,GAAG,EAAE;YACZ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SAC5B;IACH,CAAC;IAED,IAAI,OAAO,GAAG,EAAE,CAAC;IACjB,IAAI;QACF,IAAM,UAAU,UAAI,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,SAAS,oCAAK,SAAS,CAAW,CAAC;QAC5E,OAAO,SACL,MAAM,CAAC,eAAe,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;aAClF,YAAa,mCAAI,EAAE,CAAC;KAC1B;IAAC,OAAO,OAAO,EAAE;;KAEjB;IAED,SAAS,YAAY,CAAC,GAAa;QACjC,IAAI,OAAO,EAAE;YACX,IAAM,YAAY,GAAG,GAAG,CAAC,sBAAsB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;YACxE,IAAI,YAAY,CAAC,MAAM,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAE,CAAC,SAAS,CAAC,CAAC;aAClD;SACF;IACH,CAAC;IAED,SAAS,SAAS,CAAC,IAAU;QAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;IACxC,CAAC;IAED;;;;IAIA,SAAS,uBAAuB,CAAC,IAAU;QACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,GAAG,IAAI,GAAG,SAAS,CAAC;IACpE,CAAC;IAED,SAAS,WAAW,CAAC,IAAU;QAC7B,IAAI,MAAM,GAAQ,EAAE,CAAC;QAErB,IAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QAEtD,IAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAChD,IAAM,kBAAkB,GACtB,CAAC,cAAc;YACb,cAAc,KAAK,CAAC;YACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;YAC1C,cAAc,CAAC,SAAS;YAC1B,SAAS,CAAC;QAEZ,IAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;QACjF,IAAI,qBAAqB,EAAE;YACzB,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAChE,IAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;gBACjD,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;aAC7C;YAED,IAAI,kBAAkB,EAAE;gBACtB,MAAM,CAAC,GAAG,CAAC,GAAG,kBAAkB,CAAC;aAClC;SACF;aAAM,IAAI,cAAc,KAAK,CAAC,EAAE;YAC/B,MAAM,GAAG,EAAE,CAAC;SACb;aAAM,IAAI,kBAAkB,EAAE;YAC7B,MAAM,GAAG,kBAAkB,CAAC;SAC7B;QAED,IAAI,CAAC,kBAAkB,EAAE;YACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;gBACvC,IAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;;gBAEjC,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;oBACrC,IAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,CAAC,CAAC;oBAC5C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;wBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,WAAW,CAAC;qBACtC;yBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;wBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;qBAC1C;yBAAM;wBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;qBAChE;iBACF;aACF;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;IACA,IAAM,GAAG,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IACrE,IAAM,UAAU,GAAG,IAAI,aAAa,EAAE,CAAC;AAEvC,aAAgB,YAAY,CAAC,GAAQ,EAAE,IAA4B;QACjE,IAAM,QAAQ,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,QAAQ,KAAK,MAAM,CAAC;QACnD,IAAM,GAAG,GAAG,SAAS,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;QACxC,QACE,yDAAyD,GAAG,UAAU,CAAC,iBAAiB,CAAC,GAAG,CAAC,EAC7F;IACJ,CAAC;IAED,SAAS,eAAe,CAAC,KAAgD;QACvE,IAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAkB,UAAkB,EAAlB,KAAA,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAlB,cAAkB,EAAlB,IAAkB,EAAE;YAAjC,IAAM,GAAG,SAAA;YACZ,I
AAM,IAAI,GAAG,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;YACtC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;YACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACnB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB;QAC9C,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,SAAS,EAAE;YAClF,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,QAAQ,EAAE,CAAC;YAClC,OAAO,CAAC,IAAI,CAAC,CAAC;SACf;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YAC7B,IAAM,MAAM,GAAG,EAAE,CAAC;YAClB,KAAwB,UAAG,EAAH,WAAG,EAAH,iBAAG,EAAH,IAAG,EAAE;gBAAxB,IAAM,SAAS,YAAA;gBAClB,KAAoB,UAAiC,EAAjC,KAAA,SAAS,CAAC,SAAS,EAAE,WAAW,CAAC,EAAjC,cAAiC,EAAjC,IAAiC,EAAE;oBAAlD,IAAM,KAAK,SAAA;oBACd,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;iBACpB;aACF;YACD,OAAO,MAAM,CAAC;SACf;aAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;YAClC,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;YAC5C,KAAkB,UAAgB,EAAhB,KAAA,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAhB,cAAgB,EAAhB,IAAgB,EAAE;gBAA/B,IAAM,GAAG,SAAA;gBACZ,IAAI,GAAG,KAAK,GAAG,EAAE;oBACf,KAAmB,UAAyB,EAAzB,KAAA,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;wBAAzC,IAAM,IAAI,SAAA;wBACb,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;qBACpC;iBACF;qBAAM;oBACL,KAAoB,UAAwB,EAAxB,KAAA,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;wBAAzC,IAAM,KAAK,SAAA;wBACd,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;qBACzB;iBACF;aACF;YACD,OAAO,CAAC,IAAI,CAAC,CAAC;SACf;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,0CAAwC,GAAK,CAAC,CAAC;SAChE;IACH,CAAC;;ICxKD;AACA;QAmBE,2BACW,WAA0B,EAC1B,QAAkC;YADlC,gBAAW,GAAX,WAAW,CAAe;YAC1B,aAAQ,GAAR,QAAQ,CAA0B;SACzC;;;;;;QASG,qCAAS,GAAhB,UAAiB,QAA8B;YAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;SAC1C;;;;;;;QAQM,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;YACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACtC;QACH,wBAAC;IAAD,CAAC,IAAA;IAsBD;;;AAGA;QACE,8BAAoB,OAA4B;YAA5B,YAAO,GAAP,OAAO,CAAqB;SAAI;;;;;;QAO7C,wCAAS,GAAhB,UAAiB,QAA8B;YAC7C,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;gBACd,QAAQ,KAAKA,4BAAoB,CAAC,GAAG;gBACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;SACH;;;;;;;QAQM,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;YACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;gBAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;aACrC;SACF;QACH,2BAAC;IAAD,CAAC;;ICjGD;AACA,IAmCA;;;;AAIA,aAAgB,qBAAqB,CACnC,2BAAyD;QAEzD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;aACpF;SACF,CAAC;IACJ,CAAC;AAED,IAAO,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,IAAO,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;IAElF;;;;IAIA;QAA2C,yCAAiB;QAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;YAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;YAJC,KAAI,CAAC,gBAAgB;gBACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;YAC/F,KAAI,CAAC,eAAe;gBAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;;SAC9F;QAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;oBAC/C,sBAAO,IAAI,CAAC,WAAW;6BACpB,WAAW,CAAC,OAAO,CAAC;6BACpB,IAAI,CAAC,UAAC,QAA+B;4BACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;yBAAA,CAC/E,EAAC;;;SACL;QACH,4BAAC;IAAD,CAxBA,CAA2C,iBAAiB,GAwB3D;IAED,SAAS,oBAAoB,CAC3B,cAAqC;QAErC,IAAI,MAAqC,CAAC;QAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;QACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;QACvE,IAAI,aAAa,EAAE;YACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;YAC1E,IAAI,CAAC,uBAAuB,EAAE;gBAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;aACzD;iBAAM;gBACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aACjE;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,yBAAyB,CAAC,cAAqC;QACtE,IAAM,iBAAiB,GA
CrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;QAC3C,IAAI,MAAe,CAAC;QACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;YACnC,MAAM,GAAG,IAAI,CAAC;SACf;aAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;YACjD,MAAM,GAAG,iBAAiB,CAAC;SAC5B;aAAM;YACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;SAC5C;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,aAAgB,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;QAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;YAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;YAC7E,IAAI,iBAAiB,EAAE;gBACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;gBACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;oBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;oBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;oBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;yBAC/B,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;oBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;oBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;0BAC1D,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;0BACrC,CAAC,CAAC,YAAY,CAAC;oBACnB,IAAI,CAAC,oBAAoB,EAAE;wBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;wBAC/E,IAAI,mBAAmB,EAAE;4BACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;kCAChE,6BAA2B,UAAY;kCACtC,cAAc,CAAC,UAAqB,CAAC;4BAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;4BACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;4BAC9B,KAAK,CAAC,OAAO,GAAGC,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC3D,KAAK,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;4BAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;4BAC5E,IAAI;gCACF,IAAI,mBAAmB,EAAE;oCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;oCACjC,IACE,yBAAyB;wCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;wCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;4CAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;yCACjD;wCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;4CAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;yCACvC;wCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;4CAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;yCAC7C;qCACF;yCAAM;wCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;wCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;4CAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;yCAC3C;wCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;wCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;4CACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;yCACvC;qCACF;oCAED,IAAI,yBAAyB,EAAE;wCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;wCAClD,IACE,aAAa,CAAC,KAAK;4CACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;4CACA,kBAAkB;gDAChB,OAAO,mBAAmB,KAAK,QAAQ;sDACnC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;sDAC9D,EAAE,CAAC;yCACV;wCACD,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;qCACH;iCACF;6BACF;4BAAC,OAAO,YAAY,EAAE;gCACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;6BAC7J;4BACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;yBAC9B;qBACF;yBAAM,IAAI,YAAY,EAAE;wBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;4BAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;4BACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;gCACpF,kBAAkB;oCAChB,OAAO,kBAAkB,KAAK,QAAQ;0CAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;0CAC3D,EAAE,CAAC;6BACV;4BACD,IAAI;gCACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;6BACH;4BAAC,OAAO,KAAK,EAAE;gCACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;gCACF,SAAS,CAAC,OAAO,GAAGD,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;gCAC/D,SAAS,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;gCACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;6BAClC;yBACF;6BAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;4BAE9C,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;yBAC7E;wBAED,IAAI,YAAY,CAAC,aAAa,EAAE;4BAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC
jE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;yBACH;qBACF;iBACF;aACF;YACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;SACxC,CAAC,CAAC;IACL,CAAC;IAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;QAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;YACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;YACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;YAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;YACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;SAC1B,CAAC;QAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;YACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;YAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;YAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;kBAC5C,EAAE;kBACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,GAAA,CAAC,CAAC;YACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;gBAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EACjF;gBACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;oBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;oBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;iBAC5B,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACxB;iBAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EAAE;gBAC3F,OAAO,QAAQ,CAAC,MAAI,CAAC;qBAClB,IAAI,CAAC,UAAC,IAAI;oBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;oBACpC,OAAO,iBAAiB,CAAC;iBAC1B,CAAC;qBACD,KAAK,CAAC,YAAY,CAAC,CAAC;aACxB;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;IAC5C,CAAC;;ICrSD;AACA,aAyBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;aACH;SACF,CAAC;IACJ,CAAC;IAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;IAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;IACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;IACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;IAEnD;;;;IAIA;QAA4C,0CAAiB;;;;;;;;;;QA2B3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;YAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;YAXC,SAAS,QAAQ,CAAC,CAAM;gBACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;aAC9B;YACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;YACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;YAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;kBAC9C,gBAAgB;kBAChB,iCAAiC,CAAC;YACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;kBAC9C,gBAAgB;kBAChB,iCAAiC,CAAC;;SACvC;QAEM,4CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAKC;YAJC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC;iBAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;SAC7E;QACH,6BAAC;IAAD,CAvDA,CAA4C,iBAAiB,GAuD5D;IAED;;;;;;;;IAQA,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;QAEpB,IACE,UAAU,IAAI,SAAS;aACtB,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;YACxC,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;YACA,OAAO,KAAK,CAAC;SACd;QAED,IAAI,YAAoB,CAAC;QACzB,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;SACnF;aAAM;YACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;SAClD;QAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;;IAOA,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;QAEhB,IAAI,CAAC,SAAS,EAAE;YACd,SAAS,GAAG;gBACV,UAAU,EAAE,CAAC;gBACb,aAAa,EAAE,CAAC;aACjB,CAAC;SACH;QAED,IAAI,GAAG,EAAE;YACP,IAAI,SAAS,CAAC,KAAK,EAAE;gBACnB,G
AAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;aAClC;YAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;SACvB;;QAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;QAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;YAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;QACxF,cAAc,IAAI,gBAAgB,CAAC;QAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;QAEF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;QAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;QAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;QAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;YAC7E,OAAOC,KACC,CAAC,SAAS,CAAC,aAAa,CAAC;iBAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAA,CAAC;iBAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,GAAA,CAAC;iBAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,GAAA,CAAC,CAAC;SACrE;aAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;YAEjD,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;gBACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;YACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SAC5B;aAAM;YACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;SAClC;IACH,CAAC;;IC3ND;AACA,aAYgB,6BAA6B,CAC3C,mBAA8C;QAA9C,oCAAA,EAAA,8CAA8C;QAE9C,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;aACpF;SACF,CAAC;IACJ,CAAC;IAED;QAAmD,iDAAiB;QAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;YAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;SAGrC;QAEM,mDAAW,GAAlB,UAAmB,OAAwB;YACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAEC,YAAkB,EAAE,CAAC,CAAC;aACtE;YACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QACH,oCAAC;IAAD,CAfA,CAAmD,iBAAiB,GAenE;;ICtCD;IACA;AAcA,aAAgB,sBAAsB;QACpC,OAAO,mBAAmB,CAAC;IAC7B,CAAC;AAED,aAAgB,uBAAuB;QACrC,IAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;QAChD,IAAM,MAAM,GAAG;YACb,GAAG,EAAE,IAAI;YACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,EAAE,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;SAChE,CAAC;QAEF,OAAO,CAAC,MAAM,CAAC,CAAC;IAClB,CAAC;;IC3BD;AACA,IAgBA,SAAS,cAAc;QACrB,IAAM,aAAa,GAAG;YACpB,GAAG,EAAE,YAAY;YACjB,KAAK,EAAE,SAAS,CAAC,aAAa;SAC/B,CAAC;QAEF,OAAO,CAAC,aAAa,CAAC,CAAC;IACzB,CAAC;IAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;QADpB,6BAAA,EAAA,kBAAkB;QAClB,+BAAA,EAAA,oBAAoB;QAEpB,OAAO,aAAa;aACjB,GAAG,CAAC,UAAC,IAAI;YACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,GAAG,EAAE,CAAC;YACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;SAC9B,CAAC;aACD,IAAI,CAAC,YAAY,CAAC,CAAC;IACxB,CAAC;AAED,IAAO,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,aAAgB,wBAAwB;QACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;QACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;QACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;QAC/E,OAAO,SAAS,CAAC;IACnB,CAAC;AAED,aAAgB,eAAe,CAAC,aAA6B;QAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,GAAG,sBAAsB,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC;QAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;cAC9C,wBAAwB,EAAE;cAC1B,aAAa,CAAC,KAAK,CAAC;QAE1B,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;aAC7D;SAC
F,CAAC;IACJ,CAAC;IAED;QAAqC,mCAAiB;QACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,WAAmB;YAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;YANU,iBAAW,GAAX,WAAW,CAAe;YAC1B,cAAQ,GAAR,QAAQ,CAA0B;YACjC,eAAS,GAAT,SAAS,CAAQ;YACjB,iBAAW,GAAX,WAAW,CAAQ;;SAG9B;QAED,qCAAW,GAAX,UAAY,OAAwB;YAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;YACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,4CAAkB,GAAlB,UAAmB,OAAwB;YACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACrC;YAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;gBAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;aACvD;SACF;QACH,sBAAC;IAAD,CAxBA,CAAqC,iBAAiB,GAwBrD;;ICvFD;AACA,IAMA;;;AAGA;QAAA;YACmB,cAAS,GAAwD,EAAE,CAAC;SAqHtF;;;;QAhHQ,sBAAG,GAAV;YACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;SAC/C;;;;;;QAOM,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;YACnD,IAAI,aAAa,EAAE;gBACjB,IAAI,cAAc,IAAI,SAAS,EAAE;oBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,cAAc,GAAG,cAAc,CAAC,QAAQ,EAAE,CAAC;oBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;iBAC1C;qBAAM;oBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;iBACtC;aACF;SACF;;;;;QAMM,sBAAG,GAAV,UAAW,aAAqB;YAC9B,OAAO,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,CAAC;SAClE;;;;QAKM,2BAAQ,GAAf;YACE,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;gBAC1C,IAAI,MAAM,EAAE;oBACV,MAAM,IAAI,GAAG,CAAC;iBACf;gBACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;gBACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;oBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;oBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;wBAA/C,IAAM,qBAAqB,uBAAA;wBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;qBACpE;oBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;iBACtC;qBAAM;oBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;iBAChD;aACF;YACD,OAAO,MAAM,CAAC;SACf;;;;QAKa,cAAK,GAAnB,UAAoB,IAAY;YAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;YAE9B,IAAI,IAAI,EAAE;gBACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,YAAY,GAAuB,eAAe,CAAC;gBAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;gBACvB,IAAI,cAAc,GAAG,EAAE,CAAC;gBACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;oBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;oBACzC,QAAQ,YAAY;wBAClB,KAAK,eAAe;4BAClB,QAAQ,gBAAgB;gCACtB,KAAK,GAAG;oCACN,YAAY,GAAG,gBAAgB,CAAC;oCAChC,MAAM;gCAER,KAAK,GAAG;oCACN,aAAa,GAAG,EAAE,CAAC;oCACnB,cAAc,GAAG,EAAE,CAAC;oCACpB,MAAM;gCAER;oCACE,aAAa,IAAI,gBAAgB,CAAC;oCAClC,MAAM;6BACT;4BACD,MAAM;wBAER,KAAK,gBAAgB;4BACnB,QAAQ,gBAAgB;gCACtB,KAAK,GAAG;oCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;oCAC1C,aAAa,GAAG,EAAE,CAAC;oCACnB,cAAc,GAAG,EAAE,CAAC;oCACpB,YAAY,GAAG,eAAe,CAAC;oCAC/B,MAAM;gCAER;oCACE,cAAc,IAAI,gBAAgB,CAAC;oCACnC,MAAM;6BACT;4BACD,MAAM;wBAER;4BACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;qBACzE;iBACF;gBACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;oBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;iBAC3C;aACF;YAED,OAAO,MAAM,CAAC;SACf;QACH,eAAC;IAAD,CAAC,IAAA;IAED;;;AAGA;QAAA;SAiPC;;;;;QAtOQ,8BAAS,GAAhB,UAAiB,MAA0B;YACzC,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;aAC1B;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;aAC5B;SACF;;;;QAKM,8BAAS,GAAhB;YACE,OAAO,IAAI,CAAC,OAAO,CAAC;SACrB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAwB;YACrC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;aAClC;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAiC;YAC9C,IAAI,IAAI,IAAI
,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;gBACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;aACnC;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAwB;YACrC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;gBACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;oBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;;;oBAGvD,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;iBAC9E;qBAAM;oBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;iBACxB;aACF;SACF;;;;;QAMM,+BAAU,GAAjB,UAAkB,IAAwB;YACxC,IAAI,IAAI,EAAE;gBACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACrD,IAAI,WAAW,EAAE;oBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;wBAC9B,WAAW,IAAI,GAAG,CAAC;qBACpB;oBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;wBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;qBAC1B;oBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;iBAC3B;gBACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;QAKM,6BAAQ,GAAf,UAAgB,KAAyB;YACvC,IAAI,CAAC,KAAK,EAAE;gBACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;aACzB;iBAAM;gBACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;aACrC;SACF;;;;;;QAOM,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;YAC3E,IAAI,kBAAkB,EAAE;gBACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;oBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;iBAC9B;gBACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;aAC1D;SACF;;;;;QAMM,2CAAsB,GAA7B,UAA8B,kBAA0B;YACtD,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC;SACtE;;;;QAKM,6BAAQ,GAAf;YACE,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,SAAS,CAAC;SACzD;;;;QAKO,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;YACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;YAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;gBACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;gBACxD,IAAI,KAAK,EAAE;oBACT,QAAQ,KAAK,CAAC,IAAI;wBAChB,KAAK,QAAQ;4BACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACvC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACrC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACrC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;gCAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;6BACxB;4BACD,MAAM;wBAER,KAAK,OAAO;4BACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;4BACzC,MAAM;wBAER;4BACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;qBAC/D;iBACF;aACF;SACF;QAEM,6BAAQ,GAAf;YACE,IAAI,MAAM,GAAG,EAAE,CAAC;YAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;gBAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;aAChC;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;aACtB;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;aAC5B;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBAC/B,MAAM,IAAI,GAAG,CAAC;iBACf;gBACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;aACtB;YAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;gBACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;aACxC;YAED,OAAO,MAAM,CAAC;SACf;;;;;QAMM,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;YACzD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,OAAO,CAAC,UAAU,C
AAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;aACvE;SACF;QAEa,gBAAK,GAAnB,UAAoB,IAAY;YAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;YAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;YACnC,OAAO,MAAM,CAAC;SACf;QACH,iBAAC;IAAD,CAAC,IAAA;IAMD;QACE,kBAAmC,IAAY,EAAkB,IAAkB;YAAhD,SAAI,GAAJ,IAAI,CAAQ;YAAkB,SAAI,GAAJ,IAAI,CAAc;SAAI;QAEzE,eAAM,GAApB,UAAqB,IAAY;YAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;SACrC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,cAAK,GAAnB,UAAoB,IAAY;YAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;SACpC;QACH,eAAC;IAAD,CAAC,IAAA;IAED;;;;AAIA,aAAgB,uBAAuB,CAAC,SAAiB;QACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QACtD,QACE,CAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE;aACpD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC;aACrD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,YACvD;IACJ,CAAC;IAED;;;IAGA;QAME,sBAA4B,KAAa,EAAE,KAAyB;YAAxC,UAAK,GAAL,KAAK,CAAQ;YACvC,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;YAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,GAAG,KAAK,GAAG,gBAAgB,CAAC;YACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;SACxB;;;;;QAMM,8BAAO,GAAd;YACE,OAAO,IAAI,CAAC,aAAa,CAAC;SAC3B;;;;QAKM,2BAAI,GAAX;YACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;gBAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;aAChC;iBAAM;gBACL,QAAQ,IAAI,CAAC,aAAa;oBACxB,KAAK,QAAQ;wBACX,UAAU,CAAC,IAAI,CAAC,CAAC;wBACjB,MAAM;oBAER,KAAK,gBAAgB;wBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;wBACvB,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,OAAO;wBACV,SAAS,CAAC,IAAI,CAAC,CAAC;wBAChB,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;iBAC5E;aACF;YACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;SAC7B;QACH,mBAAC;IAAD,CAAC,IAAA;IAED;;;IAGA,SAAS,aAAa,CAAC,SAAuB;QAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;YACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;SACjD;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;IAGA,SAAS,mBAAmB,CAAC,SAAuB;QAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;IACzD,CAAC;IAED;;;IAGA,SAAS,mBAAmB,CAAC,SAAuB;QAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;IAClD,CAAC;IAED;;;;IAIA,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;QAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,CAAC,CAAC;aACV;YACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;SACjC;IACH,CAAC;IAED;;;;IAIA,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;QACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;QAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;YACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;SAClC;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;IACtE,CAAC;IAED;;;;IAIA,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;QACnF,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;YAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;gBAChC,MAAM;aACP;iBAAM;gBACL,MAAM,IAAI,gBAAgB,CAAC;gBAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;aAC1B;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;IAIA,SAAS,sBAAsB,CAAC,SAAuB;QACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,SAAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,GAAA,CAAC,CAAC;IACzF,CAAC;IAED;
;;;IAIA,SAAS,kBAAkB,CAAC,SAAuB;QAAE,+BAAkC;aAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;YAAlC,8CAAkC;;QACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CACvE,CAAC;IACJ,CAAC;IAED,SAAS,UAAU,CAAC,SAAuB;QACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;QACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;IACH,CAAC;IAED,SAAS,gBAAgB,CAAC,SAAuB;QAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;gBAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;gBACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;iBAAM;gBACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;SACF;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;gBAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;iBAAM;gBACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;aACnC;SACF;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;SAC7B;QAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;QAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;QACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,SAAS,CAAC,SAAuB;QACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;QAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;QAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;IACnC,CAAC;;IClpBD;AACA,IA4BO,IAAM,sBAAsB,GAAoB;QACrD,eAAe,EAAE,IAAI;QACrB,UAAU,EAAE,EAAE;KACf,CAAC;AAEF,aAAgB,cAAc,CAAC,cAAmB;QAAnB,+BAAA,EAAA,mBAAmB;QAChD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;aAChE;SACF,CAAC;IACJ,CAAC;IAED;QAAoC,kCAAiB;QACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;YAAf,2BAAA,EAAA,eAAe;YAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHU,gBAAU,GAAV,UAAU,CAAK;;SAGzB;QAEM,oCAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC;iBACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;SAC1D;QACH,qBAAC;IAAD,CAdA,CAAoC,iBAAiB,GAcp
D;IAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;QAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;QACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QACxD,IACE,cAAc;aACb,MAAM,KAAK,GAAG;iBACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;iBAC3D,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;iBACnE,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC;gBAC7C,MAAM,KAAK,GAAG,CAAC;aAChB,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa;iBAC5E,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;YACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;YAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;;YAKjC,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,KAAK,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;gBACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;gBACvB,OAAO,OAAO,CAAC,IAAI,CAAC;aACrB;YAED,OAAO,MAAM,CAAC,WAAW;iBACtB,WAAW,CAAC,OAAO,CAAC;iBACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,GAAA,CAAC;iBAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;SACpD;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;IAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;;;QAGvE,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;YACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;YAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;SACzB;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;;aC5Fe,oBAAoB,CAAC,YAAiB;QAAjB,6BAAA,EAAA,iBAAiB;QACpD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;aACpE;SACF,CAAC;IACJ,CAAC;IAED;QAA0C,wCAAiB;QACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;YAAlB,8BAAA,EAAA,kBAAkB;YAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHU,mBAAa,GAAb,aAAa,CAAK;;SAG5B;QAEM,0CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;SAClE;QACH,2BAAC;IAAD,CAdA,CAA0C,iBAAiB,GAc1D;IAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;QAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;YACxE,IAAI,MAAM,EAAE;gBACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;qBAI3C,KAAK,CAAC,cAAM,OAAA,KAAK,GAAA,CAAC;qBAClB,IAAI,CAAC,UAAC,kBAAkB;oBACvB,IAAI,kBAAkB,EAAE;;;wBAGtB,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;wBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;qBACxD;oBACD,OAAO,QAAQ,CAAC;iBACjB,CAAC,EACJ;aACH;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;IAED;;;;;;IAMA,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;QAAnB,4BAAA,EAAA,mBAAmB;QAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;QAC5D,IAAI,WAAW,EAAE;YACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;SACtC;;;QAID,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;QAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;QAE1E,OAAO,UAAU,CAAC;IACpB,CAAC;IAED;;;;;;IAMA,SAAS,yBAAyB,CAAC,IAAY;QAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;QACzB,IAAI,IAAI,EAAE;YACR,IAAI;gBACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aACjC;YAAC,OAAO,GAAG,EAAE;;aAEb;YACD,IACE,YAAY;gBACZ,YAAY,CAAC,KAAK;gBAClB,YAAY,CAAC,KAAK,CAAC,OAAO;gBAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;gBACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;gBACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;gBAC/D,IAAI,QAAQ,EAAE;oBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;iBACzB;aACF;SACF;QACD,OAAO,MAAM,CAA
C;IAChB,CAAC;IAED;;;;;;IAMA,SAAS,sBAAsB,CAAC,GAAW;QACzC,IAAI,MAAM,CAAC;QACX,IAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;QAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;SACtB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;SAClF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;;;IASA,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;QAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;QACpF,IAAM,MAAM,GAAM,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;QAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;QACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;QAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;QAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;YAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;aAC7F;YACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;SAC/D,CAAC,CAAC;IACL,CAAC;IAED;;;;;;;;;IASA,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;QAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;QAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;QACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;QAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;YACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;YAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;gBACrF,OAAO,IAAI,CAAC;aACb;iBAAM;gBACL,OAAOC,KACC,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;qBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,GAAA,CAAC,CAAC;aACpE;SACF,CAAC,CAAC;IACL,CAAC;;ICpMD;AACA,aAYgB,aAAa,CAC3B,sBAAgD;QAEhD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;aACvE;SACF,CAAC;IACJ,CAAC;IAED;QAAmC,iCAAiB;QAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;YAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;SAGxD;QAED,mCAAW,GAAX,UAAY,OAAwB;YAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SACzD;QAEM,mCAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;gBAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;aAAA,CAC1C,CAAC;SACH;QACH,oBAAC;IAAD,CAlBA,CAAmC,iBAAiB,GAkBnD;;ICzCD;AACA,aAwBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;aACH;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;IAUA;QAA4C,0CAAiB;QAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;YAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;YAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;YAC1C,gCAA0B,GAAG,CAAC,CAAC;YAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;YAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;YAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,GAAG,KAAI,CAAC,0BAA0B,CAAC;YAChG,KAAI,CAAC,aAAa;gBAChB,OAAO,aAAa,KAAK,QAAQ,GAAG,aAAa,GAAG,KAAI,CAAC,6BAA6B,CAAC;YACzF,KAAI,CAAC,gBAAgB;gBACnB,OAAO,gBAAgB,KAAK,QAAQ;sBAChC,gBAAgB;sBAChB,KAAI,CAAC,iCAAiC,CAAC;YAC7C,KAAI,CAAC,gBAAgB;gBACnB,OAAO,gBAAgB,KAAK,QAAQ;sBAChC,gBAAgB;sBAChB,KAAI,CAAC,iCAAiC,CAAC;;SAC9C;QAEM,4CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAAC,OAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;SAClE;QACH,6BAAC;IAAD,CArCA,CAA4C,iBAAiB,GAqC5D;IAED;;;;;;;IAOA,SAASC,aAAW,CAAC,MAA8B,EAAE,SAAoB;QACvE,IAAI,YAAY,CAAC;QACjB,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;SACnF;aAAM;YACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;SAClD;QACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;IAMA,SAASC,iBAAe,CACtB,MAA8
B,EAC9B,SAAqB,EACrB,GAAgB;QAEhB,IAAI,CAAC,SAAS,EAAE;YACd,SAAS,GAAG;gBACV,UAAU,EAAE,CAAC;gBACb,aAAa,EAAE,CAAC;aACjB,CAAC;SACH;QAED,IAAI,GAAG,EAAE;YACP,IAAI,SAAS,CAAC,KAAK,EAAE;gBACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;aAClC;YAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;SACvB;;QAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;QAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;QACxF,cAAc,IAAI,gBAAgB,CAAC;QAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;QAEF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAeF,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;wBAErB,SAAS,GAAGE,iBAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;8BAElD,GAAG;4BACH,GAAG,CAAC,IAAI;4BACRD,aAAW,CAAC,MAAM,EAAE,SAAS,CAAC;6BAC7B,GAAG,CAAC,IAAI,KAAK,WAAW;gCACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;gCAC9B,GAAG,CAAC,IAAI,KAAK,cAAc;gCAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;gCACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;wBAItB,qBAAME,KAAW,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;wBAA1C,SAA0C,CAAC;wBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;wBAEvD,sBAAOH,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;wBAGrE,IAAI,GAAG,EAAE;;4BAEP,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;yBACxC;wBACD,sBAAO,iBAAiB,EAAC;;;;;KAE5B;;IC1LD;AACA,IAKA,WAAY,qBAAqB;QAC/B,kCAAS,CAAA;QACT,kCAAS,CAAA;QACT,mCAAU,CAAA;QACV,oCAAW,CAAA;QACX,wCAAe,CAAA;IACjB,CAAC,EANWI,6BAAqB,KAArBA,6BAAqB,QAMhC;;ICZD;AACA,IAYA,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,aAAgB,WAAW,CAAC,cAA8B;QACxD,OAAO;YACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;gBACrE,MAAM,0BAA0B,CAAC;aAClC;SACF,CAAC;IACJ,CAAC;IAED;QAAiC,+BAAiB;QAChD,qBAAY,UAAyB,EAAE,OAAiC;YAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,MAAM,0BAA0B,CAAC;SAClC;QAEM,iCAAW,GAAlB,UAAmB,QAAyB;YAC1C,MAAM,0BAA0B,CAAC;SAClC;QACH,kBAAC;IAAD,CATA,CAAiC,iBAAiB,GASjD;;IChCD;AACA,IAYA,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,aAAgB,uBAAuB,CAAC,SAAkB;QACxD,OAAO,SAAS,CAAC;IACnB,CAAC;AAED,aAAgB,WAAW,CAAC,cAA8B;QACxD,OAAO;YACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;gBACrE,MAAM,0BAA0B,CAAC;aAClC;SACF,CAAC;IACJ,CAAC;IAED;QAAiC,+BAAiB;QAChD,qBAAY,UAAyB,EAAE,OAAiC;YAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,MAAM,0BAA0B,CAAC;SAClC;QAEM,iCAAW,GAAlB,UAAmB,QAAyB;YAC1C,MAAM,0BAA0B,CAAC;SAClC;QACH,kBAAC;IAAD,CATA,CAAiC,iBAAiB,GASjD;;ICpCD;AACA,IAaA,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;IACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,aAAgB,qBAAqB,CACnC,UAAwC;QAAxC,2BAAA,EAAA,gCAAwC;QAExC,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;aACnE;SACF,CAAC;IACJ,CAAC;IAED;;;;;;IAMA;QAA2C,yCAAiB;QAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;YAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;SAC9B;QAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;oBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;4BACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;yBAC7C,CAAC,EAAC;;;SACJ;QAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;4BAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;gCACvD,sBAAO,YAAY,EAAC;6BACrB;4BAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;kCAEE,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;4BAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;iCACE,SAAS,EAAT,wBAAS;4BACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;4BAAtB,SAAsB,CAAC
;4BACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;4BAArD,GAAG,GAAG,SAA+C;4BAC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;gCAIxD,sBAAO,YAAY,EAAC;;;;SACrB;QAEa,2CAAqB,GAAnC,UAAoC,WAAmB;YACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;YAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;gBACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;aACrE;iBAAM;gBACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;aACnC;SACF;QAEa,+CAAyB,GAAvC,UAAwC,WAAmB;YACzD,IAAI;gBACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;gBAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;gBAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;gBAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;aAC9C;YAAC,OAAO,KAAK,EAAE;gBACd,OAAO,SAAS,CAAC;aAClB;SACF;QACH,4BAAC;IAAD,CA7DA,CAA2C,iBAAiB,GA6D3D;;ICxGD;AACA,IASA,IAAM,4BAA4B,GAAG,QAAQ,CAAC;IAE9C;;;AAGA,IAAO,IAAM,6BAA6B,GAAG;QAC3C,gCAAgC;QAChC,qCAAqC;QACrC,sCAAsC;QACtC,gCAAgC;KACjC,CAAC;IAEF;;;;AAIA;QAGE,wCACE,oBAAqC,EACrC,MAAmE;YAAnE,uBAAA,EAAA,gDAAmE;YAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;YACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;SACtB;QAEY,iDAAQ,GAArB;;;;;gCACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;4BAAnE,WAAW,GAAG,SAAqD;4BACzE,IAAI,WAAW,KAAK,IAAI,EAAE;gCAClB,MAAM,GAAkB;oCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;oCAC9B,SAAS,EAAE,4BAA4B;oCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;iCAC1C,CAAC;gCACF,sBAAO,MAAM,EAAC;6BACf;iCAAM;gCACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;6BAC/C;;;;SACF;QAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;gCACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;4BAArC,aAAa,GAAG,SAAqB;4BAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrBC,SAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;4BACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;SACrC;QACH,qCAAC;IAAD,CAAC;;IC3DD;AACA,IA+JA;;;;AAIA;;;;;;;QAgCE,uBACE,WAAwD,EACxD,OAA8B;YAE9B,IAAI,CAAC,OAAO,EAAE;gBACZ,OAAO,GAAG,EAAE,CAAC;aACd;YAED,IAAI,OAAO,CAAC,OAAO,EAAE;gBACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;aAChC;YAED,IAAI,wBAA8D,CAAC;YACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;gBAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;gBAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;oBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;iBACvC;gBACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;aACnF;iBAAM;gBACL,wBAAwB,GAAG,WAAW,CAAC;aACxC;YAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;gBACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;aAC/E;YAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;YACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAIC,aAAiB,EAAE,CAAC;YACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;YAElF,IAAI,sBAA8C,CAAC;YACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;gBACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;aACzD;iBAAM;gBACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;gBACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;oBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;oBACpF,IAAI,yBAAyB,EAAE;wBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;qBACpD;iBACF;aACF;YACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;SACvD;;;;QAKD,mCAAW,GAAX,UAAY,OAAgD;YAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;gBAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;YAED,IAAI,WAA4B,CAAC;YACjC,IAAI;gBACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;oBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;oBACpC,WAAW,GAAG,OAAO,CAAC;iBACvB;qBAAM;oBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;oBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;iBAC5C;aACF;YAAC,OAAO,KAAK,EAAE;gBACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aAC9B;YAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;YACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAA
uB,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;oBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;iBACH;aACF;YACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;SAC9C;;;;;;;QAQD,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;YAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;gBACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;gBACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;aACxC;YAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;YAEtC,IAAI,MAA6B,CAAC;YAClC,IAAI;gBACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;gBAC1E,IAAI,CAAC,OAAO,EAAE;oBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;iBACH;gBAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;gBAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;gBAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gBACzD,IAAI,aAAa,CAAC,IAAI,EAAE;oBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;iBAC3C;gBACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;oBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;wBAAnD,IAAM,YAAY,SAAA;wBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;wBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;4BAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;yBAC3D;wBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;qBACH;iBACF;gBACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;wBAAvD,IAAM,cAAc,SAAA;wBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;4BACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;4BACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;gCAChD,IAAI,cAAc,CAAC,gBAAgB,KAAKF,6BAAqB,CAAC,KAAK,EAAE;oCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;wCACpC,mBAAmB,GAAG,EAAE,CAAC;qCAC1B;yCAAM;wCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;4CACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;4CACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;yCACvE;qCACF;iCACF;qCAAM,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;oCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;oCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;iCACjF;6BACF;4BACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;gCAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;oCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;4CACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;4CACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;yCAC7E;qCACF;iCACF;qCAAM;oCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;iCAC/D;6BACF;4BACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;gCAC5C,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;gCAC/D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;4BACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;yBACH;qBACF;iBACF;gBACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;gBAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;gBACzE,IAAI,WAAW,EAAE;oBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;iBACtD;gBAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;oBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cA
A8B,EAA9B,IAA8B,EAAE;wBAAzD,IAAM,eAAe,SAAA;wBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,IAAI,WAAW,IAAI,SAAS,EAAE;4BAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;4BACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;iCACxE,sBAAsB,CAAC;4BAC1B,IAAI,sBAAsB,EAAE;gCAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oCAAvC,IAAM,GAAG,SAAA;oCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;iCACzE;6BACF;iCAAM;gCACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;oCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;6BACH;yBACF;qBACF;iBACF;gBAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;gBAC3E,IAAI,OAAO,EAAE;oBACX,IAAI,OAAO,CAAC,aAAa,EAAE;wBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;4BACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;yBACpF;qBACF;oBAED,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;qBAC/C;oBAED,IAAI,OAAO,CAAC,OAAO,EAAE;wBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;qBACvC;oBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;wBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;qBACzD;oBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;wBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;qBAC7D;iBACF;gBAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;gBAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;gBAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;oBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;iBACnE;gBAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;oBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;iBAAA,CAC1D,CAAC;aACH;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aAChC;YAED,IAAM,EAAE,GAAG,QAAQ,CAAC;YACpB,IAAI,EAAE,EAAE;gBACN,MAAM;;qBAEH,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,GAAA,CAAC;qBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;aAC5B;YAED,OAAO,MAAM,CAAC;SACf;QACH,oBAAC;IAAD,CAAC,IAAA;aAEe,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;QAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;YACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;YAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;YAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;YACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;YACtC,IAAI;gBACF,IAAI,WAAW,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;oBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;oBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;oBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;oBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;wBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BG,kBAAwB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;yBACH;6BAAM,IAAI,CAAC,QAAQ,EAAE;4BACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;gCAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;6BACpC,CAAC,CAAC;yBACJ;qBACF;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;qBACrD;iBACF;aACF;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;aACH;SACF;aAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,
CAAC,MAAM,GAAG,CAAC,EAAE;YAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;YAC1B,KAAgC,UAAgC,EAAhC,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAAE;gBAA7D,IAAM,iBAAiB,SAAA;gBAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;gBACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;oBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;oBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;iBACH;aACF;SACF;IACH,CAAC;IAED,SAAS,sBAAsB,CAAC,QAAa;QAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;IAC/C,CAAC;IAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;QAEjC,IAAI,MAAc,CAAC;QACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,MAAM,GAAG,KAAK,CAAC;SAChB;aAAM;YACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;YAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;gBAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;aACxB;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;QAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;QAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;YACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;SAClF;QAED,IAAI,WAAW,EAAE;YACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;gBACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;aAC7B;iBAAM;gBACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;aAC5C;SACF;QAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;QACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;QACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;YAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;SAC5F;QAED,IAAM,eAAe,yBAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;QACF,IAAI,eAAe,CAAC,eAAe,EAAE;YACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;SAC5D;QAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;QAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;YACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;YACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;SACzC;QAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;QAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;QACzE,IAAI,aAAa,EAAE;YACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,AAAa,CAAC,CAAC,CAAC;SAC5C;QAED,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;SACpD;QAED,OAAO,SAAS,CAAC;IACnB,CAAC;AAID,IAkBA,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;QAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;IACJ,CAAC;AAED,aAAgB,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;QAEtB,IAAI,KAAU,CAAC;QACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;YACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;SACjC;QACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;oBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;iBACtC;qBAAM;oBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;oBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;wBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;qBACnF;oBAED,IAAI,eAAe,GAAG,KAAK,CAAC;oBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;wBACvC,eAAe;4BACb,eAAe,CAAC,QAAQ;iCACvB,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;qBAClE;oBACD,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;iBAC7F;;gBAGD,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;gBACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;aACnE;SACF;aAAM;YACL,IAAI,eAAe,CAAC,QAAQ,EAAE;gBAC5B,KAAK,GAAG,EAAE,CAAC;aACZ;YAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAM,cAAc,GAAY,eA
AmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;gBACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;gBAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;gBAEF,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;gBACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;gBACxE,IAAI,aAAa,KAAK,SAAS,EAAE;oBAC/B,IAAI,CAAC,KAAK,EAAE;wBACV,KAAK,GAAG,EAAE,CAAC;qBACZ;oBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;iBACrC;aACF;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;QAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;QAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;YAEnD,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;gBACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;aACpC;iBAAM;gBACL,MAAM;aACP;SACF;QACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;YAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;YAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;SAC7B;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,aAAgB,eAAe,CAC7B,SAAgC,EAChC,YAA2C;QAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;QAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;QAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;YACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;gBACtC,KAAK,EAAE,SAAS;aACjB,CAAC;SAAA,CAAC;QAEL,IAAI,UAAU,EAAE;YACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;YACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;gBACzB,OAAO,oBAAoB,uBACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;aACJ;YAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;YAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,GAAA,CAChD,CAAC;YACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;;;gBAGjD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,SAAS,CAAC,UAAU,GAAG,EAAE,CAAC;gBACnF,IAAM,aAAa,GAAG,eAAI,UAAU,CAAyB,CAAC;gBAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;oBAA3C,IAAM,GAAG,SAAA;oBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;wBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;qBAChD;iBACF;gBAED,IAAI,aAAa,EAAE;oBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;wBAAzC,IAAM,GAAG,SAAA;wBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;qBACzC;iBACF;gBACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;gBACpC,OAAO,aAAa,CAAC;aACtB;YAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;gBACzD,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;aACJ;SACF;QAED,IACE,UAAU;YACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;YACnCC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;YAEA,OAAO,oBAAoB,uBACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;SACJ;QAED,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;IACL,CAAC;;ICn1BD;AACA,aAWgB,SAAS,CAAC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QACjD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;aACnD;SACF,CAAC;IACJ,CAAC;IAED;QAA+B,6BAAiB;QAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;YAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;YAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;SACtB;QAEM,+BAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAEC;YADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;SAC9F;QACH,gBAAC;IAAD,CAfA,CAA+B,iBAAiB,GAe/C;IAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;QAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;QAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,C
AAC,CAAC;QAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;QACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAAc,CAAC,CAAC;QAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;;IC9CD;AACA,IAOA,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;IAClD,IAAMC,8BAA4B,GAAG,QAAQ,CAAC;IAE9C;;;AAGA;;;;;;;;QAWE,0BAAY,KAAa,EAAE,mBAA0D;YAA1D,oCAAA,EAAA,oDAA0D;YATrF,wBAAmB,GAAWA,8BAA4B,CAAC;YAUzD,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;YACnB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;SAChD;;;;;;;QAQD,sCAAW,GAAX,UAAY,WAA4B;YACtC,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;YACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,uBAAC;IAAD,CAAC;;IC/CD;AACA,IAOA,IAAMC,iBAAe,GAAG,SAAS,CAAC,eAAe,CAAC;IAClD,IAAMD,8BAA4B,GAAG,OAAO,CAAC;AAE7C;;;;;;;;;QAaE,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;YAA1D,oCAAA,EAAA,oDAA0D;YAb5D,wBAAmB,GAAWA,8BAA4B,CAAC;YAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;SAChD;;;;;;;QAQD,oDAAW,GAAX,UAAY,WAA4B;YACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;YACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAIE,YAAmB,CAAC,WAAW,CAAG,CAAC;YAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAACD,iBAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;YAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,qCAAC;IAAD,CAAC;;ICrDD;AACA,IAqBA;;;AAGA;;;;;QAcE,2BAAY,OAAgC;YAC1C,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;gBAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;aACH;YACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;;;;;;;QAQD,uCAAW,GAAX,UAAY,WAA4B;YACtC,IAAI,CAAC,WAAW,EAAE;gBAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;aACH;YAED,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;oBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;iBACzC;gBACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;oBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;iBAChE;aACF;YAED,IAAI,IAAI,CAAC,OAAO,EAAE;gBAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;oBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;iBAC/E;gBACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;oBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;oBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;wBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;qBACxB;oBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;iBAClD;aACF;YAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,wBAAC;IAAD,CAAC;;ICxFD;AACA;QAIsC,oCAAiB;;;;;;;QAOrD,0BAAY,QAAgB;YAA5B,iBAUC;YATC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;gBAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAM,OAAO,GAA4B;gBACvC,QAAQ,EAAE;oBACR,aAAa,EAAE,QAAQ;iBACxB;aACF,CAAC;YACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;SAChB;QACH,uBAAC;IAAD,CAlBA,CAAsC,iBAAiB;;ICLvD;AACA;QAIuC,qCAAiB;;;;;;;QAOtD,2BAAY,SAAiB;YAA7B,iBAUC;YATC,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;gBAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;aACtF;YACD,IAAM,OAAO,GAA4B;gBACvC,QAAQ,EA
AE;oBACR,aAAa,EAAE,SAAS;iBACzB;aACF,CAAC;YACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;SAChB;QACH,wBAAC;IAAD,CAlBA,CAAuC,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js b/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js new file mode 100644 index 0000000..bfac679 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js @@ -0,0 +1,19 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e=e||self).msRest={})}(this,(function(e){"use strict";function t(e){return e.toLowerCase()}function r(e){return!(!e||"object"!=typeof e)&&("function"==typeof e.rawHeaders&&"function"==typeof e.clone&&"function"==typeof e.get&&"function"==typeof e.set&&"function"==typeof e.contains&&"function"==typeof e.remove&&"function"==typeof e.headersArray&&"function"==typeof e.headerValues&&"function"==typeof e.headerNames&&"function"==typeof e.toJson)}var n,o=function(){function e(e){if(this._headersMap={},e)for(var t in e)this.set(t,e[t])}return e.prototype.set=function(e,r){this._headersMap[t(e)]={name:e,value:r.toString()}},e.prototype.get=function(e){var r=this._headersMap[t(e)];return r?r.value:void 0},e.prototype.contains=function(e){return!!this._headersMap[t(e)]},e.prototype.remove=function(e){var r=this.contains(e);return delete this._headersMap[t(e)],r},e.prototype.rawHeaders=function(){var e={};for(var t in this._headersMap){var r=this._headersMap[t];e[r.name.toLowerCase()]=r.value}return e},e.prototype.headersArray=function(){var e=[];for(var t in this._headersMap)e.push(this._headersMap[t]);return e},e.prototype.headerNames=function(){for(var e=[],t=this.headersArray(),r=0;r1&&void 0!==arguments[1]?arguments[1]:0,r=(f[e[t+0]]+f[e[t+1]]+f[e[t+2]]+f[e[t+3]]+"-"+f[e[t+4]]+f[e[t+5]]+"-"+f[e[t+6]]+f[e[t+7]]+"-"+f[e[t+8]]+f[e[t+9]]+"-"+f[e[t+10]]+f[e[t+11]]+f[e[t+12]]+f[e[t+13]]+f[e[t+14]]+f[e[t+15]]).toLowerCase();if(!c(r))throw TypeError("Stringified UUID is invalid");return r}(n)}var d={msRestVersion:"2.7.0",HTTP:"http:",HTTPS:"https:",HTTP_PROXY:"HTTP_PROXY",HTTPS_PROXY:"HTTPS_PROXY",NO_PROXY:"NO_PROXY",ALL_PROXY:"ALL_PROXY",HttpConstants:{HttpVerbs:{PUT:"PUT",GET:"GET",DELETE:"DELETE",POST:"POST",MERGE:"MERGE",HEAD:"HEAD",PATCH:"PATCH"},StatusCodes:{TooManyRequests:429}},HeaderConstants:{AUTHORIZATION:"authorization",AUTHORIZATION_SCHEME:"Bearer",RETRY_AFTER:"Retry-After",USER_AGENT:"User-Agent"}},y="undefined"!=typeof process&&!!process.version&&!!process.versions&&!!process.versions.node;function m(e){var t={};return t.body=e.bodyAsText,t.headers=e.headers,t.status=e.status,t}function v(e){var t=e.clone();return t.headers&&t.headers.remove("authorization"),t}function g(e){return new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$","ig").test(e)}function b(){return h()}function w(e,t){return new Promise((function(r){return setTimeout((function(){return r(t)}),e)}))}var E=/^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;function R(e){return E.test(e)}function T(e,t,r){return e&&t?e.split(t).join(r||""):e}var 
_=function(){function e(e,t){void 0===e&&(e={}),this.modelMappers=e,this.isXML=t}return e.prototype.validateConstraints=function(e,t,r){var n=function(e,n){throw new Error('"'+r+'" with value "'+t+'" should satisfy the constraint "'+e+'": '+n+".")};if(e.constraints&&null!=t){var o=e.constraints,i=o.ExclusiveMaximum,a=o.ExclusiveMinimum,s=o.InclusiveMaximum,u=o.InclusiveMinimum,l=o.MaxItems,c=o.MaxLength,f=o.MinItems,p=o.MinLength,h=o.MultipleOf,d=o.Pattern,y=o.UniqueItems;if(null!=i&&t>=i&&n("ExclusiveMaximum",i),null!=a&&t<=a&&n("ExclusiveMinimum",a),null!=s&&t>s&&n("InclusiveMaximum",s),null!=u&&tl&&n("MaxItems",l),null!=c&&t.length>c&&n("MaxLength",c),null!=f&&t.length=0&&e[r-1]===t;)--r;return e.substr(0,r)}(i(e),"=").replace(/\+/g,"-").replace(/\//g,"_")}(t)}return t}(r,t):null!==o.match(/^Sequence$/gi)?n=function(e,t,r,n){if(!Array.isArray(r))throw new Error(n+" must be of type Array.");var o=t.type.element;if(!o||"object"!=typeof o)throw new Error('element" metadata for an Array must be defined in the mapper and it must of type "object" in '+n+".");for(var i=[],a=0;a0&&o[o.length-1])||6!==i[0]&&2!==i[0])){a=0;continue}if(3===i[0]&&(!o||i[1]>o[0]&&i[1]'+te.serializeToString(r)}function ne(e){for(var t=[],r=0,n=Object.keys(e);r=200&&r.status<300);a.headersMapper&&(e.parsedHeaders=t.serializer.deserialize(a.headersMapper,e.headers.rawHeaders(),"operationRes.parsedHeaders"))}}else{var u=t.responses.default;if(u){var l=X(t)?"Unexpected status code: "+n:e.bodyAsText,c=new k(l);c.statusCode=n,c.request=v(e.request),c.response=m(e);var f=e.parsedBody;try{if(f){var p=u.bodyMapper;if(p&&"CloudError"===p.serializedName)f.error&&(f=f.error),f.code&&(c.code=f.code),f.message&&(c.message=f.message);else{var h=f;f.error&&(h=f.error),c.code=h.code,h.message&&(c.message=h.message)}if(p){var d=f;t.isXML&&p.type.name===q.Sequence&&(d="object"==typeof f?f[p.xmlElementName]:[]),c.body=t.serializer.deserialize(p,d,"error.body")}}}catch(t){c.message='Error "'+t.message+'" occurred in deserializing the responseBody - "'+e.bodyAsText+'" for the default response.'}return Promise.reject(c)}}}}return Promise.resolve(e)}))}function fe(e,t,r,n){return{create:function(o,i){return new pe(o,i,e,t,r,n)}}}var pe=function(e){function t(t,r,n,o,i,a){var s=e.call(this,t,r)||this;function u(e){return"number"==typeof e}return s.retryCount=u(n)?n:3,s.retryInterval=u(o)?o:3e4,s.minRetryInterval=u(i)?i:3e3,s.maxRetryInterval=u(a)?a:9e4,s}return U(t,e),t.prototype.sendRequest=function(e){var t=this;return this._nextPolicy.sendRequest(e.clone()).then((function(r){return he(t,e,r)})).catch((function(r){return he(t,e,r.response,void 0,r)}))},t}(oe);function he(e,t,r,n,o){n=function(e,t,r){t||(t={retryCount:0,retryInterval:0}),r&&(t.error&&(r.innerError=t.error),t.error=r),t.retryCount++;var n=Math.pow(2,t.retryCount)-1;return n*=.8*e.retryInterval+Math.floor(Math.random()*(1.2*e.retryInterval-.8*e.retryInterval)),t.retryInterval=Math.min(e.minRetryInterval+n,e.maxRetryInterval),t}(e,n,o);var i=t.abortSignal&&t.abortSignal.aborted;if(!i&&function(e,t,r){if(null==t||t<500&&408!==t||501===t||505===t)return!1;if(!r)throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null.");return(r&&r.retryCount)0},e.prototype.set=function(e,t){if(e)if(null!=t){var r=Array.isArray(t)?t:t.toString();this._rawQuery[e]=r}else delete this._rawQuery[e]},e.prototype.get=function(e){return e?this._rawQuery[e]:void 0},e.prototype.toString=function(){var e="";for(var t in this._rawQuery){e&&(e+="&");var 
r=this._rawQuery[t];if(Array.isArray(r)){for(var n=[],o=0,i=r;o0)}(e)){var o=void 0;(null==t?void 0:t.baseUri)&&Ge.includes(null==t?void 0:t.baseUri)&&(o=t.baseUri+"/.default"),r=new Je(e,o)}else r=e;if(r&&!r.signRequest)throw new Error("credentials argument needs to implement signRequest method");if(this._withCredentials=t.withCredentials||!1,this._httpClient=t.httpClient||new B,this._requestPolicyOptions=new ie(t.httpPipelineLogger),Array.isArray(t.requestPolicyFactories))n=t.requestPolicyFactories;else if(n=function(e,t){var r=[];t.generateClientRequestIdHeader&&r.push(de(t.clientRequestIdHeaderName));e&&("function"==typeof e.create?r.push(e):r.push(He(e)));var n=Ke(t.userAgentHeaderName,ve),o=Ke(t.userAgent,ge);n&&o&&r.push(be({key:n,value:o}));var i=I(I({},qe),t.redirectOptions);i.handleRedirects&&r.push(Ce(i.maxRetries));r.push(function(e){return void 0===e&&(e=30),{create:function(t,r){return new Ue(t,r,e)}}}(t.rpRegistrationRetryTimeout)),t.noRetryPolicy||(r.push(fe()),r.push(Le()),r.push(Ye()));r.push(ae(t.deserializationContentTypes)),t.proxySettings?r.push(Ve()):void 0;t.agentSettings&&r.push(Be(t.agentSettings));return r}(r,t),t.requestPolicyFactories){var i=t.requestPolicyFactories(n);i&&(n=i)}this._requestPolicyFactories=n}return t.prototype.sendRequest=function(e){if(null==e||"object"!=typeof e)throw new Error("options cannot be null or undefined and it must be of type object.");var t,n;try{"object"==typeof(n=e)&&"string"==typeof n.url&&"string"==typeof n.method&&"object"==typeof n.headers&&r(n.headers)&&"function"==typeof n.validateRequestProperties&&"function"==typeof n.prepare&&"function"==typeof n.clone?(e.validateRequestProperties(),t=e):t=(t=new C).prepare(e)}catch(e){return Promise.reject(e)}var o=this._httpClient;if(this._requestPolicyFactories&&this._requestPolicyFactories.length>0)for(var i=this._requestPolicyFactories.length-1;i>=0;--i)o=this._requestPolicyFactories[i].create(o,this._requestPolicyOptions);return o.sendRequest(t)},t.prototype.sendOperationRequest=function(t,r,n){"function"==typeof t.options&&(n=t.options,t.options=void 0);var o,i=new C;try{var a=r.baseUrl||this.baseUri;if(!a)throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.");i.method=r.httpMethod,i.operationSpec=r;var s=Re.parse(a);if(r.path&&s.appendPath(r.path),r.urlParameters&&r.urlParameters.length>0)for(var u=0,l=r.urlParameters;u0)for(var p=0,h=r.queryParameters;p0){t.formData={};for(var p=0,h=n.formDataParameters;p0){if(o.isConstant)a=o.defaultValue;else{var s=et(r,n);s.propertyFound||(s=et(t,n));var u=!1;s.propertyFound||(u=o.required||"options"===n[0]&&2===n.length),a=u?o.defaultValue:s.propertyValue}var l=Y(n,o);i.serialize(o,a,l)}}else for(var c in o.required&&(a={}),n){var f=o.type.modelProperties[c],p=n[c],h=e(t,r,p,f,i),d=Y(p,f);i.serialize(f,h,d),void 0!==h&&(a||(a={}),a[c]=h)}return a}(e,t,r.parameterPath,r.mapper,n)}function et(e,t){for(var r={propertyFound:!1},n=0;n> Request: "+JSON.stringify(t.request,void 0,2)),e.logger(">> Response status code: "+t.status);var r=t.bodyAsText;return e.logger(">> Body: "+r),Promise.resolve(t)}(t,e)}))},t}(oe);var nt=d.HeaderConstants,ot=function(){function e(e,t){if(void 0===t&&(t="Bearer"),this.authorizationScheme="Bearer",!e)throw new Error("token cannot be null or undefined.");this.token=e,this.authorizationScheme=t}return e.prototype.signRequest=function(e){return e.headers||(e.headers=new 
o),e.headers.set(nt.AUTHORIZATION,this.authorizationScheme+" "+this.token),Promise.resolve(e)},e}(),it=d.HeaderConstants,at=function(){function e(e,t,r){if(void 0===r&&(r="Basic"),this.authorizationScheme="Basic",null==e||"string"!=typeof e.valueOf())throw new Error("userName cannot be null or undefined and must be of type string.");if(null==t||"string"!=typeof t.valueOf())throw new Error("password cannot be null or undefined and must be of type string.");this.userName=e,this.password=t,this.authorizationScheme=r}return e.prototype.signRequest=function(e){var t=this.userName+":"+this.password,r=this.authorizationScheme+" "+btoa(t);return e.headers||(e.headers=new o),e.headers.set(it.AUTHORIZATION,r),Promise.resolve(e)},e}(),st=function(){function e(e){if(!e||e&&!e.inHeader&&!e.inQuery)throw new Error('options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.');this.inHeader=e.inHeader,this.inQuery=e.inQuery}return e.prototype.signRequest=function(e){if(!e)return Promise.reject(new Error('webResource cannot be null or undefined and must be of type "object".'));if(this.inHeader)for(var t in e.headers||(e.headers=new o),this.inHeader)e.headers.set(t,this.inHeader[t]);if(this.inQuery){if(!e.url)return Promise.reject(new Error("url cannot be null in the request object."));for(var r in e.url.indexOf("?")<0&&(e.url+="?"),this.inQuery)e.url.endsWith("?")||(e.url+="&"),e.url+=r+"="+this.inQuery[r]}return Promise.resolve(e)},e}(),ut=function(e){function t(t){if(!t||t&&"string"!=typeof t)throw new Error("topicKey cannot be null or undefined and must be of type string.");var r={inHeader:{"aeg-sas-key":t}};return e.call(this,r)||this}return U(t,e),t}(st),lt=function(e){function t(t){if(!t||t&&"string"!=typeof t)throw new Error("domainKey cannot be null or undefined and must be of type string.");var r={inHeader:{"aeg-sas-key":t}};return e.call(this,r)||this}return U(t,e),t}(st);e.ApiKeyCredentials=st,e.AzureIdentityCredentialAdapter=Je,e.BaseRequestPolicy=oe,e.BasicAuthenticationCredentials=at,e.Constants=d,e.DefaultHttpClient=B,e.DomainCredentials=lt,e.HttpHeaders=o,e.MapperType=q,e.RequestPolicyOptions=ie,e.RestError=k,e.Serializer=_,e.ServiceClient=We,e.TokenCredentials=ot,e.TopicCredentials=ut,e.URLBuilder=Re,e.URLQuery=Ee,e.WebResource=C,e.agentPolicy=Be,e.applyMixins=function(e,t){t.forEach((function(t){Object.getOwnPropertyNames(t.prototype).forEach((function(r){e.prototype[r]=t.prototype[r]}))}))},e.delay=w,e.deserializationPolicy=ae,e.deserializeResponseBody=ce,e.encodeUri=function(e){return encodeURIComponent(e).replace(/!/g,"%21").replace(/"/g,"%27").replace(/\(/g,"%28").replace(/\)/g,"%29").replace(/\*/g,"%2A")},e.executePromisesSequentially=function(e,t){var r=Promise.resolve(t);return e.forEach((function(e){r=r.then(e)})),r},e.exponentialRetryPolicy=fe,e.flattenResponse=tt,e.generateClientRequestIdPolicy=de,e.generateUuid=b,e.getDefaultProxySettings=Qe,e.getDefaultUserAgentValue=ge,e.isDuration=R,e.isNode=y,e.isValidUuid=g,e.logPolicy=function(e){return void 0===e&&(e=console.log),{create:function(t,r){return new rt(t,r,e)}}},e.promiseToCallback=function(e){if("function"!=typeof e.then)throw new Error("The provided input is not a Promise.");return function(t){e.then((function(e){t(void 0,e)}),(function(e){t(e)}))}},e.promiseToServiceCallback=function(e){if("function"!=typeof e.then)throw new Error("The provided input is not a Promise.");return function(t){e.then((function(e){process.nextTick(t,void 
0,e.parsedBody,e.request,e)}),(function(e){process.nextTick(t,e)}))}},e.proxyPolicy=Ve,e.redirectPolicy=Ce,e.serializeObject=function e(t){if(null!=t){if(t instanceof Uint8Array)return t=i(t);if(t instanceof Date)return t.toISOString();if(Array.isArray(t)){for(var r=[],n=0;n 1 && arguments[1] !== undefined ? arguments[1] : 0;\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!validate(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nexport default stringify;","import rng from './rng.js';\nimport stringify from './stringify.js';\n\nfunction v4(options, buf, offset) {\n options = options || {};\n var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (var i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return stringify(rnds);\n}\n\nexport default v4;","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.7.0\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization 
header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n */\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: 
any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport function __createBinding(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}\r\n\r\nexport function __exportStar(m, exports) {\r\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) exports[p] = m[p];\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n};\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\r\n result.default = mod;\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpClient } from \"./httpClient\";\nimport { HttpHeaders } from \"./httpHeaders\";\nimport { WebResourceLike, TransferProgressEvent } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.agentSettings) {\n throw new Error(\"HTTP agent settings not supported in browser environment\");\n }\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n const listener = () => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n xhr.responseType = request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? 
null : request.body);\n\n if (request.streamResponseBody) {\n return new Promise((resolve, reject) => {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n }\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n) {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest) {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n) {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n xhr.addEventListener(\"abort\", () =>\n reject(\n new RestError(\"The request was aborted\", RestError.REQUEST_ABORTED_ERROR, undefined, request)\n )\n );\n xhr.addEventListener(\"timeout\", () =>\n reject(\n new RestError(\n `timeout of ${xhr.timeout}ms exceeded`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nconst parser = new DOMParser();\n\n// Policy to make our code Trusted Types compliant.\n// https://github.com/w3c/webappsec-trusted-types\n// We are calling DOMParser.parseFromString() to parse XML payload from Azure services.\n// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing\n// according to the spec. There are no HTML/XSS security concerns on the usage of\n// parseFromString() here.\nlet ttPolicy: Pick | undefined;\ntry {\n if (typeof self.trustedTypes !== \"undefined\") {\n ttPolicy = self.trustedTypes.createPolicy(\"@azure/ms-rest-js#xml.browser\", {\n createHTML: (s: any) => s,\n });\n }\n} catch (e) {\n console.warn('Could not create trusted types policy \"@azure/ms-rest-js#xml.browser\"');\n}\n\nexport function parseXML(str: string): Promise {\n try {\n const dom = parser.parseFromString((ttPolicy?.createHTML(str) ?? str) as string, \"application/xml\");\n throwIfError(dom);\n\n const obj = domToObject(dom.childNodes[0]);\n return Promise.resolve(obj);\n } catch (err) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS = \"\";\ntry {\n const invalidXML = (ttPolicy?.createHTML(\"INVALID\") ?? \"INVALID\") as string;\n errorNS =\n parser.parseFromString(invalidXML, \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI! ?? 
\"\";\n} catch (ignored) {\n // Most browsers will return a document containing , but IE will throw.\n}\n\nfunction throwIfError(dom: Document) {\n if (errorNS) {\n const parserErrors = dom.getElementsByTagNameNS(errorNS, \"parsererror\");\n if (parserErrors.length) {\n throw new Error(parserErrors.item(0)!.innerHTML);\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? node : undefined;\n}\n\nfunction domToObject(node: Node): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[\"$\"] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[\"$\"][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[\"_\"] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\n// tslint:disable-next-line:no-null-keyword\nconst doc = document.implementation.createDocument(null, null, null);\nconst serializer = new XMLSerializer();\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const rootName = (opts && opts.rootName) || \"root\";\n const dom = buildNode(obj, rootName)[0];\n return (\n '' + serializer.serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = doc.createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string): Node[] {\n if (typeof obj === \"string\" || typeof obj === \"number\" || typeof obj === \"boolean\") {\n const elem = doc.createElement(elementName);\n elem.textContent = obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = doc.createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === \"$\") {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else {\n for (const child of 
buildNode(obj[key], key)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = 
request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n * and \"plugins\" section in webpack.testconfig.ts.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-command-name\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? 
`${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. 
If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst agentNotSupportedInBrowser = new Error(\"AgentPolicy is not supported in browser environment\");\n\nexport function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw agentNotSupportedInBrowser;\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw agentNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw agentNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ProxySettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst proxyNotSupportedInBrowser = new Error(\"ProxyPolicy is not supported in browser environment\");\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw proxyNotSupportedInBrowser;\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw proxyNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw proxyNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
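The AzureIdentityCredentialAdapter above bridges a modern TokenCredential to the legacy ServiceClientCredentials shape by exchanging getToken() for a Bearer Authorization header. A rough, self-contained sketch of the same pattern (types and names are simplified for illustration; this is not the vendored implementation):

interface MinimalTokenCredential {
  getToken(scopes: string | string[]): Promise<{ token: string; expiresOnTimestamp: number } | null>;
}

// Obtain a token for the given scope and attach it as a Bearer Authorization header.
async function signHeaders(
  credential: MinimalTokenCredential,
  headers: Record<string, string>,
  scope: string = "https://management.azure.com/.default"
): Promise<Record<string, string>> {
  const accessToken = await credential.getToken(scope);
  if (!accessToken) {
    throw new Error(`Could not find token for scope "${scope}"`);
  }
  return { ...headers, authorization: `Bearer ${accessToken.token}` };
}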
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TracingContext } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * Tracing Context for the current request.\n */\n tracingContext?: TracingContext;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n\n /**\n * Claim details to perform the Continuous Access Evaluation authentication flow\n */\n claims?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
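The isTokenCredential helper above distinguishes a TokenCredential from a legacy ServiceClientCredentials implementation by duck typing: getToken must be a function, and if a signRequest method is also present, getToken must declare at least one parameter. Two hypothetical objects, purely to illustrate which shapes pass the check:

// Passes: getToken(scopes) is a function and there is no signRequest.
const modernCredential = {
  getToken: (scopes: string | string[]) => Promise.resolve({ token: "...", expiresOnTimestamp: 0 }),
};

// Fails: signRequest is present and getToken declares no parameters,
// so it is treated as a legacy ServiceClientCredentials instead.
const legacyCredential = {
  getToken: () => Promise.resolve({ accessToken: "...", tokenType: "Bearer" }),
  signRequest: (webResource: unknown) => Promise.resolve(webResource),
};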
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
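ApiKeyCredentials above injects fixed header and/or query parameters into every request it signs. A minimal usage sketch, assuming the package-root exports of @azure/ms-rest-js (the header and query names are made up for illustration):

import { ApiKeyCredentials, WebResource } from "@azure/ms-rest-js";

const credentials = new ApiKeyCredentials({
  inHeader: { "x-api-key": "my-secret-key" },   // hypothetical header name
  inQuery: { "api-version": "2023-01-01" },     // hypothetical query parameter
});

async function example(): Promise<void> {
  const request = new WebResource("https://example.invalid/items", "GET");
  const signed = await credentials.signRequest(request);
  // signed.headers now carries x-api-key, and signed.url gains ?api-version=2023-01-01
  console.log(signed.url, signed.headers.get("x-api-key"));
}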
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.node.js b/node_modules/@azure/ms-rest-js/dist/msRest.node.js new file mode 100644 index 0000000..a87a791 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.node.js @@ -0,0 +1,5387 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var uuid = require('uuid'); +var tslib = require('tslib'); +var http = require('http'); +var https = require('https'); +var node_fetch = _interopDefault(require('node-fetch')); +var FormData = _interopDefault(require('form-data')); +var stream = require('stream'); +var tunnel = require('tunnel'); +var xml2js = require('xml2js'); +var os = require('os'); + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; +} +/** + * A collection of HTTP header key/value pairs. 
+ */ +var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. 
+ * @param value the string to encode + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + var bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.7.0", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. 
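The encodeString, encodeByteArray, and decodeString helpers above are thin wrappers over Node's Buffer. A quick round-trip sketch of the same idea (standalone; it does not import the vendored functions):

// Encode a UTF-8 string to base64 and decode it back.
const encoded = Buffer.from("hello world").toString("base64");   // "aGVsbG8gd29ybGQ="
const decoded = Buffer.from(encoded, "base64").toString("utf8"); // "hello world"

// Byte arrays take the same path via the underlying ArrayBuffer.
const bytes = new Uint8Array([1, 2, 3]);
const bytesAsBase64 = Buffer.from(bytes.buffer).toString("base64"); // "AQID"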
+ * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +function generateUuid() { + return uuid.v4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; +} +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; +} +function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. 
+ * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); +} +var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
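The replaceAll helper above uses the classic split/join idiom to substitute every occurrence of a literal substring without constructing a regular expression. A short sketch of the same pattern (the template and replacement are arbitrary sample values):

// Replace every "{name}" placeholder, as replaceAll above does for URL templates.
const template = "https://example.invalid/{name}/items/{name}";
const expanded = template.split("{name}").join("widgets");
// => "https://example.invalid/widgets/items/widgets"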
+var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; +}()); +function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); +} +function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} +/** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; +} +function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. 
+ for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, responseBody[key], 
objectName); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +// TODO: why is this here? +function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; +} +var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
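+// ---------------------------------------------------------------------------
+// Illustrative usage sketch (not part of the upstream @azure/ms-rest-js
+// source): how the Serializer defined above is typically driven with a simple
+// mapper. The mapper shape shown is an assumption inferred from the
+// serialize()/deserialize() code paths; it is kept in comment form so the
+// module's runtime behavior is unchanged.
+//
+//   var serializer = new Serializer({}, /* isXML */ false);
+//   var nameMapper = {
+//       serializedName: "name",
+//       required: true,
+//       type: { name: MapperType.String },
+//       constraints: { MaxLength: 10 },
+//   };
+//   serializer.serialize(nameMapper, "slim", "options.name");   // -> "slim"
+//   serializer.deserialize(nameMapper, "slim", "options.name"); // -> "slim"
+//   serializer.serialize(nameMapper, "docker-slim-action", "options.name");
+//   //   throws: "options.name" with value "docker-slim-action" should satisfy
+//   //   the constraint "MaxLength": 10.
+// ---------------------------------------------------------------------------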
+function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ +var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. 
Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; +}()); + +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. 
+ */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. 
+ * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. 
+ * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. 
+ * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. 
+ */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. +Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +var RestError = /** @class */ (function (_super) { + tslib.__extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; +}(Error)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var FetchHttpClient = /** @class */ (function () { + function FetchHttpClient() { + } + FetchHttpClient.prototype.sendRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var abortController, abortListener, formData, requestForm_1, appendFormValue, _i, _a, formKey, formValue, j, contentType, body, loadedBytes_1, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, _b, _c, onDownloadProgress_1, responseBody, loadedBytes_2, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResource) cannot be null or undefined and must be of type object."); + } + abortController = new AbortController(); + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + abortListener = function (event) { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(function () { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + formData = httpRequest.formData; + requestForm_1 = new FormData(); + appendFormValue = function (key, value) { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (_i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + formKey = _a[_i]; + formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + 
httpRequest.body = requestForm_1; + httpRequest.formData = undefined; + contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm_1.getBoundary === "function") { + httpRequest.headers.set("Content-Type", "multipart/form-data; boundary=" + requestForm_1.getBoundary()); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + loadedBytes_1 = 0; + uploadReportStream = new stream.Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_1 += chunk.length; + httpRequest.onUploadProgress({ loadedBytes: loadedBytes_1 }); + callback(undefined, chunk); + }, + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + return [4 /*yield*/, this.prepareRequest(httpRequest)]; + case 1: + platformSpecificRequestInit = _d.sent(); + requestInit = tslib.__assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + _d.label = 2; + case 2: + _d.trys.push([2, 8, 9, 10]); + return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)]; + case 3: + response = _d.sent(); + headers = parseHeaders(response.headers); + _b = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? response.body + : undefined + }; + if (!!httpRequest.streamResponseBody) return [3 /*break*/, 5]; + return [4 /*yield*/, response.text()]; + case 4: + _c = _d.sent(); + return [3 /*break*/, 6]; + case 5: + _c = undefined; + _d.label = 6; + case 6: + operationResponse = (_b.bodyAsText = _c, + _b.redirected = response.redirected, + _b.url = response.url, + _b); + onDownloadProgress_1 = httpRequest.onDownloadProgress; + if (onDownloadProgress_1) { + responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + loadedBytes_2 = 0; + downloadReportStream = new stream.Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_2 += chunk.length; + onDownloadProgress_1({ loadedBytes: loadedBytes_2 }); + callback(undefined, chunk); + }, + }); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + length_1 = parseInt(headers.get("Content-Length")) || undefined; + if (length_1) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress_1({ loadedBytes: length_1 }); + } + } + } + return [4 /*yield*/, this.processRequest(operationResponse)]; + case 7: + _d.sent(); + return [2 /*return*/, operationResponse]; + case 8: + error_1 = _d.sent(); + fetchError = error_1; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + throw fetchError; + case 9: + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + 
uploadStreamDone = isStreamComplete(body); + } + downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(function () { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch(function (_e) { }); + } + return [7 /*endfinally*/]; + case 10: return [2 /*return*/]; + } + }); + }); + }; + return FetchHttpClient; +}()); +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise(function (resolve) { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +function parseHeaders(headers) { + var httpHeaders = new HttpHeaders(); + headers.forEach(function (value, key) { + httpHeaders.set(key, value); + }); + return httpHeaders; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * A class that handles the query portion of a URLBuilder. + */ +var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. 
+ */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; +}()); +/** + * A class that handles creating, modifying, and parsing URLs. + */ +var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. 
+ */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" + this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. 
+ */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; +}()); +var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; +}()); +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; +}()); +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. 
+ */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); +} +function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } 
+ else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function createProxyAgent(requestUrl, proxySettings, headers) { + var tunnelOptions = { + proxy: { + host: URLBuilder.parse(proxySettings.host).getHost(), + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = proxySettings.username + ":" + proxySettings.password; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = "" + proxySettings.username; + } + var requestScheme = URLBuilder.parse(requestUrl).getScheme() || ""; + var isRequestHttps = requestScheme.toLowerCase() === "https"; + var proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || ""; + var isProxyHttps = proxyScheme.toLowerCase() === "https"; + var proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var NodeFetchHttpClient = /** @class */ (function (_super) { + tslib.__extends(NodeFetchHttpClient, _super); + function NodeFetchHttpClient() { + return _super !== null && _super.apply(this, arguments) || this; + } + NodeFetchHttpClient.prototype.fetch = function (input, init) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, node_fetch(input, init)]; + }); + }); + }; + NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var requestInit, _a, httpAgent, httpsAgent, tunnel, options, agent; + return tslib.__generator(this, function (_b) { + requestInit = {}; + if (httpRequest.agentSettings) { + _a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } + else if (httpAgent) { + requestInit.agent = httpAgent; + } + } + else if (httpRequest.proxySettings) { + tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + requestInit.agent = tunnel.agent; + } + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } + else { + options = { keepAlive: true }; + agent = httpRequest.url.startsWith("https") + ? new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + return [2 /*return*/, requestInit]; + }); + }); + }; + NodeFetchHttpClient.prototype.processRequest = function (_operationResponse) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + /* no_op */ + return [2 /*return*/]; + }); + }); + }; + return NodeFetchHttpClient; +}(FetchHttpClient)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. + const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. 
+ * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + var result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function isStreamOperation(operationSpec) { + var result = false; + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result = true; + break; + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function stringifyXML(obj, opts) { + var builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} +function parseXML(str) { + var xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise(function (resolve, reject) { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, function (err, res) { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; +}()); +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== exports.HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. 
+ */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} +var defaultJsonContentTypes = ["application/json", "text/json"]; +var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +var DeserializationPolicy = /** @class */ (function (_super) { + tslib.__extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; +}(BaseRequestPolicy)); +function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = hasNoExpectedStatusCodes + ? 
200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? "Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = stripRequest(parsedResponse.request); + error.response = stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = stripRequest(parsedResponse.request); + restError.response = stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +var DEFAULT_CLIENT_RETRY_COUNT = 3; +var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +var ExponentialRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. 
+ * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; +}(BaseRequestPolicy)); +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + tslib.__extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + var runtimeInfo = { + key: "Node", + value: process.version, + }; + var osInfo = { + key: "OS", + value: "(" + os.arch() + "-" + os.type() + "-" + os.release() + ")", + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? "" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); +} +var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +var UserAgentPolicy = /** @class */ (function (_super) { + tslib.__extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +var RedirectPolicy = /** @class */ (function (_super) { + tslib.__extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; +}(BaseRequestPolicy)); +function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); +} +function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} + +function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +var RPRegistrationPolicy = /** @class */ (function (_super) { + tslib.__extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; +}(BaseRequestPolicy)); +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = 
checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; +} +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. 
+ * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ +function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +var SigningPolicy = /** @class */ (function (_super) { + tslib.__extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. 
+ * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +var SystemErrorRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry$1(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; +}(BaseRequestPolicy)); +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry$1(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData$1(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry$1(policy, request, operationResponse, err, retryData) { + return tslib.__awaiter(this, void 0, void 0, function () { + var error_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData$1(policy, retryData, err); + if (!(err && + err.code && + shouldRetry$1(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry$1(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +(function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function agentPolicy(agentSettings) { + return { + create: function (nextPolicy, options) { + return new AgentPolicy(nextPolicy, options, agentSettings); + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + tslib.__extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options, agentSettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.agentSettings = agentSettings; + return _this; + } + AgentPolicy.prototype.sendRequest = function (request) { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + }; + return AgentPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+/** + * @internal + */ +var noProxyList = loadNoProxy(); +var byPassedList = new Map(); +/** + * @internal + */ +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. +// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri) { + if (noProxyList.length === 0) { + return false; + } + var host = URLBuilder.parse(uri).getHost(); + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + var isBypassedFlag = false; + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var pattern = noProxyList_1[_i]; + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + var noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map(function (item) { return item.trim(); }) + .filter(function (item) { return item.length; }); + } + return []; +} +/** + * @internal + */ +function extractAuthFromUrl(url) { + var atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + var schemeIndex = url.indexOf("://"); + var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + var auth = url.substring(authStart, atIndex); + var colonIndex = auth.indexOf(":"); + var hasPassword = colonIndex !== -1; + var username = hasPassword ? auth.substring(0, colonIndex) : auth; + var password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username: username, + password: password, + urlWithoutAuth: urlWithoutAuth, + }; +} +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth; + var parsedUrl = URLBuilder.parse(urlWithoutAuth); + var schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username: username, + password: password, + }; +} +function proxyPolicy(proxySettings) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: function (nextPolicy, options) { + return new ProxyPolicy(nextPolicy, options, proxySettings); + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + tslib.__extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options, proxySettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.proxySettings = proxySettings; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (request) { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + }; + return ProxyPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var StatusCodes = Constants.HttpConstants.StatusCodes; +var DEFAULT_RETRY_COUNT = 3; +function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +var ThrottlingRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return tslib.__awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return 
ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ +var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var accessToken, result; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return tslib.__awaiter(this, void 0, void 0, function () { + var tokenResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? 
void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new NodeFetchHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) 
{ return cb(err); }); + } + return result; + }; + return ServiceClient; +}()); +function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } +} +function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = tslib.__assign(tslib.__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? _response.parsedBody : []; + var arrayResponse = tslib.__spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} +var LogPolicy = /** @class */ (function (_super) { + tslib.__extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; +}(BaseRequestPolicy)); +function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME$1 = "Bearer"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var HeaderConstants$1 = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME$2 = "Basic"; +var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants$1.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * Authenticates to a service using an API key. + */ +var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. 
+ */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var TopicCredentials = /** @class */ (function (_super) { + tslib.__extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; +}(ApiKeyCredentials)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var DomainCredentials = /** @class */ (function (_super) { + tslib.__extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. 
+ * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; +}(ApiKeyCredentials)); + +exports.ApiKeyCredentials = ApiKeyCredentials; +exports.AzureIdentityCredentialAdapter = AzureIdentityCredentialAdapter; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; +exports.Constants = Constants; +exports.DefaultHttpClient = NodeFetchHttpClient; +exports.DomainCredentials = DomainCredentials; +exports.HttpHeaders = HttpHeaders; +exports.MapperType = MapperType; +exports.RequestPolicyOptions = RequestPolicyOptions; +exports.RestError = RestError; +exports.Serializer = Serializer; +exports.ServiceClient = ServiceClient; +exports.TokenCredentials = TokenCredentials; +exports.TopicCredentials = TopicCredentials; +exports.URLBuilder = URLBuilder; +exports.URLQuery = URLQuery; +exports.WebResource = WebResource; +exports.agentPolicy = agentPolicy; +exports.applyMixins = applyMixins; +exports.delay = delay; +exports.deserializationPolicy = deserializationPolicy; +exports.deserializeResponseBody = deserializeResponseBody; +exports.encodeUri = encodeUri; +exports.executePromisesSequentially = executePromisesSequentially; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +exports.flattenResponse = flattenResponse; +exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; +exports.generateUuid = generateUuid; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.getDefaultUserAgentValue = getDefaultUserAgentValue; +exports.isDuration = isDuration; +exports.isNode = isNode; +exports.isValidUuid = isValidUuid; +exports.logPolicy = logPolicy; +exports.promiseToCallback = promiseToCallback; +exports.promiseToServiceCallback = promiseToServiceCallback; +exports.proxyPolicy = proxyPolicy; +exports.redirectPolicy = redirectPolicy; +exports.serializeObject = serializeObject; +exports.signingPolicy = signingPolicy; +exports.stripRequest = stripRequest; +exports.stripResponse = stripResponse; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +exports.userAgentPolicy = userAgentPolicy; +//# sourceMappingURL=msRest.node.js.map diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map b/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map new file mode 100644 index 0000000..4864581 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.node.js","sources":["../lib/httpHeaders.ts","../lib/util/base64.ts","../lib/util/constants.ts","../lib/util/utils.ts","../lib/serializer.ts","../lib/webResource.ts","../node_modules/event-target-shim/src/event.mjs","../node_modules/event-target-shim/src/event-target.mjs","../node_modules/abort-controller/src/abort-signal.ts","../node_modules/abort-controller/src/abort-controller.ts","../lib/restError.ts","../lib/fetchHttpClient.ts","../lib/url.ts","../lib/proxyAgent.ts","../lib/nodeFetchHttpClient.ts","../lib/httpPipelineLogLevel.ts","../node_modules/@azure/core-auth/src/tokenCredential.ts","../lib/operationParameter.ts","../lib/operationSpec.ts","../lib/util/xml.ts","../lib/policies/requestPolicy.ts","../lib/policies/deserializationPolicy.ts","../lib/policies/exponentialRetryPolicy.ts","../lib/policies/generateClientRequestIdPolicy.ts","../lib/policies/msRestUserAgentPolicy.ts","../lib/policies/userAgentPolicy.ts","../lib/policies/redirectPolicy.ts","../lib/policies/rpRegistrationPolicy.ts","../lib/policies/signingPolicy.ts","../lib/policies/systemErrorRetryPolicy.ts","../lib/queryCollectionFormat.ts","../lib/policies/agentPolicy.ts","../lib/policies/proxyPolicy.ts","../lib/policies/throttlingRetryPolicy.ts","../lib/credentials/azureIdentityTokenCredentialAdapter.ts","../lib/serviceClient.ts","../lib/policies/logPolicy.ts","../lib/credentials/tokenCredentials.ts","../lib/credentials/basicAuthenticationCredentials.ts","../lib/credentials/apiKeyCredentials.ts","../lib/credentials/topicCredentials.ts","../lib/credentials/domainCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string) {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: any): object is HttpHeadersLike {\n if (!object || typeof object !== \"object\") {\n return false;\n }\n\n if (\n typeof object.rawHeaders === \"function\" &&\n typeof object.clone === \"function\" &&\n typeof object.get === \"function\" &&\n typeof object.set === \"function\" &&\n typeof object.contains === \"function\" &&\n typeof object.remove === \"function\" &&\n typeof object.headersArray === \"function\" &&\n typeof object.headerValues === \"function\" &&\n typeof object.headerNames === \"function\" &&\n typeof object.toJson === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name.toLowerCase()] = header.value;\n }\n return result;\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(): RawHttpHeaders {\n return this.rawHeaders();\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson());\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n return new HttpHeaders(this.rawHeaders());\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * Encodes a string in base64 format.\n * @param value the string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value the Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value the base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.7.0\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization 
header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n */\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: 
any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event 
bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns 
{Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * 
@returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. 
`eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? 
CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n","import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? \"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport AbortController from \"abort-controller\";\nimport FormData from \"form-data\";\n\nimport { HttpClient } from \"./httpClient\";\nimport { WebResourceLike } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { RestError } from \"./restError\";\nimport { Readable, Transform } from \"stream\";\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\nexport type CommonRequestInfo = string; // we only call fetch() on string urls.\n\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport abstract class FetchHttpClient implements HttpClient {\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResource) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new RestError(\n \"The request was aborted\",\n RestError.REQUEST_ABORTED_ERROR,\n undefined,\n httpRequest\n );\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n 
}\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n let loadedBytes = 0;\n const uploadReportStream = new Transform({\n transform: (chunk: string | Buffer, _encoding, callback) => {\n loadedBytes += chunk.length;\n httpRequest.onUploadProgress!({ loadedBytes });\n callback(undefined, chunk);\n },\n });\n\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n signal: abortController.signal,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: httpRequest.streamResponseBody\n ? ((response.body as unknown) as NodeJS.ReadableStream)\n : undefined,\n bodyAsText: !httpRequest.streamResponseBody ? await response.text() : undefined,\n redirected: response.redirected,\n url: response.url,\n };\n\n const onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n const responseBody: ReadableStream | undefined = response.body || undefined;\n\n if (isReadableStream(responseBody)) {\n let loadedBytes = 0;\n const downloadReportStream = new Transform({\n transform: (chunk: string | Buffer, _encoding, callback) => {\n loadedBytes += chunk.length;\n onDownloadProgress({ loadedBytes });\n callback(undefined, chunk);\n },\n });\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n } else {\n const length = parseInt(headers.get(\"Content-Length\")!) 
|| undefined;\n if (length) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length });\n }\n }\n }\n\n await this.processRequest(operationResponse);\n\n return operationResponse;\n } catch (error) {\n const fetchError: FetchError = error;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(\n fetchError.message,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n httpRequest\n );\n } else if (fetchError.type === \"aborted\") {\n throw new RestError(\n \"The request was aborted\",\n RestError.REQUEST_ABORTED_ERROR,\n undefined,\n httpRequest\n );\n }\n\n throw fetchError;\n } finally {\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n let uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n let downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse?.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(operationResponse!.readableStreamBody);\n }\n\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(() => {\n httpRequest.abortSignal?.removeEventListener(\"abort\", abortListener!);\n return;\n })\n .catch((_e) => {});\n }\n }\n }\n\n abstract async prepareRequest(httpRequest: WebResourceLike): Promise>;\n abstract async processRequest(operationResponse: HttpOperationResponse): Promise;\n abstract async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise;\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable): Promise {\n return new Promise((resolve) => {\n stream.on(\"close\", resolve);\n stream.on(\"end\", resolve);\n stream.on(\"error\", resolve);\n });\n}\n\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? 
this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. 
If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\n\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: URLBuilder.parse(proxySettings.host).getHost() as string,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const requestScheme = URLBuilder.parse(requestUrl).getScheme() || \"\";\n const isRequestHttps = requestScheme.toLowerCase() === \"https\";\n const proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || \"\";\n const isProxyHttps = proxyScheme.toLowerCase() === \"https\";\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\n// Duplicate tunnel.HttpsOverHttpsOptions to avoid exporting createTunnel() with dependency on @types/tunnel\n// createIunnel() is only imported by tests.\nexport interface HttpsProxyOptions {\n host: string;\n port: number;\n localAddress?: string;\n proxyAuth?: string;\n headers?: { [key: string]: any };\n ca?: Buffer[];\n servername?: string;\n key?: Buffer;\n cert?: Buffer;\n}\n\ninterface HttpsOverHttpsOptions {\n maxSockets?: number;\n ca?: Buffer[];\n key?: Buffer;\n cert?: Buffer;\n proxy?: HttpsProxyOptions;\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return 
tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport node_fetch from \"node-fetch\";\n\nimport {\n CommonRequestInfo,\n CommonRequestInit,\n CommonResponse,\n FetchHttpClient,\n} from \"./fetchHttpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\nimport { createProxyAgent, ProxyAgent } from \"./proxyAgent\";\n\nexport class NodeFetchHttpClient extends FetchHttpClient {\n async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise {\n return (node_fetch(input, init) as unknown) as Promise;\n }\n\n async prepareRequest(httpRequest: WebResourceLike): Promise> {\n const requestInit: Partial = {};\n\n if (httpRequest.agentSettings) {\n const { http: httpAgent, https: httpsAgent } = httpRequest.agentSettings;\n if (httpsAgent && httpRequest.url.startsWith(\"https\")) {\n requestInit.agent = httpsAgent;\n } else if (httpAgent) {\n requestInit.agent = httpAgent;\n }\n } else if (httpRequest.proxySettings) {\n const tunnel: ProxyAgent = createProxyAgent(\n httpRequest.url,\n httpRequest.proxySettings,\n httpRequest.headers\n );\n requestInit.agent = tunnel.agent;\n }\n\n if (httpRequest.keepAlive === true) {\n if (requestInit.agent) {\n requestInit.agent.keepAlive = true;\n } else {\n const options: http.AgentOptions | https.AgentOptions = { keepAlive: true };\n const agent = httpRequest.url.startsWith(\"https\")\n ? new https.Agent(options)\n : new http.Agent(options);\n requestInit.agent = agent;\n }\n }\n\n return requestInit;\n }\n\n async processRequest(_operationResponse: HttpOperationResponse): Promise {\n /* no_op */\n return;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TracingContext } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. 
You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * Tracing Context for the current request.\n */\n tracingContext?: TracingContext;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n\n /**\n * Claim details to perform the Continuous Access Evaluation authentication flow\n */\n claims?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as xml2js from \"xml2js\";\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const builder = new xml2js.Builder({\n rootName: (opts || {}).rootName,\n renderOpts: {\n pretty: false,\n },\n });\n return builder.buildObject(obj);\n}\n\nexport function parseXML(str: string): Promise {\n const xmlParser = new xml2js.Parser({\n explicitArray: false,\n explicitCharkey: false,\n explicitRoot: false,\n });\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err: any, res: any) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return 
result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as os from \"os\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\nimport { Constants } from \"../util/constants\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? 
getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: 
https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. 
Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nexport function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new AgentPolicy(nextPolicy, options, agentSettings!);\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n agentSettings: AgentSettings;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n agentSettings: AgentSettings\n ) {\n super(nextPolicy, options);\n this.agentSettings = agentSettings;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.agentSettings) {\n request.agentSettings = this.agentSettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"../util/constants\";\nimport { URLBuilder } from \"../url\";\n\n/**\n * @internal\n */\nexport const noProxyList: string[] = loadNoProxy();\nconst byPassedList: Map = new Map();\n\n/**\n * @internal\n */\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n// Check whether the host of a given `uri` is in the noProxyList.\n// If there's a match, any request sent to the same host won't have the proxy settings set.\n// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\nfunction isBypassed(uri: string): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (byPassedList.has(host)) {\n return byPassedList.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n byPassedList.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport 
function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n if (noProxy) {\n return noProxy\n .split(\",\")\n .map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * @internal\n */\nfunction extractAuthFromUrl(\n url: string\n): { username?: string; password?: string; urlWithoutAuth: string } {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\nexport function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ProxyPolicy(nextPolicy, options, proxySettings!);\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n proxySettings: ProxySettings;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n proxySettings: ProxySettings\n ) {\n super(nextPolicy, options);\n this.proxySettings = proxySettings;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.proxySettings && !isBypassed(request.url)) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","__extends","Transform","tunnel.httpsOverHttps","tunnel.httpsOverHttp","tunnel.httpOverHttps","tunnel.httpOverHttp","https.Agent","http.Agent","HttpPipelineLogLevel","xml2js.Builder","xml2js.Parser","utils.stripRequest","utils.stripResponse","utils\n .delay","utils.generateUuid","os.arch","os.type","os.release","utils\n 
.delay","retry","shouldRetry","updateRetryData","utils.delay","QueryCollectionFormat","MSRestConstants","DefaultHttpClient","utils.prepareXMLRootList","__spreadArrays","utils.isPrimitiveType","DEFAULT_AUTHORIZATION_SCHEME","HeaderConstants","base64.encodeString"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AAEA;;;AAGA,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;SA4Ee,iBAAiB,CAAC,MAAY;IAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,KAAK,CAAC;KACd;IAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;QACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;QAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;QACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;QACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;QACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;IAME,qBAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;KACF;;;;;;;IAQM,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;KACH;;;;;;IAOM,yBAAG,GAAV,UAAW,UAAkB;QAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;KAC3C;;;;IAKM,8BAAQ,GAAf,UAAgB,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;KACrD;;;;;;IAOM,4BAAM,GAAb,UAAc,UAAkB;QAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;KACf;;;;IAKM,gCAAU,GAAjB;QACE,IAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SAClD;QACD,OAAO,MAAM,CAAC;KACf;;;;IAKM,kCAAY,GAAnB;QACE,IAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;KAChB;;;;IAKM,iCAAW,GAAlB;QACE,IAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;KACpB;;;;IAKM,kCAAY,GAAnB;QACE,IAAM,YAAY,GAAa,EAAE,CAAC;QAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;KACrB;;;;IAKM,4BAAM,GAAb;QACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;KAC1B;;;;IAKM,8BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;KACtC;;;;IAKM,2BAAK,GAAZ;QACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;KAC3C;IACH,kBAAC;AAAD,CAAC;;ACrOD;AACA;AAEA;;;;AAIA,SAAgB,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;;AAIA,SAAgB,eAAe,CAAC,KAAiB;;;IAG/C,IAAM,WAAW,GAAG,KAAK,YAAY,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;;AAIA,SAAgB,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC;;AC5BD;AACA;AAEA,IAAa,SAAS,GAAG;;;;;;IAMvB,aAAa,EAAE,OAAO;;;;;;;IAQtB,IAAI,EAAE,OAAO;;;;;;;IAQb,KAAK,EAAE,QAAQ;;;;;;;IAQf,UAAU,EAAE,YAAY;;;;;;;IAQxB,
WAAW,EAAE,aAAa;;;;IAK1B,QAAQ,EAAE,UAAU;;;;IAKpB,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;;;;;;;QAOb,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;SACrB;KACF;;;;IAKD,eAAe,EAAE;;;;;;;QAOf,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;;;;;;;;;QAU9B,WAAW,EAAE,aAAa;;;;;;;QAQ1B,UAAU,EAAE,YAAY;KACzB;CACF;;AC3GD;AACA,AAQA;;;AAGA,IAAa,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B,AAUA;;;;;;AAMA,SAAgB,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;;;AAQA,SAAgB,aAAa,CAAC,QAA+B;IAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;;;AAQA,SAAgB,YAAY,CAAC,OAAwB;IACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;;;AAOA,SAAgB,WAAW,CAAC,IAAY;IACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;IACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,AA4BA;;;;;AAKA,SAAgB,YAAY;IAC1B,OAAOA,OAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;;;;AAWA,SAAgB,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;IACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;QACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;KACtC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,AAeA;;;;;;AAMA,SAAgB,KAAK,CAAI,CAAS,EAAE,KAAS;IAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,GAAA,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;AACvE,CAAC;AAqBD;;;;;;AAMA,SAAgB,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAY;QAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;YACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;SACrB,EACD,UAAC,GAAU;YACT,EAAE,CAAC,GAAG,CAAC,CAAC;SACT,CACF,CAAC;KACH,CAAC;AACJ,CAAC;AAED;;;;;AAKA,SAAgB,wBAAwB,CAAI,OAAuC;IACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAsB;QAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;YAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;SAC3E,EACD,UAAC,GAAU;YACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;SAC3B,CACF,CAAC;KACH,CAAC;AACJ,CAAC;AAED,SAAgB,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;IAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;AAChC,CAAC;AAED;;;;;AAKA,SAAgB,WAAW,CAAC,UAAe,EAAE,WAAkB;IAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;QAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;YAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;SAC1D,CAAC,CAAC;KACJ,CAAC,CAAC;AACL,CAAC;AAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;AAElM;;;;;AAKA,SAAgB,UAAU,CAAC,KAAa;IACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;;AAOA,SAAgB,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;;AAMA,SAAgB,eAAe,CAAC,KAAU;IACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,K
AAK,UAAU,KAAK,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC;;AC/RD;AACA;IAME,oBACkB,YAAyC,EACzC,KAAe;QADf,6BAAA,EAAA,iBAAyC;QAAzC,iBAAY,GAAZ,YAAY,CAA6B;QACzC,UAAK,GAAL,KAAK,CAAU;KAC7B;IAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;QAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;YACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;SACH,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;gBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAA,CAAC,EAC5E;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;KACF;;;;;;;;;;;;IAaD,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;QACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;SAC9B;;;;;;;;;;QAYO,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;;YAEL,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBACxC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;gBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAe,MAAoB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC
,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACrF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACzF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACvF;SACF;QACD,OAAO,OAAO,CAAC;KAChB;;;;;;;;;;;;IAaD,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;QAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;;;gBAIzE,YAAY,GAAG,EAAE,CAAC;aACnB;;YAED,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;YAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;SAC/F;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;;;;;;gBAMd,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;oBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBAClC;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;gBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;gBACzF,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;gBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;aAClC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;aACxC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAGC,YAAmB,CAAC,YAAY,CAAC,CAAC;aAC7C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;aAC9C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC7F;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;KAChB;IACH,iBAAC;AAAD,CAAC,IAAA;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,EAAE,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;;IAED,IAAM,GAAG,GAAGC,eAAsB,CAAC,MAAM,CAAC,CAAC;;IAE3C,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;;IAED,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAA
C,CAAC;;IAElD,OAAOD,YAAmB,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,IAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;YAAxB,IAAM,IAAI,iBAAA;YACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YAC9C,IAAI,EAAE,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAIE,WAAiB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;aACH;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,EAAE,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,EAAE,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;aACH;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;KACH;IACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;QACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;KACvB,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAGD,eAAsB,CAAC,KAAK,CAAC,CAAC;KACvC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAClC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YACvC,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAAY
,IAAI;sBACjB,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;sBACpC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;YACzD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IAAI,CAACE,UAAgB,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;aACH;YACD,KAAK,GAAG,KAAK,CAAC;SACf;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;IAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;KACzD;IACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,SAAS,GAAG,EAAE,CAAC;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;KACzE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;IAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;KAC1D;IACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IACpC,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;QAAlC,IAAM,GAAG,SAAA;QACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;KAC5F;IACD,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;AAKA,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;QACxC,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACb,4BAAyB,UAAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;SACH;QAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;QACvD,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;SACnF;QACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,qDAAqD;iBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;SACH;KACF;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;KACzE;IAED,IAA
I,MAAM,IAAI,SAAS,EAAE;QACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;aACV;YAED,IAAI,QAAQ,SAAoB,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;iBACnC;qBAAM;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;iBACpE;aACF;iBAAM;gBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;gBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;gBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAAzB,IAAM,QAAQ,cAAA;oBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;qBAC7B;oBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;iBACvC;aACF;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;sBAChC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;sBAChD,UAAU,CAAC;gBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;gBAC5F,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;oBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;iBACrC;gBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;gBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;oBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;;;;wBAIjC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;wBACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC5C;yBAAM,IAAI,cAAc,CAAC,YAAY,EAAE;wBACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;qBAChF;yBAAM;wBACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC1C;iBACF;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;oCAC/B,cAAc;gBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,GAAA,CAAC,CAAC;gBAC5E,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;iBACH;;YARH,KAAK,IAAM,cAAc,IAAI,MAAM;wBAAxB,cAAc;aASxB;SACF;QAED,OAAO,OAAO,CAAC;KAChB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB;IAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAC3C,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;KACnF;IAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;QAAtC,IAAM,GAAG,SAAA;QACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;QACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;YACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;SACxD;QAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;QAC3F,IAAI,sBAAsB,EAAE;YAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAA9C,IAAM,SAAS,SAAA;gBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;oBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAA
sB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;iBACH;gBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;aACtC;YACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;SAC5B;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;gBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;aACH;iBAAM;gBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;gBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;oBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;oBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;oBAC3D,IAAI,kBAAkB,EAAE;wBACtB,iBAAiB,GAAG,EAAE,CAAC;qBACxB;iBACF;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;aACH;SACF;aAAM;;YAEL,IAAI,gBAAgB,SAAA,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;;YAEvB,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;gBAArB,IAAM,IAAI,cAAA;gBACb,IAAI,CAAC,GAAG;oBAAE,MAAM;gBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;aACjB;YACD,gBAAgB,GAAG,GAAG,CAAC;YACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;;;;;;;;;;YAUtE,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;gBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;aAC1C;YAED,IAAI,eAAe,SAAA,CAAC;;YAEpB,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;gBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;gBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;;;gBAGF,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;oBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;wBACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;qBAC5B;iBACF;gBACD,QAAQ,GAAG,aAAa,CAAC;aAC1B;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;gBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;aACjC;SACF;KACF;IAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IACpE,IAAI,0BAA0B,EAAE;QAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;YACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;gBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;oBACjC,OAAO,KAAK,CAAC;iBACd;aACF;YACD,OAAO,IAAI,CAAC;SACb,CAAC;QAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;YAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;aACH;SACF;KACF;SAAM,IAAI,YAAY,EAAE;QACvB,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;gBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;gBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;aACnC;SACF;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;;IAGlB,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;SACpF;QACD,OAAO,cAAc,CAAC;KACvB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS
,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;;IAGlB,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;;YAEhC,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;SACxF;QACD,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;IAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5F,IAAI,wBAAwB,EAAE;QAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;YAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;gBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;gBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;sBAC3B,kBAAkB;sBAClB,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;gBACrF,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;iBAC5B;aACF;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;IAEvB,QACE,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,EACpE;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;IAClF,QACE,QAAQ;QACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,EAC/D;AACJ,CAAC;AAoHD;AACA,SAAgB,eAAe,CAAC,WAAgB;IAC9C,IAAI,WAAW,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;QACrC,WAAW,GAAGF,eAAsB,CAAC,WAAW,CAAC,CAAC;QAClD,OAAO,WAAW,CAAC;KACpB;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;QACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;KAClC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,IAAM,KAAK,GAAG,EAAE,CAAC;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QACD,OAAO,KAAK,CAAC;KACd;SAAM,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;SAC/D;QACD,OAAO,UAAU,CAAC;KACnB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;AAGA,SAAS,OAAO,CAAmB,CAAW;IAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;QAAhB,IAAM,GAAG,UAAA;QACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,IAAa,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;CACX,CAAC;;ACtiCF;AACA,SAgKgB,iBAAiB,CAAC,MAAW;IAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,OAAO,KAAK,CAAC;KACd;IACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;QAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;QACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;QAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;QACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;QACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;QACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;AAQA;IAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;QAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;QA
CrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;KACpC;;;;;;IAOD,+CAAyB,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;KACF;;;;;;IAOD,6BAAO,GAAP,UAAQ,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ;aACvF,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;;QAGD,IAAI,OAAO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;;QAGD,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;;QAG1D,IAAI,OAAO,CAAC,YAAY,EAAE;YAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;YACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,KAAG,GACL,OAAO;iBACN,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;iBACjC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAY,CAAC,CAAC;YACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;YAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,gBAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,EAAE,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;6BAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;6BACJ,8EAA0E,aAAa,kCAA6B,CAAA;6BACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,KAAG,GAAG,KAAG,CAAC,
OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;iBACF,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;SAChB;;QAGD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,yFAAqF;oBACrF,mJAA2I,CAC9I,CAAC;aACH;;YAED,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;;YAED,IAAM,WAAW,GAAG,EAAE,CAAC;;YAEvB,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF;;YAED,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;;QAGD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAAlD,IAAM,UAAU,SAAA;gBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;SAC5D;;QAGD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;;QAGD,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,IAAI,SAAS,EAAE;;YAE7B,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;QAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QAErD,OAAO,IAAI,CAAC;KACb;;;;;IAMD,2BAAK,GAAL;QACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,C
AAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;KACf;IACH,kBAAC;AAAD,CAAC;;;;;;;AChhBD;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,
IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASM,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQM,SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASM,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASM,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASM,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAA
G,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAA
C,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;;;AC1WD;;;;AAIA,MAAqB,WAAY,SAAQ,WAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACD,oBAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD
;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;AC5DD;AACA;IAK+BG,mCAAK;IAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;QANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;QAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;KAClD;IAzBe,4BAAkB,GAAW,oBAAoB,CAAC;IAClD,+BAAqB,GAAW,uBAAuB,CAAC;IACxD,qBAAW,GAAW,aAAa,CAAC;IAwBtD,gBAAC;CAAA,CA3B8B,KAAK;;ACNpC;AACA,AAgCA;IAAA;KAwMC;IAvMO,qCAAW,GAAjB,UAAkB,WAA4B;;;;;;wBAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnD,MAAM,IAAI,KAAK,CACb,qFAAqF,CACtF,CAAC;yBACH;wBAEK,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;wBAE9C,IAAI,WAAW,CAAC,WAAW,EAAE;4BAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gCACnC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;6BACH;4BAED,aAAa,GAAG,UAAC,KAAY;gCAC3B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oCAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iCACzB;6BACF,CAAC;4BACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;yBAClE;wBAED,IAAI,WAAW,CAAC,OAAO,EAAE;4BACvB,UAAU,CAAC;gCACT,eAAe,CAAC,KAAK,EAAE,CAAC;6BACzB,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;yBACzB;wBAED,IAAI,WAAW,CAAC,QAAQ,EAAE;4BAClB,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;4BACrC,gBAAc,IAAI,QAAQ,EAAE,CAAC;4BAC7B,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;;gCAE9C,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oCAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iCACjB;gCACD,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oCAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iCACrD;qCAAM;oCACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iCAChC;6BACF,CAAC;4BACF,WAA2C,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gCAAlC,OAAO;gCACV,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gCACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oCAC5B,KAAS,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wCACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qCACxC;iCACF;qCAAM;oCACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iCACrC;6BACF;4BAED,WAAW,CAAC,IAAI,GAAG,aAAW,CAAC;4BAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;4BAC3B,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;4BAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gCACpE,IAAI,OAAO,aAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oCACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,mCAAiC,aAAW,CAAC,WAAW,EAAI,CAC7D,CAAC;iCACH;qCAAM;;oCAEL,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iCAC5C;6BACF;yBACF;wBAEG,IAAI,GAAG,WAAW,CAAC,IAAI;8BACvB,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;kCACpC,WAAW,CAAC,IAAI,EAAE;kCAClB,WAAW,CAAC,IAAI;8BAClB,SAAS,CAAC;wBACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;4BAChD,gBAAc,CAAC,CAAC;4BACd,kBAAkB,GAAG,IAAIC,gBAAS,CAAC;gCA
CvC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;oCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;oCAC5B,WAAW,CAAC,gBAAiB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;oCAC/C,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;iCAC5B;6BACF,CAAC,CAAC;4BAEH,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;6BAC/B;iCAAM;gCACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;6BAC9B;4BAED,IAAI,GAAG,kBAAkB,CAAC;yBAC3B;wBAEyD,qBAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,EAAA;;wBAFK,2BAA2B,GAAyB,SAEzD;wBAEK,WAAW,oBACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAE,eAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;;;;wBAIiC,qBAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,EAAA;;wBAAzE,QAAQ,GAAmB,SAA8C;wBAEzE,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;;4BAE7C,OAAO,EAAE,OAAO;4BAChB,OAAO,EAAE,WAAW;4BACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;4BACvB,kBAAkB,EAAE,WAAW,CAAC,kBAAkB;kCAC5C,QAAQ,CAAC,IAA0C;kCACrD,SAAS;;6BACD,CAAC,WAAW,CAAC,kBAAkB,EAA/B,wBAA+B;wBAAG,qBAAM,QAAQ,CAAC,IAAI,EAAE,EAAA;;wBAArB,KAAA,SAAqB,CAAA;;;wBAAG,KAAA,SAAS,CAAA;;;wBAPjF,iBAAiB,IAOf,aAAU,KAAqE;4BAC/E,aAAU,GAAE,QAAQ,CAAC,UAAU;4BAC/B,MAAG,GAAE,QAAQ,CAAC,GAAG;+BAClB,CAAC;wBAEI,uBAAqB,WAAW,CAAC,kBAAkB,CAAC;wBAC1D,IAAI,oBAAkB,EAAE;4BAChB,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;4BAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;gCAC9B,gBAAc,CAAC,CAAC;gCACd,oBAAoB,GAAG,IAAIA,gBAAS,CAAC;oCACzC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;wCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;wCAC5B,oBAAkB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;wCACpC,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;qCAC5B;iCACF,CAAC,CAAC;gCACH,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;gCACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;6BAC7D;iCAAM;gCACC,WAAS,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;gCACrE,IAAI,QAAM,EAAE;;oCAEV,oBAAkB,CAAC,EAAE,WAAW,EAAE,QAAM,EAAE,CAAC,CAAC;iCAC7C;6BACF;yBACF;wBAED,qBAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,EAAA;;wBAA5C,SAA4C,CAAC;wBAE7C,sBAAO,iBAAiB,EAAC;;;wBAEnB,UAAU,GAAe,OAAK,CAAC;wBACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;4BACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;6BAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;4BACxC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;wBAED,MAAM,UAAU,CAAC;;;wBAGjB,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;4BACxC,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;6BAC3C;4BACG,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;gCAC3D,kBAAkB,GAAG,gBAAgB,CAAC,iBAAkB,CAAC,kBAAkB,CAAC,CAAC;6BAC9E;4BAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;iCAChD,IAAI,CAAC;;gCACJ,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,EAAE;gCACtE,OAAO;6BACR,CAAC;iCACD,KAAK,CAAC,UAAC,EAAE,KAAO,CAAC,CAAC;yBACtB;;;;;;KAEJ;IAKH,sBAAC;AAAD,CAAC,IAAA;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB;IACxC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO;QACzB,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC1B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;KAC7B,CAAC,CAAC;AACL,CAAC;AAED,SAAgB,YAAY,CAAC,OAAgB;IAC3C,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,GAAG;QACzB,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;KAC7B,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC;;AC/PD;AACA,AAMA;;;AAGA;IAAA;QACmB,cAAS,GAAwD,EAAE,CAAC;KAqHtF;;;;IAhHQ,sBAAG,GAAV;QACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,C
AAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;KAC/C;;;;;;IAOM,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;QACnD,IAAI,aAAa,EAAE;YACjB,IAAI,cAAc,IAAI,SAAS,EAAE;gBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,cAAc,GAAG,cAAc,CAAC,QAAQ,EAAE,CAAC;gBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;KACF;;;;;IAMM,sBAAG,GAAV,UAAW,aAAqB;QAC9B,OAAO,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,CAAC;KAClE;;;;IAKM,2BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;oBAA/C,IAAM,qBAAqB,uBAAA;oBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;KACf;;;;IAKa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY;oBAClB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB;4BACtB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB;4BACtB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;KACf;IACH,eAAC;AAAD,CAAC,IAAA;AAED;;;AAGA;IAAA;KAiPC;;;;;IAtOQ,8BAAS,GAAhB,UAAiB,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;KACF;;;;IAKM,8BAAS,GAAhB;QACE,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAiC;QAC9C,IAAI,IAAI,IAAI,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;YACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;;;gBAGvD,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;KACF;;;;;IAMM,+BAAU,GAAjB,UAAkB,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GA
AG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;IAKM,6BAAQ,GAAf,UAAgB,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;KACF;;;;;;IAOM,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;QAC3E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;KACF;;;;;IAMM,2CAAsB,GAA7B,UAA8B,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC;KACtE;;;;IAKM,6BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,SAAS,CAAC;KACzD;;;;IAKO,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;QACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI;oBAChB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;iBAC/D;aACF;SACF;KACF;IAEM,6BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;KACf;;;;;IAMM,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;KACF;IAEa,gBAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;KACf;IACH,iBAAC;AAAD,CAAC,IAAA;AAMD;IACE,kBAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;KAAI;IAEzE,eAAM,GAApB,UAAqB,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;KACrC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,OAAO,IAAI
,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;KACpC;IACH,eAAC;AAAD,CAAC,IAAA;AAED;;;;AAIA,SAAgB,uBAAuB,CAAC,SAAiB;IACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,QACE,CAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE;SACpD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC;SACrD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,YACvD;AACJ,CAAC;AAED;;;AAGA;IAME,sBAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,GAAG,KAAK,GAAG,gBAAgB,CAAC;QACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;KACxB;;;;;IAMM,8BAAO,GAAd;QACE,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;;;;IAKM,2BAAI,GAAX;QACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa;gBACxB,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;KAC7B;IACH,mBAAC;AAAD,CAAC,IAAA;AAED;;;AAGA,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;AAGA,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;;AAGA,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;;AAIA,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;KACjC;AACH,CAAC;AAED;;;;AAIA,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;;AAIA,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;AAIA,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,SAAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,GAAA,CAAC,CAAC;AACzF,CAAC;AAED;;;;AAIA,SAAS,kBAAkB,CAAC,SAAuB;IAAE,+BAAkC;SAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;QAAlC,8CAAkC;;IACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,C
AAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC;;AClpBD;AACA,SAWgB,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,IAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY;YAC9D,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAM,aAAa,CAAC,QAAQ,SAAI,aAAa,CAAC,QAAU,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,KAAG,aAAa,CAAC,QAAU,CAAC;KAC9D;IAED,IAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IACrE,IAAM,cAAc,GAAG,aAAa,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAC/D,IAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC3E,IAAM,YAAY,GAAG,WAAW,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAE3D,IAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAwBD,SAAgB,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAAoC;IAEpC,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAOC,qBAAqB,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAOC,oBAAoB,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAOC,oBAAoB,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAOC,mBAAmB,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC;;AChFD;AACA;IAgByCL,6CAAe;IAAxD;;KA2CC;IA1CO,mCAAK,GAAX,UAAY,KAAwB,EAAE,IAAwB;;;gBAC5D,sBAAQ,UAAU,CAAC,KAAK,EAAE,IAAI,CAAwC,EAAC;;;KACxE;IAEK,4CAAc,GAApB,UAAqB,WAA4B;;;;gBACzC,WAAW,GAA2C,EAAE,CAAC;gBAE/D,IAAI,WAAW,CAAC,aAAa,EAAE;oBACvB,KAAyC,WAAW,CAAC,aAAa,EAA1D,SAAS,UAAA,EAAS,UAAU,WAAA,CAA+B;oBAC
zE,IAAI,UAAU,IAAI,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;wBACrD,WAAW,CAAC,KAAK,GAAG,UAAU,CAAC;qBAChC;yBAAM,IAAI,SAAS,EAAE;wBACpB,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;qBAC/B;iBACF;qBAAM,IAAI,WAAW,CAAC,aAAa,EAAE;oBAC9B,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;oBACF,WAAW,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;iBAClC;gBAED,IAAI,WAAW,CAAC,SAAS,KAAK,IAAI,EAAE;oBAClC,IAAI,WAAW,CAAC,KAAK,EAAE;wBACrB,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;qBACpC;yBAAM;wBACC,OAAO,GAA2C,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;wBACtE,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC;8BAC7C,IAAIM,WAAW,CAAC,OAAO,CAAC;8BACxB,IAAIC,UAAU,CAAC,OAAO,CAAC,CAAC;wBAC5B,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;qBAC3B;iBACF;gBAED,sBAAO,WAAW,EAAC;;;KACpB;IAEK,4CAAc,GAApB,UAAqB,kBAAyC;;;;gBAE5D,sBAAO;;;KACR;IACH,0BAAC;AAAD,CA3CA,CAAyC,eAAe;;ACjBxD;AACA,AAKA,WAAY,oBAAoB;;;;IAI9B,6DAAG,CAAA;;;;IAKH,iEAAK,CAAA;;;;IAKL,qEAAO,CAAA;;;;IAKP,+DAAI,CAAA;AACN,CAAC,EApBWC,4BAAoB,KAApBA,4BAAoB,QAoB/B;;AC1BD;AACA;AA2EA;;;;;AAKA,SAAgB,iBAAiB,CAAC,UAAmB;;;;;;IAMnD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,QACE,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;SAC5C,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAChF;AACJ,CAAC;;AChGD;AACA;AAkDA;;;;;AAKA,SAAgB,0BAA0B,CAAC,SAA6B;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAED,SAAgB,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;IAEd,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;KACxB;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAClC;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;KACjC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;;ACzED;AACA,SAmFgB,iBAAiB,CAAC,aAA4B;IAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QACjF,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,GAAG,IAAI,CAAC;YACd,MAAM;SACP;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;;ACjGD;AACA,SAIgB,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,OAAO,GAAG,IAAIC,cAAc,CAAC;QACjC,QAAQ,EAAE,CAAC,IAAI,IAAI,EAAE,EAAE,QAAQ;QAC/B,UAAU,EAAE;YACV,MAAM,EAAE,KAAK;SACd;KACF,CAAC,CAAC;IACH,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAgB,QAAQ,CAAC,GAAW;IAClC,IAAM,SAAS,GAAG,IAAIC,aAAa,CAAC;QAClC,aAAa,EAAE,KAAK;QACpB,eAAe,EAAE,KAAK;QACtB,YAAY,EAAE,KAAK;KACpB,CAAC,CAAC;IACH,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAQ,EAAE,GAAQ;gBAC5C,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;aACF,CAAC,CAAC;SACJ;KACF,CAAC,CAAC;AACL,CAAC;;AClCD;AACA;IAmBE,2BACW,WAA0B,EAC1B,QAAkC;QADlC,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAA0B;KACzC;;;;;;IASG,qCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;KAC1C;;;;;;;IAQM,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;KACtC;IACH,wBAAC;AAAD,CAAC,IAAA;AAsBD;;;AAGA;IACE,8BAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;KAAI;;;;;;IAO7C,wCAAS,GAAhB,UAAiB,QAA8B;QAC7C,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAKF,4BAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;KACH;;;;;;;IAQM,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;KACF;IACH,2BAAC;AAAD,CAAC;;ACjGD;AACA,AAmCA;;;;AAIA,SAAgB,qBAAqB,CACnC,2BAAyD;IAEzD,OAAO;QACL,MAAM,EAAE,UAAC,U
AAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAED,AAAO,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,AAAO,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAElF;;;;AAIA;IAA2CR,+CAAiB;IAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;QAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;QAJC,KAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;QAC/F,KAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;;KAC9F;IAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;gBAC/C,sBAAO,IAAI,CAAC,WAAW;yBACpB,WAAW,CAAC,OAAO,CAAC;yBACpB,IAAI,CAAC,UAAC,QAA+B;wBACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;qBAAA,CAC/E,EAAC;;;KACL;IACH,4BAAC;AAAD,CAxBA,CAA2C,iBAAiB,GAwB3D;AAED,SAAS,oBAAoB,CAC3B,cAAqC;IAErC,IAAI,MAAqC,CAAC;IAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;IACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;IACvE,IAAI,aAAa,EAAE;QACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;SACzD;aAAM;YACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;SACjE;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC;IACtE,IAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAC3C,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;KACf;SAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;KAC5B;SAAM;QACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;KAC5C;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAgB,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;IAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;QAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;QAC7E,IAAI,iBAAiB,EAAE;YACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;YACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;gBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;gBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;gBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;qBAC/B,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;gBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;gBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;sBAC1D,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;sBACrC,CAAC,CAAC,YAAY,CAAC;gBACnB,IAAI,CAAC,oBAAoB,EAAE;oBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;oBAC/E,IAAI,mBAAmB,EAAE;wBACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;8BAChE,6BAA2B,UAAY;8BACtC,cAAc,CAAC,UAAqB,CAAC;wBAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;wBACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;wBAC9B,KAAK,CAAC,OAAO,GAAGW,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;wBAC3D,KAAK,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;wBAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;wBAC5E,IAAI;4BACF,IAAI,mBAAmB,EAAE;gCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;gCACjC,IACE,yBAAyB;oCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;oCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCACjD;oCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;wCAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;qCACvC;oCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;wCAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;qCAC7C;iCACF;qCAAM;oCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;oCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCAC3C;oCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;oCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;wCACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;qCACvC;iCACF;gCAED,IAAI,yBAAyB,EAAE;oCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;oCAClD,IACE,aAAa,CAAC,KAAK;wCACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;wCACA,kBAAkB;4CAChB,OAAO,mBAAmB,KAAK,QAAQ;kDACnC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;kDAC9D,EAAE,CAAC;qCACV;oCACD,KAAK
,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;iCACH;6BACF;yBACF;wBAAC,OAAO,YAAY,EAAE;4BACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;yBAC7J;wBACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;qBAC9B;iBACF;qBAAM,IAAI,YAAY,EAAE;oBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;wBAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;wBACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpF,kBAAkB;gCAChB,OAAO,kBAAkB,KAAK,QAAQ;sCAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;sCAC3D,EAAE,CAAC;yBACV;wBACD,IAAI;4BACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;yBACH;wBAAC,OAAO,KAAK,EAAE;4BACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;4BACF,SAAS,CAAC,OAAO,GAAGD,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC/D,SAAS,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;4BACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;yBAClC;qBACF;yBAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;wBAE9C,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;qBAC7E;oBAED,IAAI,YAAY,CAAC,aAAa,EAAE;wBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;qBACH;iBACF;aACF;SACF;QACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;KACxC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;IAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;QACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;QACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;QAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;KAC1B,CAAC;IAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;QACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;QAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;cAC5C,EAAE;cACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,GAAA,CAAC,CAAC;QACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;YAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EACjF;YACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;gBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;aAC5B,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EAAE;YAC3F,OAAO,QAAQ,CAAC,MAAI,CAAC;iBAClB,IAAI,CAAC,UAAC,IAAI;gBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;gBACpC,OAAO,iBAAiB,CAAC;aAC1B,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C,CAAC;;ACrSD;AACA,SAyBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;AACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEnD;;;;AAIA;IAA4CZ,gDAAiB;;;;;;;;;;IA2B3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAXC,SAAS,QAAQ,CAAC,CAAM;YACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;SAC9B;QACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;QACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,
GAAG,aAAa,GAAG,6BAA6B,CAAC;QAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;cAC9C,gBAAgB;cAChB,iCAAiC,CAAC;QACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;cAC9C,gBAAgB;cAChB,iCAAiC,CAAC;;KACvC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAKC;QAJC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC;aAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;KAC7E;IACH,6BAAC;AAAD,CAvDA,CAA4C,iBAAiB,GAuD5D;AAED;;;;;;;;AAQA,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;IAEpB,IACE,UAAU,IAAI,SAAS;SACtB,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;QACxC,UAAU,KAAK,GAAG;QAClB,UAAU,KAAK,GAAG,EAClB;QACA,OAAO,KAAK,CAAC;KACd;IAED,IAAI,YAAoB,CAAC;IACzB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;;AAOA,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;IAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;QAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;IAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;QAC7E,OAAOa,KACC,CAAC,SAAS,CAAC,aAAa,CAAC;aAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAA,CAAC;aAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,GAAA,CAAC;aAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,GAAA,CAAC,CAAC;KACrE;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;QAEjD,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;QACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;SAAM;QACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAClC;AACH,CAAC;;AC3ND;AACA,SAYgB,6BAA6B,CAC3C,mBAA8C;IAA9C,oCAAA,EAAA,8CAA8C;IAE9C,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAED;IAAmDb,uDAAiB;IAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;QAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;KAGrC;IAEM,mDAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAEc,YAAkB,EAAE,CAAC,CAAC;SACtE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,oCAAC;AAAD,CAfA,CAAmD,iBAAiB,GAenE;;ACtCD;AACA,SAMgB,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,SAAgB,uBAAuB;IACrC,IAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,IAAM,MAAM,GAAG;QACb,GAAG,EAAE
,IAAI;QACT,KAAK,EAAE,MAAIC,OAAO,EAAE,SAAIC,OAAO,EAAE,SAAIC,UAAU,EAAE,MAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC;;ACvBD;AACA,AAgBA,SAAS,cAAc;IACrB,IAAM,aAAa,GAAG;QACpB,GAAG,EAAE,YAAY;QACjB,KAAK,EAAE,SAAS,CAAC,aAAa;KAC/B,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;IADpB,6BAAA,EAAA,kBAAkB;IAClB,+BAAA,EAAA,oBAAoB;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,UAAC,IAAI;QACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,GAAG,EAAE,CAAC;QACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;KAC9B,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,AAAO,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,SAAgB,wBAAwB;IACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAgB,eAAe,CAAC,aAA6B;IAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,GAAG,sBAAsB,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC;IAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;UAC9C,wBAAwB,EAAE;UAC1B,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAqCjB,yCAAiB;IACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,WAAmB;QAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;QANU,iBAAW,GAAX,WAAW,CAAe;QAC1B,cAAQ,GAAR,QAAQ,CAA0B;QACjC,eAAS,GAAT,SAAS,CAAQ;QACjB,iBAAW,GAAX,WAAW,CAAQ;;KAG9B;IAED,qCAAW,GAAX,UAAY,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IAED,4CAAkB,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;KACF;IACH,sBAAC;AAAD,CAxBA,CAAqC,iBAAiB,GAwBrD;;ACvFD;AACA,AA4BO,IAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF,SAAgB,cAAc,CAAC,cAAmB;IAAnB,+BAAA,EAAA,mBAAmB;IAChD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAChE;KACF,CAAC;AACJ,CAAC;AAED;IAAoCA,wCAAiB;IACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;QAAf,2BAAA,EAAA,eAAe;QAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,gBAAU,GAAV,UAAU,CAAK;;KAGzB;IAEM,oCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;KAC1D;IACH,qBAAC;AAAD,CAdA,CAAoC,iBAAiB,GAcpD;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;IACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;SACb,MAAM,KAAK,GAAG;aACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aAC3D,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aACnE,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;SAChB,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa;aAC5E,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;QACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;;QAKjC,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,KAAK,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;YACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,O
AAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,GAAA,CAAC;aAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;KACpD;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;;;IAGvE,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;QACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;QAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;KACzB;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;;SC5Fe,oBAAoB,CAAC,YAAiB;IAAjB,6BAAA,EAAA,iBAAiB;IACpD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAED;IAA0CA,8CAAiB;IACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;QAAlB,8BAAA,EAAA,kBAAkB;QAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,mBAAa,GAAb,aAAa,CAAK;;KAG5B;IAEM,0CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;KAClE;IACH,2BAAC;AAAD,CAdA,CAA0C,iBAAiB,GAc1D;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;iBAI3C,KAAK,CAAC,cAAM,OAAA,KAAK,GAAA,CAAC;iBAClB,IAAI,CAAC,UAAC,kBAAkB;gBACvB,IAAI,kBAAkB,EAAE;;;oBAGtB,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEc,YAAkB,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;aACjB,CAAC,EACJ;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;;AAMA,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;IAAnB,4BAAA,EAAA,mBAAmB;IAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;;;IAID,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;IAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;;AAMA,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAG,EAAE;;SAEb;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;YACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;AAMA,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,IAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;IAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;;;AASA,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;IACpF,IAAM,MAAM,GAAM,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;IAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;QAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;SAC7F;QACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;KAC/D,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;AASA,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAA
C;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;QACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;QAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;YACrF,OAAO,IAAI,CAAC;SACb;aAAM;YACL,OAAOI,KACC,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;iBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,GAAA,CAAC,CAAC;SACpE;KACF,CAAC,CAAC;AACL,CAAC;;ACpMD;AACA,SAYgB,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;SACvE;KACF,CAAC;AACJ,CAAC;AAED;IAAmClB,uCAAiB;IAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;QAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;KAGxD;IAED,mCAAW,GAAX,UAAY,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KACzD;IAEM,mCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;YAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;SAAA,CAC1C,CAAC;KACH;IACH,oBAAC;AAAD,CAlBA,CAAmC,iBAAiB,GAkBnD;;ACzCD;AACA,SAwBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;;;;;;;;;AAUA;IAA4CA,gDAAiB;IAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;QAC1C,gCAA0B,GAAG,CAAC,CAAC;QAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;QAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;QAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,GAAG,KAAI,CAAC,0BAA0B,CAAC;QAChG,KAAI,CAAC,aAAa;YAChB,OAAO,aAAa,KAAK,QAAQ,GAAG,aAAa,GAAG,KAAI,CAAC,6BAA6B,CAAC;QACzF,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;kBAChC,gBAAgB;kBAChB,KAAI,CAAC,iCAAiC,CAAC;QAC7C,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;kBAChC,gBAAgB;kBAChB,KAAI,CAAC,iCAAiC,CAAC;;KAC9C;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAAmB,OAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;KAClE;IACH,6BAAC;AAAD,CArCA,CAA4C,iBAAiB,GAqC5D;AAED;;;;;;;AAOA,SAASC,aAAW,CAAC,MAA8B,EAAE,SAAoB;IACvE,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;AAMA,SAASC,iBAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;IAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAeF,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;oBAErB,SAAS,GAAGE,iBAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;0BAElD,GAAG;wBACH,GAAG,CAAC,IAAI;wBACRD,aAAW,CAAC,MAAM,EAAE,SAAS,CAAC;yBAC7B,GAAG,CAAC,IAAI,KAAK,WAAW;4BACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;4BAC9B,GAAG,CAAC
,IAAI,KAAK,cAAc;4BAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;4BACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;oBAItB,qBAAME,KAAW,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;oBAA1C,SAA0C,CAAC;oBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;oBAEvD,sBAAOH,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;oBAGrE,IAAI,GAAG,EAAE;;wBAEP,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;qBACxC;oBACD,sBAAO,iBAAiB,EAAC;;;;;CAE5B;;AC1LD;AACA,AAKA,WAAY,qBAAqB;IAC/B,kCAAS,CAAA;IACT,kCAAS,CAAA;IACT,mCAAU,CAAA;IACV,oCAAW,CAAA;IACX,wCAAe,CAAA;AACjB,CAAC,EANWI,6BAAqB,KAArBA,6BAAqB,QAMhC;;ACZD;AACA,SAYgB,WAAW,CAAC,aAA6B;IACvD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAiCvB,qCAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;KACpC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,kBAAC;AAAD,CAlBA,CAAiC,iBAAiB,GAkBjD;;ACvCD;AACA,AAcA;;;AAGA,AAAO,IAAM,WAAW,GAAa,WAAW,EAAE,CAAC;AACnD,IAAM,YAAY,GAAyB,IAAI,GAAG,EAAE,CAAC;AAErD;;;AAGA,SAAgB,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,IAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,IAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,IAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;AACA;AACA;AACA,SAAS,UAAU,CAAC,GAAW;IAC7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,IAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC/B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAsB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA9B,IAAM,OAAO,oBAAA;QAChB,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;;;YAGtB,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;AAGA,SAAgB,WAAW;IACzB,IAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,IAAI,EAAE,GAAA,CAAC;aAC1B,MAAM,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,MAAM,GAAA,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;AAGA,SAAS,kBAAkB,CACzB,GAAW;IAEX,IAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,IAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,IAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,GAAG,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,IAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,IAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,IAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,GAAG,IAAI,CAAC;IACpE,IAAM,QAAQ,GAAG,WAAW,GAAG,IAAI
,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;IAC1E,IAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,cAAc,gBAAA;KACf,CAAC;AACJ,CAAC;AAED,SAAgB,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAEK,IAAA,KAAyC,kBAAkB,CAAC,QAAQ,CAAC,EAAnE,QAAQ,cAAA,EAAE,QAAQ,cAAA,EAAE,cAAc,oBAAiC,CAAC;IAC5E,IAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,IAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,GAAG,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ,UAAA;QACR,QAAQ,UAAA;KACT,CAAC;AACJ,CAAC;AAED,SAAgB,WAAW,CAAC,aAA6B;IACvD,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAiCA,qCAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;KACpC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACtD,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,kBAAC;AAAD,CAlBA,CAAiC,iBAAiB,GAkBjD;;ACvKD;AACA,AAaA,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,SAAgB,qBAAqB,CACnC,UAAwC;IAAxC,2BAAA,EAAA,gCAAwC;IAExC,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;SACnE;KACF,CAAC;AACJ,CAAC;AAED;;;;;;AAMA;IAA2CA,+CAAiB;IAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;QAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;KAC9B;IAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;gBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;wBACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;qBAC7C,CAAC,EAAC;;;KACJ;IAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;wBAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;4BACvD,sBAAO,YAAY,EAAC;yBACrB;wBAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;8BAEE,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;wBAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;6BACE,SAAS,EAAT,wBAAS;wBACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;wBAAtB,SAAsB,CAAC;wBACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;wBAArD,GAAG,GAAG,SAA+C;wBAC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;4BAIxD,sBAAO,YAAY,EAAC;;;;KACrB;IAEa,2CAAqB,GAAnC,UAAoC,WAAmB;QACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;KACF;IAEa,+CAAyB,GAAvC,UAAwC,WAAmB;QACzD,IAAI;YACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,SAAS,CAAC;SAClB;KACF;IACH,4BAAC;AAAD,CA7DA,CAA2C,iBAAiB,GA6D3D;;ACxGD;AACA,AASA,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;;AAGA,AAAO,IAAM,6BAA6B,GAAG;IAC3C,gCAAgC;IAChC,qCAAqC;IACrC,sCAAsC;IACtC,gCAAgC;CACjC,CAAC;AAEF;;;;AAIA;IAGE,wCACE,oBAAqC,EACrC,MAAmE;QAAnE,uBAAA,EAAA,gDAAmE;QAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;QACjD,IAAI,CAAC,MAAM,GAA
G,MAAM,CAAC;KACtB;IAEY,iDAAQ,GAArB;;;;;4BACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;wBAAnE,WAAW,GAAG,SAAqD;wBACzE,IAAI,WAAW,KAAK,IAAI,EAAE;4BAClB,MAAM,GAAkB;gCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;gCAC9B,SAAS,EAAE,4BAA4B;gCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;6BAC1C,CAAC;4BACF,sBAAO,MAAM,EAAC;yBACf;6BAAM;4BACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;yBAC/C;;;;KACF;IAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;4BACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;wBAArC,aAAa,GAAG,SAAqB;wBAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrBwB,SAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;wBACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;KACrC;IACH,qCAAC;AAAD,CAAC;;AC3DD;AACA,AA+JA;;;;AAIA;;;;;;;IAgCE,uBACE,WAAwD,EACxD,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;QAED,IAAI,wBAA8D,CAAC;QACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;YAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;YAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;gBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;aACvC;YACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;SACnF;aAAM;YACL,wBAAwB,GAAG,WAAW,CAAC;SACxC;QAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;YACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAIC,mBAAiB,EAAE,CAAC;QACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;YACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;YACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACpF,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;KACvD;;;;IAKD,mCAAW,GAAX,UAAY,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;KAC9C;;;;;;;IAQD,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;QAEtC,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;oBAA
nD,IAAM,YAAY,SAAA;oBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;oBAAvD,IAAM,cAAc,SAAA;oBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;wBACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;wBACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;4BAChD,IAAI,cAAc,CAAC,gBAAgB,KAAKF,6BAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,mBAAmB,GAAG,EAAE,CAAC;iCAC1B;qCAAM;oCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;qCACvE;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;oCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;4BAC5C,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,EAAE;gBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;oBAAzD,IAAM,eAAe,SAAA;oBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,IAAI,SAAS,EAAE;wBAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;wBACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;gCAAvC,IAAM,GAAG,SAAA;gCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAA
W,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;gBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;aACnE;YAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;gBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;aAAA,CAC1D,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAChC;QAED,IAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;;iBAEH,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,GAAA,CAAC;iBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;KACf;IACH,oBAAC;AAAD,CAAC,IAAA;SAEe,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;IAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;QACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI;YACF,IAAI,WAAW,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;gBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;gBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BG,kBAAwB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;4BAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;yBACpC,CAAC,CAAC;qBACJ;iBACF;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAgC,UAAgC,EAAhC,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAAE;YAA7D,IAAM,iBAAiB,SAAA;YAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;gBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED,SAAS,sBAAsB,CAAC,QAAa;IAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;AAC/C,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;IAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,
IAAI,WAAW,EAAE;QACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;YACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC7B;aAAM;YACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;SAC5C;KACF;IAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IAED,IAAM,eAAe,qCAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;IACF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KAC5D;IAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;IACzE,IAAI,aAAa,EAAE;QACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC,CAAC;KAC5C;IAED,IAAI,OAAO,CAAC,aAAa,EAAE;QACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAID,AAkBA,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;IAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,SAAgB,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;6BACvB,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;aAC7F;;YAGD,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;SACnE;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;YACxC,IAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;YAEF,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;YACxE,IAAI,aAAa,KAAK,SAAS,EAAE;gBAC/B,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;QAEnD,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;YACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAgB,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC
;IAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;QACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YACtC,KAAK,EAAE,SAAS;SACjB,CAAC;KAAA,CAAC;IAEL,IAAI,UAAU,EAAE;QACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,mCACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;QAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,GAAA,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;;;YAGjD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,SAAS,CAAC,UAAU,GAAG,EAAE,CAAC;YACnF,IAAM,aAAa,GAAGC,qBAAI,UAAU,CAAyB,CAAC;YAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAA3C,IAAM,GAAG,SAAA;gBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;oBAAzC,IAAM,GAAG,SAAA;oBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;YACpC,OAAO,aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,mCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnCC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;QAEA,OAAO,oBAAoB,mCACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,mCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;;ACn1BD;AACA,SAWgB,SAAS,CAAC,MAAyB;IAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;IACjD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;SACnD;KACF,CAAC;AACJ,CAAC;AAED;IAA+B5B,mCAAiB;IAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;KACtB;IAEM,+BAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAEC;QADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;KAC9F;IACH,gBAAC;AAAD,CAfA,CAA+B,iBAAiB,GAe/C;AAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;IAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;IAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,CAAC,CAAC;IAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;IACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAAc,CAAC,CAAC;IAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;;AC9CD;AACA,AAOA,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM6B,8BAA4B,GAAG,QAAQ,CAAC;AAE9C;;;AAGA;;;;;;;;IAWE,0BAAY,KAAa,EAAE,mBAA0D;QAA1D,oCAAA,EAAA,oDAA0D;QATrF,wBAAmB,GAAWA,8BAA4B,CAAC;QAUzD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;SACvD;QACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;;;;;;;IAQD,sCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;QACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,uBAAC;AAAD,CAAC;;AC/CD;AACA,AAOA,IAAMC,iBAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAMD,8BAA4B,GAAG,OAAO,CAAC;AAE7C;;;;;;;;;IAaE,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;QAA1D,oCAAA,EAAA,oDAA0D;QAb5D,wBAAmB,GAAWA,8BAA4B,CAAC;QAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QA
AQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;;;;;;;IAQD,oDAAW,GAAX,UAAY,WAA4B;QACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;QACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAIE,YAAmB,CAAC,WAAW,CAAG,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAACD,iBAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,qCAAC;AAAD,CAAC;;ACrDD;AACA,AAqBA;;;AAGA;;;;;IAcE,2BAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;KAChC;;;;;;;IAQD,uCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,wBAAC;AAAD,CAAC;;ACxFD;AACA;IAIsC9B,0CAAiB;;;;;;;IAOrD,0BAAY,QAAgB;QAA5B,iBAUC;QATC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;KAChB;IACH,uBAAC;AAAD,CAlBA,CAAsC,iBAAiB;;ACLvD;AACA;IAIuCA,2CAAiB;;;;;;;IAOtD,2BAAY,SAAiB;QAA7B,iBAUC;QATC,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;YAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;SACtF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,SAAS;aACzB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;KAChB;IACH,wBAAC;AAAD,CAlBA,CAAuC,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dom-shim.d.ts b/node_modules/@azure/ms-rest-js/dom-shim.d.ts new file mode 100644 index 0000000..cb4ac4d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dom-shim.d.ts @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// d.ts shims provide types for things we use internally but are not part +// of this package's surface area. 
+ +interface Request {} +interface RequestInit {} +interface Response {} +interface Headers {} diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts new file mode 100644 index 0000000..8fa67fd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts @@ -0,0 +1,9 @@ +import { CommonRequestInfo, CommonRequestInit, CommonResponse, FetchHttpClient } from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class BrowserFetchHttpClient extends FetchHttpClient { + prepareRequest(_httpRequest: WebResourceLike): Promise<Partial<RequestInit>>; + processRequest(_operationResponse: HttpOperationResponse): Promise<void>; + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise<CommonResponse>; +} +//# sourceMappingURL=browserFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map new file mode 100644 index 0000000..a3ae8ca --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"browserFetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/browserFetchHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,iBAAiB,EACjB,cAAc,EACd,eAAe,EAChB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,qBAAa,sBAAuB,SAAQ,eAAe;IACzD,cAAc,CAAC,YAAY,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAI5E,cAAc,CAAC,kBAAkB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;IAIxE,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;CAGnF"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js new file mode 100644 index 0000000..eea907f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information.
+import { __extends } from "tslib"; +import { FetchHttpClient, } from "./fetchHttpClient"; +var BrowserFetchHttpClient = /** @class */ (function (_super) { + __extends(BrowserFetchHttpClient, _super); + function BrowserFetchHttpClient() { + return _super !== null && _super.apply(this, arguments) || this; + } + BrowserFetchHttpClient.prototype.prepareRequest = function (_httpRequest) { + return Promise.resolve({}); + }; + BrowserFetchHttpClient.prototype.processRequest = function (_operationResponse) { + return Promise.resolve(); + }; + BrowserFetchHttpClient.prototype.fetch = function (input, init) { + return fetch(input, init); + }; + return BrowserFetchHttpClient; +}(FetchHttpClient)); +export { BrowserFetchHttpClient }; +//# sourceMappingURL=browserFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map new file mode 100644 index 0000000..a47796d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"browserFetchHttpClient.js","sourceRoot":"","sources":["../../lib/browserFetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAIL,eAAe,GAChB,MAAM,mBAAmB,CAAC;AAI3B;IAA4C,0CAAe;IAA3D;;IAYA,CAAC;IAXC,+CAAc,GAAd,UAAe,YAA6B;QAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IAC7B,CAAC;IAED,+CAAc,GAAd,UAAe,kBAAyC;QACtD,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3B,CAAC;IAED,sCAAK,GAAL,UAAM,KAAwB,EAAE,IAAwB;QACtD,OAAO,KAAK,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;IAC5B,CAAC;IACH,6BAAC;AAAD,CAAC,AAZD,CAA4C,eAAe,GAY1D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts new file mode 100644 index 0000000..f566c6f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,46 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +/** + * @interface ApiKeyCredentialOptions + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise<WebResource>} The signed request object.
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=apiKeyCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map new file mode 100644 index 0000000..6a91bfc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAEtE;;;GAGG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAChC;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;CAChC;AAED;;GAEG;AACH,qBAAa,iBAAkB,YAAW,wBAAwB;IAChE;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAuB;IACjD;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAuB;IAEhD;;;OAGG;gBACS,OAAO,EAAE,uBAAuB;IAU5C;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAiCpE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js new file mode 100644 index 0000000..587a24a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +/** + * Authenticates to a service using an API key. + */ +var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. 
+ */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; +}()); +export { ApiKeyCredentials }; +//# sourceMappingURL=apiKeyCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map new file mode 100644 index 0000000..7f3fc82 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAmB7C;;GAEG;AACH;IAUE;;;OAGG;IACH,2BAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACH,uCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,wBAAC;AAAD,CAAC,AA/DD,IA+DC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts new file mode 100644 index 0000000..7cb357e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts @@ -0,0 +1,20 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResource } from "../webResource"; +import { TokenCredential } from "@azure/core-auth"; +import { TokenResponse } from "./tokenResponse"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. 
+ */ +export declare const azureResourceManagerEndpoints: string[]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ +export declare class AzureIdentityCredentialAdapter implements ServiceClientCredentials { + private azureTokenCredential; + private scopes; + constructor(azureTokenCredential: TokenCredential, scopes?: string | string[]); + getToken(): Promise; + signRequest(webResource: WebResource): Promise; +} +//# sourceMappingURL=azureIdentityTokenCredentialAdapter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map new file mode 100644 index 0000000..fff7fbd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureIdentityTokenCredentialAdapter.d.ts","sourceRoot":"","sources":["../../../lib/credentials/azureIdentityTokenCredentialAdapter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAEtE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACnD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAIhD;;GAEG;AACH,eAAO,MAAM,6BAA6B,UAKzC,CAAC;AAEF;;;GAGG;AACH,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E,OAAO,CAAC,oBAAoB,CAAkB;IAC9C,OAAO,CAAC,MAAM,CAAoB;gBAEhC,oBAAoB,EAAE,eAAe,EACrC,MAAM,GAAE,MAAM,GAAG,MAAM,EAA4C;IAMxD,QAAQ,IAAI,OAAO,CAAC,aAAa,CAAC;IAclC,WAAW,CAAC,WAAW,EAAE,WAAW;CAQlD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js new file mode 100644 index 0000000..3fdea9b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __generator } from "tslib"; +import { Constants as MSRestConstants } from "../util/constants"; +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +export var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. 
+ */ +var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return __awaiter(this, void 0, void 0, function () { + var accessToken, result; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + return [2 /*return*/]; + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return __awaiter(this, void 0, void 0, function () { + var tokenResponse; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(MSRestConstants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; +}()); +export { AzureIdentityCredentialAdapter }; +//# sourceMappingURL=azureIdentityTokenCredentialAdapter.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map new file mode 100644 index 0000000..fa8bacd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureIdentityTokenCredentialAdapter.js","sourceRoot":"","sources":["../../../lib/credentials/azureIdentityTokenCredentialAdapter.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EAAE,SAAS,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAMjE,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;GAEG;AACH,MAAM,CAAC,IAAM,6BAA6B,GAAG;IAC3C,gCAAgC;IAChC,qCAAqC;IACrC,sCAAsC;IACtC,gCAAgC;CACjC,CAAC;AAEF;;;GAGG;AACH;IAGE,wCACE,oBAAqC,EACrC,MAAmE;QAAnE,uBAAA,EAAA,gDAAmE;QAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;QACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAEY,iDAAQ,GAArB;;;;;4BACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;wBAAnE,WAAW,GAAG,SAAqD;wBACzE,IAAI,WAAW,KAAK,IAAI,EAAE;4BAClB,MAAM,GAAkB;gCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;gCAC9B,SAAS,EAAE,4BAA4B;gCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;6BAC1C,CAAC;4BACF,sBAAO,MAAM,EAAC;yBACf;6BAAM;4BACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;yBAC/C;;;;;KACF;IAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;4BACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;wBAArC,aAAa,GAAG,SAAqB;wBAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;wBACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;KACrC;IACH,qCAAC;AAAD,CAAC,AAjCD,IAiCC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts new file mode 100644 index 0000000..c313e52 --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts @@ -0,0 +1,24 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +export declare class BasicAuthenticationCredentials implements ServiceClientCredentials { + userName: string; + password: string; + authorizationScheme: string; + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(userName: string, password: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map new file mode 100644 index 0000000..59a2b8c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAItE,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;;OAOG;gBAED,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,EAChB,mBAAmB,GAAE,MAAqC;IAa5D;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe;CAOzC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js new file mode 100644 index 0000000..e1553b0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. 
+ */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + base64.encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; +}()); +export { BasicAuthenticationCredentials }; +//# sourceMappingURL=basicAuthenticationCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map new file mode 100644 index 0000000..a89f01e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,MAAM,MAAM,gBAAgB,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM,4BAA4B,GAAG,OAAO,CAAC;AAE7C;IAKE;;;;;;;OAOG;IACH,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;QAA1D,oCAAA,EAAA,kDAA0D;QAb5D,wBAAmB,GAAW,4BAA4B,CAAC;QAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,oDAAW,GAAX,UAAY,WAA4B;QACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;QACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAI,MAAM,CAAC,YAAY,CAAC,WAAW,CAAG,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,qCAAC;AAAD,CAAC,AA1CD,IA0CC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts new file mode 100644 index 0000000..34c7db7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts @@ -0,0 +1,2 @@ +export declare 
type Authenticator = (challenge: object) => Promise; +//# sourceMappingURL=credentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map new file mode 100644 index 0000000..81ab42b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/credentials.ts"],"names":[],"mappings":"AAGA,oBAAY,aAAa,GAAG,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js new file mode 100644 index 0000000..a9eeec2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=credentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map new file mode 100644 index 0000000..939147b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.js","sourceRoot":"","sources":["../../../lib/credentials/credentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts new file mode 100644 index 0000000..481819c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts @@ -0,0 +1,11 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +export declare class DomainCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + constructor(domainKey: string); +} +//# sourceMappingURL=domainCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map new file mode 100644 index 0000000..0295f07 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"domainCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/domainCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF,qBAAa,iBAAkB,SAAQ,iBAAiB;IACtD;;;;;OAKG;gBACS,SAAS,EAAE,MAAM;CAW9B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js new file mode 100644 index 0000000..51cb0ae --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import { ApiKeyCredentials } from "./apiKeyCredentials"; +var DomainCredentials = /** @class */ (function (_super) { + __extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; +}(ApiKeyCredentials)); +export { DomainCredentials }; +//# sourceMappingURL=domainCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map new file mode 100644 index 0000000..579d63e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"domainCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/domainCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF;IAAuC,qCAAiB;IACtD;;;;;OAKG;IACH,2BAAY,SAAiB;QAA7B,iBAUC;QATC,IAAI,CAAC,SAAS,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;YAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;SACtF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,SAAS;aACzB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;IACjB,CAAC;IACH,wBAAC;AAAD,CAAC,AAlBD,CAAuC,iBAAiB,GAkBvD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts new file mode 100644 index 0000000..af0bbac --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts @@ -0,0 +1,11 @@ +import { WebResourceLike } from "../webResource"; +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike/request to be signed. 
+ * @returns {Promise} The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map new file mode 100644 index 0000000..2c091a3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD,MAAM,WAAW,wBAAwB;IACvC;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;CACrE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js new file mode 100644 index 0000000..e94df45 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=serviceClientCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map new file mode 100644 index 0000000..68ab158 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts new file mode 100644 index 0000000..95a5c31 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts @@ -0,0 +1,25 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +export declare class TokenCredentials implements ServiceClientCredentials { + token: string; + authorizationScheme: string; + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(token: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=tokenCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map new file mode 100644 index 0000000..85e3807 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/tokenCredentials.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAKtE;;GAEG;AACH,qBAAa,gBAAiB,YAAW,wBAAwB;IAC/D,KAAK,EAAE,MAAM,CAAC;IACd,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;OAMG;gBACS,KAAK,EAAE,MAAM,EAAE,mBAAmB,GAAE,MAAqC;IAQrF;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe;CAQzC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js new file mode 100644 index 0000000..70265d7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. 
+ */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; +}()); +export { TokenCredentials }; +//# sourceMappingURL=tokenCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map new file mode 100644 index 0000000..302ef4e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/tokenCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAI9C,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;GAEG;AACH;IAIE;;;;;;OAMG;IACH,0BAAY,KAAa,EAAE,mBAA0D;QAA1D,oCAAA,EAAA,kDAA0D;QATrF,wBAAmB,GAAW,4BAA4B,CAAC;QAUzD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;SACvD;QACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,sCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;QACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,uBAAC;AAAD,CAAC,AAjCD,IAiCC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts new file mode 100644 index 0000000..eb4f179 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts @@ -0,0 +1,10 @@ +/** + * TokenResponse is defined in `@azure/ms-rest-nodeauth` and is copied here to not + * add an unnecessary dependency. + */ +export interface TokenResponse { + readonly tokenType: string; + readonly accessToken: string; + readonly [x: string]: any; +} +//# sourceMappingURL=tokenResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map new file mode 100644 index 0000000..7636a8a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenResponse.d.ts","sourceRoot":"","sources":["../../../lib/credentials/tokenResponse.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,aAAa;IAC5B,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,QAAQ,EAAE,CAAC,EAAE,MAAM,GAAG,GAAG,CAAC;CAC3B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js new file mode 100644 index 0000000..42ec199 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+//# sourceMappingURL=tokenResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map new file mode 100644 index 0000000..b145bf0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenResponse.js","sourceRoot":"","sources":["../../../lib/credentials/tokenResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts new file mode 100644 index 0000000..059fe93 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts @@ -0,0 +1,11 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map new file mode 100644 index 0000000..cf4a12b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/topicCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF,qBAAa,gBAAiB,SAAQ,iBAAiB;IACrD;;;;;OAKG;gBACS,QAAQ,EAAE,MAAM;CAW7B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js new file mode 100644 index 0000000..bed72a9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { ApiKeyCredentials } from "./apiKeyCredentials"; +var TopicCredentials = /** @class */ (function (_super) { + __extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; +}(ApiKeyCredentials)); +export { TopicCredentials }; +//# sourceMappingURL=topicCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map new file mode 100644 index 0000000..267491d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/topicCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF;IAAsC,oCAAiB;IACrD;;;;;OAKG;IACH,0BAAY,QAAgB;QAA5B,iBAUC;QATC,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;IACjB,CAAC;IACH,uBAAC;AAAD,CAAC,AAlBD,CAAsC,iBAAiB,GAkBtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts new file mode 100644 index 0000000..883145a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map new file mode 100644 index 0000000..9a04172 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.d.ts","sourceRoot":"","sources":["../../lib/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js new file mode 100644 index 0000000..49ae427 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map new file mode 100644 index 0000000..d21b02e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.js","sourceRoot":"","sources":["../../lib/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts new file mode 100644 index 0000000..a7381a2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map new file mode 100644 index 0000000..35b8b1f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.d.ts","sourceRoot":"","sources":["../../lib/defaultHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js new file mode 100644 index 0000000..a8b56aa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map new file mode 100644 index 0000000..1effd0f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.js","sourceRoot":"","sources":["../../lib/defaultHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts new file mode 100644 index 0000000..eb15bc1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts @@ -0,0 +1,23 @@ +import { HttpClient } from "./httpClient"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { HttpHeadersLike } from "./httpHeaders"; +export declare type CommonRequestInfo = string; +export declare type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; +export declare type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; +export declare abstract class FetchHttpClient implements HttpClient { + sendRequest(httpRequest: WebResourceLike): Promise; + abstract prepareRequest(httpRequest: WebResourceLike): Promise>; + abstract processRequest(operationResponse: HttpOperationResponse): Promise; + abstract fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; +} +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +//# sourceMappingURL=fetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map new file mode 100644 index 0000000..926678c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/fetchHttpClient.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAU7D,oBAAY,iBAAiB,GAAG,MAAM,CAAC;AAEvC,oBAAY,iBAAiB,GAAG,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,GAAG,QAAQ,CAAC,GAAG;IACjF,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,GAAG,CAAC;CACd,CAAC;AAEF,oBAAY,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,CAAC,GAAG;IAC7E,IAAI,EAAE,GAAG,CAAC;IACV,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;CACf,CAAC;AAEF,8BAAsB,eAAgB,YAAW,UAAU;IACnD,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;aAoMhE,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;aAC3E,cAAc,CAAC,iBAAiB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;aACvE,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;CAClG;AAcD,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,GAAG,eAAe,CAQ9D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js new file mode 100644 index 0000000..92c646a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js @@ -0,0 +1,214 @@ +// Copyright (c) Microsoft 
Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __assign, __awaiter, __generator } from "tslib"; +import AbortController from "abort-controller"; +import FormData from "form-data"; +import { HttpHeaders } from "./httpHeaders"; +import { RestError } from "./restError"; +import { Transform } from "stream"; +var FetchHttpClient = /** @class */ (function () { + function FetchHttpClient() { + } + FetchHttpClient.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var abortController, abortListener, formData, requestForm_1, appendFormValue, _i, _a, formKey, formValue, j, contentType, body, loadedBytes_1, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, _b, _c, onDownloadProgress_1, responseBody, loadedBytes_2, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResource) cannot be null or undefined and must be of type object."); + } + abortController = new AbortController(); + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + abortListener = function (event) { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(function () { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + formData = httpRequest.formData; + requestForm_1 = new FormData(); + appendFormValue = function (key, value) { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (_i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + formKey = _a[_i]; + formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm_1; + httpRequest.formData = undefined; + contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm_1.getBoundary === "function") { + httpRequest.headers.set("Content-Type", "multipart/form-data; boundary=" + requestForm_1.getBoundary()); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + body = httpRequest.body + ? typeof httpRequest.body === "function" + ? 
httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + loadedBytes_1 = 0; + uploadReportStream = new Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_1 += chunk.length; + httpRequest.onUploadProgress({ loadedBytes: loadedBytes_1 }); + callback(undefined, chunk); + }, + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + return [4 /*yield*/, this.prepareRequest(httpRequest)]; + case 1: + platformSpecificRequestInit = _d.sent(); + requestInit = __assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + _d.label = 2; + case 2: + _d.trys.push([2, 8, 9, 10]); + return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)]; + case 3: + response = _d.sent(); + headers = parseHeaders(response.headers); + _b = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? response.body + : undefined + }; + if (!!httpRequest.streamResponseBody) return [3 /*break*/, 5]; + return [4 /*yield*/, response.text()]; + case 4: + _c = _d.sent(); + return [3 /*break*/, 6]; + case 5: + _c = undefined; + _d.label = 6; + case 6: + operationResponse = (_b.bodyAsText = _c, + _b.redirected = response.redirected, + _b.url = response.url, + _b); + onDownloadProgress_1 = httpRequest.onDownloadProgress; + if (onDownloadProgress_1) { + responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + loadedBytes_2 = 0; + downloadReportStream = new Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_2 += chunk.length; + onDownloadProgress_1({ loadedBytes: loadedBytes_2 }); + callback(undefined, chunk); + }, + }); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + length_1 = parseInt(headers.get("Content-Length")) || undefined; + if (length_1) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress_1({ loadedBytes: length_1 }); + } + } + } + return [4 /*yield*/, this.processRequest(operationResponse)]; + case 7: + _d.sent(); + return [2 /*return*/, operationResponse]; + case 8: + error_1 = _d.sent(); + fetchError = error_1; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + throw fetchError; + case 9: + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(function () { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch(function (_e) { }); + } + return [7 /*endfinally*/]; + case 10: return [2 /*return*/]; + } + }); + }); + }; + return FetchHttpClient; +}()); +export { FetchHttpClient }; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise(function (resolve) { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +export function parseHeaders(headers) { + var httpHeaders = new HttpHeaders(); + headers.forEach(function (value, key) { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +//# sourceMappingURL=fetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map new file mode 100644 index 0000000..4c8e919 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fetchHttpClient.js","sourceRoot":"","sources":["../../lib/fetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,eAAe,MAAM,kBAAkB,CAAC;AAC/C,OAAO,QAAQ,MAAM,WAAW,CAAC;AAKjC,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAsB7C;IAAA;IAwMA,CAAC;IAvMO,qCAAW,GAAjB,UAAkB,WAA4B;;;;;;wBAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnD,MAAM,IAAI,KAAK,CACb,qFAAqF,CACtF,CAAC;yBACH;wBAEK,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;wBAE9C,IAAI,WAAW,CAAC,WAAW,EAAE;4BAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gCACnC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;6BACH;4BAED,aAAa,GAAG,UAAC,KAAY;gCAC3B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oCAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iCACzB;4BACH,CAAC,CAAC;4BACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;yBAClE;wBAED,IAAI,WAAW,CAAC,OAAO,EAAE;4BACvB,UAAU,CAAC;gCACT,eAAe,CAAC,KAAK,EAAE,CAAC;4BAC1B,CAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;yBACzB;wBAED,IAAI,WAAW,CAAC,QAAQ,EAAE;4BAClB,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;4BACrC,gBAAc,IAAI,QAAQ,EAAE,CAAC;4BAC7B,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;gCAC9C,0FAA0F;gCAC1F,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oCAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iCACjB;gCACD,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oCAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iCACrD;qCAAM;oCACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iCAChC;4BACH,CAAC,CAAC;4BACF,WAA2C,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gCAAlC,OAAO;gCACV,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gCACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oCAC5B,KAAS,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wCACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qCACxC;iCACF;qCAAM;oCACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iCACrC;6BACF;4BAED,WAAW,CAAC,IAAI,GAAG,aAAW,CAAC;4BAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;4BAC3B,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;4BAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gCACpE,IAAI,OAAO,aAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oCACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,mCAAiC,aAAW,CAAC,WAAW,EAAI,CAC7D,CAAC;iCACH;qCAAM;oCACL,kEAAkE;oCAClE,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iCAC5C;6BACF;yBACF;wBAEG,IAAI,GAAG,WAAW,CAAC,IAAI;4BACzB,CAAC,CAAC,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;gCACtC,CAAC,CAAC,WAAW,CAAC,IAAI,EAAE;gCACpB,CAAC,CAAC,WAAW,CAAC,IAAI;4BACpB,CAAC,CAAC,
SAAS,CAAC;wBACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;4BAChD,gBAAc,CAAC,CAAC;4BACd,kBAAkB,GAAG,IAAI,SAAS,CAAC;gCACvC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;oCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;oCAC5B,WAAW,CAAC,gBAAiB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;oCAC/C,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;gCAC7B,CAAC;6BACF,CAAC,CAAC;4BAEH,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;6BAC/B;iCAAM;gCACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;6BAC9B;4BAED,IAAI,GAAG,kBAAkB,CAAC;yBAC3B;wBAEyD,qBAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,EAAA;;wBAFK,2BAA2B,GAAyB,SAEzD;wBAEK,WAAW,cACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAE,eAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;;;;wBAIiC,qBAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,EAAA;;wBAAzE,QAAQ,GAAmB,SAA8C;wBAEzE,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;;4BAE7C,OAAO,EAAE,OAAO;4BAChB,OAAO,EAAE,WAAW;4BACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;4BACvB,kBAAkB,EAAE,WAAW,CAAC,kBAAkB;gCAChD,CAAC,CAAG,QAAQ,CAAC,IAA0C;gCACvD,CAAC,CAAC,SAAS;;6BACD,CAAC,WAAW,CAAC,kBAAkB,EAA/B,wBAA+B;wBAAG,qBAAM,QAAQ,CAAC,IAAI,EAAE,EAAA;;wBAArB,KAAA,SAAqB,CAAA;;;wBAAG,KAAA,SAAS,CAAA;;;wBAPjF,iBAAiB,IAOf,aAAU,KAAqE;4BAC/E,aAAU,GAAE,QAAQ,CAAC,UAAU;4BAC/B,MAAG,GAAE,QAAQ,CAAC,GAAG;+BAClB,CAAC;wBAEI,uBAAqB,WAAW,CAAC,kBAAkB,CAAC;wBAC1D,IAAI,oBAAkB,EAAE;4BAChB,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;4BAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;gCAC9B,gBAAc,CAAC,CAAC;gCACd,oBAAoB,GAAG,IAAI,SAAS,CAAC;oCACzC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;wCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;wCAC5B,oBAAkB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;wCACpC,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;oCAC7B,CAAC;iCACF,CAAC,CAAC;gCACH,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;gCACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;6BAC7D;iCAAM;gCACC,WAAS,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;gCACrE,IAAI,QAAM,EAAE;oCACV,wEAAwE;oCACxE,oBAAkB,CAAC,EAAE,WAAW,EAAE,QAAM,EAAE,CAAC,CAAC;iCAC7C;6BACF;yBACF;wBAED,qBAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,EAAA;;wBAA5C,SAA4C,CAAC;wBAE7C,sBAAO,iBAAiB,EAAC;;;wBAEnB,UAAU,GAAe,OAAK,CAAC;wBACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;4BACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;6BAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;4BACxC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;wBAED,MAAM,UAAU,CAAC;;wBAEjB,0BAA0B;wBAC1B,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;4BACxC,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;6BAC3C;4BACG,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;gCAC3D,kBAAkB,GAAG,gBAAgB,CAAC,iBAAkB,CAAC,kBAAkB,CAAC,CAAC;6BAC9E;4BAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;iCAChD,IAAI,CAAC;;gCACJ,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,EAAE;gCACtE,OAAO;4BACT,CAAC,CAAC;iCACD,KAAK,CAAC,UAAC,EAAE,IAAM,CAAC,CAAC,CAAC;yBACtB;;;;;;KAEJ;IAKH,sBAAC;AAAD,CAAC,AAxMD,IAwMC;;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB;IACxC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO;QACzB,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC1B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,OAAgB;IAC3C,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,GAAG;QACzB,WAAW,CAAC
,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts new file mode 100644 index 0000000..5f5f00e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. + */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map new file mode 100644 index 0000000..2da0af7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.d.ts","sourceRoot":"","sources":["../../lib/httpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAEzD;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,aAAa;CAAG"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.js b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js new file mode 100644 index 0000000..8dca936 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=httpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map new file mode 100644 index 0000000..e8c060f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.js","sourceRoot":"","sources":["../../lib/httpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts new file mode 100644 index 0000000..4f95f2e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts @@ -0,0 +1,132 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. 
Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: any): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header names that are contained in this collection. + */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map new file mode 100644 index 0000000..051d138 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.d.ts","sourceRoot":"","sources":["../../lib/httpHeaders.ts"],"names":[],"mappings":"AAUA;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,oBAAY,cAAc,GAAG;IAAE,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;OAKG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAC5D;;;;OAIG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;IAC5C;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACtC;;;;OAIG;IACH,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACpC;;OAEG;IACH,UAAU,IAAI,cAAc,CAAC;IAC7B;;OAEG;IACH,YAAY,IAAI,UAAU,EAAE,CAAC;IAC7B;;OAEG;IACH,WAAW,IAAI,MAAM,EAAE,CAAC;IACxB;;OAEG;IACH,YAAY,IAAI,MAAM,EAAE,CAAC;IACzB;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;IACzB;;;OAGG;IACH,MAAM,IAAI,cAAc,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,CAAC,EAAE,GAAG,GAAG,MAAM,IAAI,eAAe,CAoBzE;AAED;;GAEG;AACH,qBAAa,WAAW;IACtB,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAsC;gBAEtD,UAAU,CAAC,EAAE,cAAc;IASvC;;;;;OAKG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAOlE;;;;OAIG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKlD;;OAEG;IACI,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAI5C;;;;OAIG;IACI,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAM1C;;OAEG;IACI,UAAU,IAAI,cAAc;IASnC;;OAEG;IACI,YAAY,IAAI,UAAU,EAAE;IAQnC;;OAEG;IACI,WAAW,IAAI,MAAM,EAAE;IAS9B;;OAEG;IACI,YAAY,IAAI,MAAM,EAAE;IAS/B;;OAEG;IACI,MAAM,IAAI,cAAc;IAI/B;;OAEG;IACI,QAAQ,IAAI,MAAM;IAIzB;;OAEG;IACI,KAAK,IAAI,WAAW;CAG5B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js new file mode 100644 index 0000000..0cc2af2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +export function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. 
This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; +}()); +export { HttpHeaders }; +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map new file mode 100644 index 0000000..3a4d8ac --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.js","sourceRoot":"","sources":["../../lib/httpHeaders.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4ED,MAAM,UAAU,iBAAiB,CAAC,MAAY;IAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,KAAK,CAAC;KACd;IAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;QACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;QAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;QACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;QACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;QACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;GAEG;AACH;IAGE,qBAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;IACH,CAAC;IAED;;;;;OAKG;IACI,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,yBAAG,GAAV,UAAW,UAAkB;QAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;IAC5C,CAAC;IAED;;OAEG;IACI,8BAAQ,GAAf,UAAgB,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACI,4BAAM,GAAb,UAAc,UAAkB;QAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,gCAAU,GAAjB;QACE,IAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SAClD;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,kCAAY,GAAnB;QACE,IAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,iCAAW,GAAlB;QACE,IAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACI,kCAAY,GAAnB;QACE,IAAM,YAAY,GAAa,EAAE,CAAC;QAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED;;OAEG;IACI,4BAAM,GAAb;QACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;IAC3B,CAAC;IAED;;OAEG;IACI,8BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAED;;OAEG;IACI,2BAAK,GAAZ;QACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAC5C,CAAC;IACH,kBAAC;AAAD,CAAC,AAxHD,IAwHC"} \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts new file mode 100644 index 0000000..d5790e0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts @@ -0,0 +1,83 @@ +/// +import { WebResourceLike } from "./webResource"; +import { HttpHeadersLike } from "./httpHeaders"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** + * The redirected property indicates whether the response is the result of a request which was redirected. + */ + redirected?: boolean; + /** + * The url property contains the URL of the response. The value will be the final URL obtained after any redirects. + */ + url?: string; +} +/** + * The flattened response to a REST call. + * Contains the underlying HttpOperationResponse as well as + * the merged properties of the parsedBody, parsedHeaders, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. 
+ */ + _response: HttpOperationResponse; + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map new file mode 100644 index 0000000..778b5e0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.d.ts","sourceRoot":"","sources":["../../lib/httpOperationResponse.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;CAC1B;AAED,OAAO,CAAC,MAAM,CAAC;IACb;;;OAGG;IACH,UAAU,IAAI;KAAG;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,qBAAsB,SAAQ,YAAY;IACzD;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAEvC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;OAEG;IACH,UAAU,CAAC,EAAE,GAAG,CAAC;IAEjB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;IAEzB;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;IAE3C;;OAEG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IAErB;;OAEG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;;;GAIG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,SAAS,EAAE,qBAAqB,CAAC;IAEjC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js new file mode 100644 index 0000000..bdb3308 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=httpOperationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map new file mode 100644 index 0000000..14302d6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.js","sourceRoot":"","sources":["../../lib/httpOperationResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts new file mode 100644 index 0000000..7a13ab2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. 
+ */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map new file mode 100644 index 0000000..91f637c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.d.ts","sourceRoot":"","sources":["../../lib/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,oBAAoB;IAC9B;;OAEG;IACH,GAAG,IAAA;IAEH;;OAEG;IACH,KAAK,IAAA;IAEL;;OAEG;IACH,OAAO,IAAA;IAEP;;OAEG;IACH,IAAI,IAAA;CACL"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js new file mode 100644 index 0000000..8971a06 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +//# sourceMappingURL=httpPipelineLogLevel.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map new file mode 100644 index 0000000..d933399 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.js","sourceRoot":"","sources":["../../lib/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,MAAM,CAAN,IAAY,oBAoBX;AApBD,WAAY,oBAAoB;IAC9B;;OAEG;IACH,6DAAG,CAAA;IAEH;;OAEG;IACH,iEAAK,CAAA;IAEL;;OAEG;IACH,qEAAO,CAAA;IAEP;;OAEG;IACH,+DAAI,CAAA;AACN,CAAC,EApBW,oBAAoB,KAApB,oBAAoB,QAoB/B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts new file mode 100644 index 0000000..fdbf56c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. 
+ */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map new file mode 100644 index 0000000..eac81c6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.d.ts","sourceRoot":"","sources":["../../lib/httpPipelineLogger.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,eAAe,EAAE,oBAAoB,CAAC;IAEtC;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,yBAA0B,YAAW,kBAAkB;IAK/C,eAAe,EAAE,oBAAoB;IAJxD;;;OAGG;gBACgB,eAAe,EAAE,oBAAoB;IAExD;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAgB3D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js new file mode 100644 index 0000000..a8b1cb5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +var ConsoleHttpPipelineLogger = /** @class */ (function () { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + function ConsoleHttpPipelineLogger(minimumLogLevel) { + this.minimumLogLevel = minimumLogLevel; + } + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. 
+ */ + ConsoleHttpPipelineLogger.prototype.log = function (logLevel, message) { + var logMessage = HttpPipelineLogLevel[logLevel] + ": " + message; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + }; + return ConsoleHttpPipelineLogger; +}()); +export { ConsoleHttpPipelineLogger }; +//# sourceMappingURL=httpPipelineLogger.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map new file mode 100644 index 0000000..00f17be --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.js","sourceRoot":"","sources":["../../lib/httpPipelineLogger.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAoB9D;;GAEG;AACH;IACE;;;OAGG;IACH,mCAAmB,eAAqC;QAArC,oBAAe,GAAf,eAAe,CAAsB;IAAG,CAAC;IAE5D;;;;OAIG;IACH,uCAAG,GAAH,UAAI,QAA8B,EAAE,OAAe;QACjD,IAAM,UAAU,GAAM,oBAAoB,CAAC,QAAQ,CAAC,UAAK,OAAS,CAAC;QACnE,QAAQ,QAAQ,EAAE;YAChB,KAAK,oBAAoB,CAAC,KAAK;gBAC7B,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC1B,MAAM;YAER,KAAK,oBAAoB,CAAC,OAAO;gBAC/B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;gBACzB,MAAM;YAER,KAAK,oBAAoB,CAAC,IAAI;gBAC5B,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;gBACxB,MAAM;SACT;IACH,CAAC;IACH,gCAAC;AAAD,CAAC,AA5BD,IA4BC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts new file mode 100644 index 0000000..1244b50 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts @@ -0,0 +1,42 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, AbortSignalLike, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { CommonRequestInfo, CommonRequestInit, CommonResponse } from "./fetchHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { AgentSettings, ProxySettings, ServiceClient, ServiceClientOptions, flattenResponse, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { 
throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { TelemetryInfo, userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { DeserializationContentTypes, deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, delay, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { TokenResponse } from "./credentials/tokenResponse"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from "./credentials/domainCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; +//# sourceMappingURL=msRest.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map new file mode 100644 index 0000000..7737b32 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.d.ts","sourceRoot":"","sources":["../../lib/msRest.ts"],"names":[],"mappings":";AAKA,OAAO,EACL,WAAW,EACX,eAAe,EACf,eAAe,EACf,qBAAqB,EACrB,WAAW,EACX,cAAc,EACd,kBAAkB,EAClB,qBAAqB,EACrB,eAAe,GAChB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACzF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AACzF,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC5F,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACrB,aAAa,GACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EACL,aAAa,EACb,aAAa,EACb,aAAa,EACb,oBAAoB,EACpB,eAAe,GAChB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACpB,wBAAwB,GACzB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,aAAa,EACb,eAAe,EACf,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,2BAA2B,EAC3B,qBAAqB,EACrB,uBAAuB,GACxB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,mBAAmB,EACnB,oBAAoB,EACpB,kBAAkB,EAClB,cAAc,EACd,MAAM,EACN,UAAU,EACV,eAAe,EACf,cAAc,EACd,gBAAgB,EAChB,UAAU,EACV,iBAAiB,EACjB,wBAAwB,EACxB,UAAU,EACV,iBAAiB,EACjB,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,KAAK,EACL,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EACT,eAAe,EACf,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAG7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAC5D,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAAE,uBAAuB,EAAE,MAAM,iCAAiC,CAAC;AAC7F,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAClF,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,8BAA8B,EAAE,MAAM,mDAAmD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.js b/node_modules/@azure/ms-rest-js/es/lib/msRest.js new file mode 100644 index 0000000..ba1ead7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+/// +export { WebResource, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpHeaders } from "./httpHeaders"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { ServiceClient, flattenResponse, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicyOptions, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { MapperType, Serializer, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, delay, executePromisesSequentially, generateUuid, encodeUri, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +// Credentials +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials } from "./credentials/apiKeyCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from "./credentials/domainCredentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; +//# sourceMappingURL=msRest.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map b/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map new file mode 100644 index 0000000..038a369 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.js","sourceRoot":"","sources":["../../lib/msRest.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,yCAAyC;AAEzC,OAAO,EACL,WAAW,GASZ,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAGxD,OAAO,EAAc,WAAW,EAAmC,MAAM,eAAe,CAAC;AAGzF,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAUxC,OAAO,EAGL,aAAa,EAEb,eAAe,GAChB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACjD,OAAO,EACL,iBAAiB,EAGjB,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAmB,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAEL,eAAe,EACf,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAEL,qBAAqB,EACrB,uBAAuB,GACxB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EAcV,UAAU,EAEV,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,KAAK,EACL,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EAET,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAE7C,cAAc;AACd,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAElE,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,iCAAiC,CAAC;AAE7F,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AAEpE,OAAO,EAAE,8BAA8B,EAAE,MAAM,mDAAmD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts new file mode 100644 index 0000000..163a678 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts @@ -0,0 +1,9 @@ +import { CommonRequestInfo, CommonRequestInit, CommonResponse, FetchHttpClient } from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class NodeFetchHttpClient extends FetchHttpClient { + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + prepareRequest(httpRequest: WebResourceLike): Promise>; + processRequest(_operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map new file mode 100644 index 0000000..dc6d35f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAOA,OAAO,EACL,iBAAiB,EACjB,iBAAiB,EACjB,cAAc,EACd,eAAe,EAChB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAGhD,qBAAa,mBAAoB,SAAQ,eAAe;IAChD,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;IAIlF,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAkC3E,cAAc,CAAC,kBAAkB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;CAI/E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js new file mode 100644 
index 0000000..06047a6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __extends, __generator } from "tslib"; +import * as http from "http"; +import * as https from "https"; +import node_fetch from "node-fetch"; +import { FetchHttpClient, } from "./fetchHttpClient"; +import { createProxyAgent } from "./proxyAgent"; +var NodeFetchHttpClient = /** @class */ (function (_super) { + __extends(NodeFetchHttpClient, _super); + function NodeFetchHttpClient() { + return _super !== null && _super.apply(this, arguments) || this; + } + NodeFetchHttpClient.prototype.fetch = function (input, init) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, node_fetch(input, init)]; + }); + }); + }; + NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var requestInit, _a, httpAgent, httpsAgent, tunnel, options, agent; + return __generator(this, function (_b) { + requestInit = {}; + if (httpRequest.agentSettings) { + _a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } + else if (httpAgent) { + requestInit.agent = httpAgent; + } + } + else if (httpRequest.proxySettings) { + tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + requestInit.agent = tunnel.agent; + } + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } + else { + options = { keepAlive: true }; + agent = httpRequest.url.startsWith("https") + ? 
new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + return [2 /*return*/, requestInit]; + }); + }); + }; + NodeFetchHttpClient.prototype.processRequest = function (_operationResponse) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + /* no_op */ + return [2 /*return*/]; + }); + }); + }; + return NodeFetchHttpClient; +}(FetchHttpClient)); +export { NodeFetchHttpClient }; +//# sourceMappingURL=nodeFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map new file mode 100644 index 0000000..4fe3165 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.js","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAC;AAEpC,OAAO,EAIL,eAAe,GAChB,MAAM,mBAAmB,CAAC;AAG3B,OAAO,EAAE,gBAAgB,EAAc,MAAM,cAAc,CAAC;AAE5D;IAAyC,uCAAe;IAAxD;;IA2CA,CAAC;IA1CO,mCAAK,GAAX,UAAY,KAAwB,EAAE,IAAwB;;;gBAC5D,sBAAQ,UAAU,CAAC,KAAK,EAAE,IAAI,CAAwC,EAAC;;;KACxE;IAEK,4CAAc,GAApB,UAAqB,WAA4B;;;;gBACzC,WAAW,GAA2C,EAAE,CAAC;gBAE/D,IAAI,WAAW,CAAC,aAAa,EAAE;oBACvB,KAAyC,WAAW,CAAC,aAAa,EAA1D,SAAS,UAAA,EAAS,UAAU,WAAA,CAA+B;oBACzE,IAAI,UAAU,IAAI,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;wBACrD,WAAW,CAAC,KAAK,GAAG,UAAU,CAAC;qBAChC;yBAAM,IAAI,SAAS,EAAE;wBACpB,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;qBAC/B;iBACF;qBAAM,IAAI,WAAW,CAAC,aAAa,EAAE;oBAC9B,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;oBACF,WAAW,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;iBAClC;gBAED,IAAI,WAAW,CAAC,SAAS,KAAK,IAAI,EAAE;oBAClC,IAAI,WAAW,CAAC,KAAK,EAAE;wBACrB,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;qBACpC;yBAAM;wBACC,OAAO,GAA2C,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;wBACtE,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC;4BAC/C,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC;4BAC1B,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;wBAC5B,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;qBAC3B;iBACF;gBAED,sBAAO,WAAW,EAAC;;;KACpB;IAEK,4CAAc,GAApB,UAAqB,kBAAyC;;;gBAC5D,WAAW;gBACX,sBAAO;;;KACR;IACH,0BAAC;AAAD,CAAC,AA3CD,CAAyC,eAAe,GA2CvD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts new file mode 100644 index 0000000..7eadcda --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. 
+ */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map new file mode 100644 index 0000000..e05a58f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.d.ts","sourceRoot":"","sources":["../../lib/operationArguments.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,CAAC,aAAa,EAAE,MAAM,GAAG,GAAG,CAAC;IAE7B;;OAEG;IACH,OAAO,CAAC,EAAE,kBAAkB,CAAC;CAC9B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js new file mode 100644 index 0000000..c8bbde7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=operationArguments.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map new file mode 100644 index 0000000..ee2fe5c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.js","sourceRoot":"","sources":["../../lib/operationArguments.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts new file mode 100644 index 0000000..f5b7802 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts @@ -0,0 +1,51 @@ +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { Mapper } from "./serializer"; +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). 
+ * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map new file mode 100644 index 0000000..2cc31fa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.d.ts","sourceRoot":"","sources":["../../lib/operationParameter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,oBAAY,aAAa,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,aAAa,CAAA;CAAE,CAAC;AAE1F;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,EAAE,aAAa,CAAC;IAE7B;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,kBAAkB;IAC/D;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,kBAAkB;IACjE;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,qBAAqB,CAAC;CAC1C;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,SAAS,EAAE,kBAAkB,GAAG,MAAM,CAEhF;AAED,wBAAgB,8BAA8B,CAC5C,aAAa,EAAE,aAAa,EAC5B,MAAM,EAAE,MAAM,GACb,MAAM,CAUR"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js new file mode 100644 index 0000000..8ab319a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. 
+ */ +export function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +export function getPathStringFromParameterPath(parameterPath, mapper) { + var result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +//# sourceMappingURL=operationParameter.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map new file mode 100644 index 0000000..ba98f5f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.js","sourceRoot":"","sources":["../../lib/operationParameter.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAkD/F;;;;GAIG;AACH,MAAM,UAAU,0BAA0B,CAAC,SAA6B;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAED,MAAM,UAAU,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;IAEd,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;KACxB;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAClC;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;KACjC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts new file mode 100644 index 0000000..f99c600 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts @@ -0,0 +1,15 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. + */ + bodyMapper?: Mapper; +} +//# sourceMappingURL=operationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map new file mode 100644 index 0000000..d96474a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.d.ts","sourceRoot":"","sources":["../../lib/operationResponse.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js new file mode 100644 index 0000000..b6e3281 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+//# sourceMappingURL=operationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map new file mode 100644 index 0000000..a31b9a5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.js","sourceRoot":"","sources":["../../lib/operationResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts new file mode 100644 index 0000000..9be1307 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts @@ -0,0 +1,68 @@ +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { OperationResponse } from "./operationResponse"; +import { Serializer } from "./serializer"; +import { HttpMethods } from "./webResource"; +/** + * A specification that defines an operation. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. + */ + readonly headerParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + /** + * The different types of responses that this operation can return based on what status code is + * returned. 
+ */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +export declare function isStreamOperation(operationSpec: OperationSpec): boolean; +//# sourceMappingURL=operationSpec.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map new file mode 100644 index 0000000..f2d4efb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.d.ts","sourceRoot":"","sources":["../../lib/operationSpec.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACtB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAE5C;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;IAEhC;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,WAAW,CAAC;IAEjC;;;;OAIG;IACH,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE1B;;;OAGG;IACH,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IAEvB;;;OAGG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAE9B;;OAEG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,kBAAkB,CAAC;IAE1C;;OAEG;IACH,QAAQ,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;IAE9D;;OAEG;IACH,QAAQ,CAAC,eAAe,CAAC,EAAE,aAAa,CAAC,uBAAuB,CAAC,CAAC;IAElE;;;OAGG;IACH,QAAQ,CAAC,gBAAgB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAE9D;;;OAGG;IACH,QAAQ,CAAC,kBAAkB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAEhE;;;OAGG;IACH,QAAQ,CAAC,SAAS,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,iBAAiB,CAAA;KAAE,CAAC;CACnE;AAED,wBAAgB,iBAAiB,CAAC,aAAa,EAAE,aAAa,GAAG,OAAO,CAavE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js new file mode 100644 index 0000000..06b8223 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { MapperType } from "./serializer"; +export function isStreamOperation(operationSpec) { + var result = false; + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result = true; + break; + } + } + return result; +} +//# sourceMappingURL=operationSpec.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map new file mode 100644 index 0000000..667db1a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.js","sourceRoot":"","sources":["../../lib/operationSpec.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAQ/F,OAAO,EAAE,UAAU,EAAc,MAAM,cAAc,CAAC;AA2EtD,MAAM,UAAU,iBAAiB,CAAC,aAA4B;IAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QACjF,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,GAAG,IAAI,CAAC;YACd,MAAM;SACP;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts new file mode 100644 index 0000000..7893df0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts @@ -0,0 +1,10 @@ +import { AgentSettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory; +export declare class AgentPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=agentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map new file mode 100644 index 0000000..eefa217 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB;IAKjE,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js new file mode 100644 index 0000000..ac61fd5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
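Aside on isStreamOperation, whose implementation appears at the start of the hunk above: an operation counts as a stream operation when any declared response has a Stream bodyMapper. A minimal, self-contained paraphrase with simplified local types (not the package's own OperationSpec/Serializer types):

```ts
// Hypothetical mini-types standing in for OperationResponse/OperationSpec.
interface MiniResponse { bodyMapper?: { type: { name: string } } }
interface MiniSpec { responses: { [statusCode: string]: MiniResponse } }

// Same check as isStreamOperation above: true if any response body is a Stream mapper.
function isStream(spec: MiniSpec): boolean {
  return Object.values(spec.responses).some(
    (r) => r.bodyMapper?.type.name === "Stream"
  );
}

const downloadSpec: MiniSpec = {
  responses: {
    200: { bodyMapper: { type: { name: "Stream" } } }, // body is streamed, not parsed
    default: {},                                        // error responses carry no body mapper
  },
};
console.log(isStream(downloadSpec)); // true
```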
+// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +var agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); +export function agentPolicy(_agentSettings) { + return { + create: function (_nextPolicy, _options) { + throw agentNotSupportedInBrowser; + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw agentNotSupportedInBrowser; + return _this; + } + AgentPolicy.prototype.sendRequest = function (_request) { + throw agentNotSupportedInBrowser; + }; + return AgentPolicy; +}(BaseRequestPolicy)); +export { AgentPolicy }; +//# sourceMappingURL=agentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map new file mode 100644 index 0000000..eec280a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;YACrE,MAAM,0BAA0B,CAAC;QACnC,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAChD,qBAAY,UAAyB,EAAE,OAAiC;QAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,MAAM,0BAA0B,CAAC;;IACnC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,QAAyB;QAC1C,MAAM,0BAA0B,CAAC;IACnC,CAAC;IACH,kBAAC;AAAD,CAAC,AATD,CAAiC,iBAAiB,GASjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts new file mode 100644 index 0000000..84d197e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts @@ -0,0 +1,11 @@ +import { AgentSettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory; +export declare class AgentPolicy extends BaseRequestPolicy { + agentSettings: AgentSettings; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, agentSettings: AgentSettings); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=agentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map new file mode 100644 index 0000000..429e0a5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"agentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD,wBAAgB,WAAW,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAM/E;AAED,qBAAa,WAAY,SAAQ,iBAAiB;IAChD,aAAa,EAAE,aAAa,CAAC;gBAG3B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,aAAa,EAAE,aAAa;IAMvB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js new file mode 100644 index 0000000..4cdfb5a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function agentPolicy(agentSettings) { + return { + create: function (nextPolicy, options) { + return new AgentPolicy(nextPolicy, options, agentSettings); + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options, agentSettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.agentSettings = agentSettings; + return _this; + } + AgentPolicy.prototype.sendRequest = function (request) { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + }; + return AgentPolicy; +}(BaseRequestPolicy)); +export { AgentPolicy }; +//# sourceMappingURL=agentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map new file mode 100644 index 0000000..15d82ec --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,UAAU,WAAW,CAAC,aAA6B;IACvD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;IACrC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,kBAAC;AAAD,CAAC,AAlBD,CAAiC,iBAAiB,GAkBjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts new file mode 100644 index 0000000..de2ef2c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts @@ -0,0 +1,38 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; 
+/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + constructor(nextPolicy: RequestPolicy, deserializationContentTypes: DeserializationContentTypes | undefined, options: RequestPolicyOptionsLike); + sendRequest(request: WebResourceLike): Promise; +} +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map new file mode 100644 index 0000000..f5771b5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"deserializationPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAOjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB;;;GAGG;AACH,MAAM,WAAW,2BAA2B;IAC1C;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC;IAEhB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC;CAChB;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CACnC,2BAA2B,CAAC,EAAE,2BAA2B,GACxD,oBAAoB,CAMtB;AAED,eAAO,MAAM,uBAAuB,UAAoC,CAAC;AACzE,eAAO,MAAM,sBAAsB,UAA8C,CAAC;AAElF;;;GAGG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,SAAgB,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3C,SAAgB,eAAe,EAAE,MAAM,EAAE,CAAC;gBAGxC,UAAU,EAAE,aAAa,EACzB,2BAA2B,EAAE,2BAA2B,GAAG,SAAS,EACpE,OAAO,EAAE,wBAAwB;IAUtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAOnF;AAsCD,wBAAgB,uBAAuB,CACrC,gBAAgB,EAAE,MAAM,EAAE,EAC1B,eAAe,EAAE,MAAM,EAAE,EACzB,QAAQ,EAAE,qBAAqB,GAC9B,OAAO,CAAC,qBAAqB,CAAC,CA4HhC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js new file mode 100644 index 0000000..833bb48 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js @@ -0,0 +1,213 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
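Aside on DeserializationContentTypes, declared above with JSON defaulting to ["application/json", "text/json"] and XML to ["application/xml", "application/atom+xml"]: a short sketch of overriding those lists, assuming the deserializationPolicy factory is re-exported from the package root as in @azure/ms-rest-js v2. Responses whose Content-Type matches neither list are left unparsed on bodyAsText.

```ts
import { deserializationPolicy } from "@azure/ms-rest-js";

const factory = deserializationPolicy({
  json: ["application/json", "text/json", "application/hal+json"], // one extra JSON media type
  xml: ["application/xml", "application/atom+xml"],                // the documented defaults
});
// `factory` is a RequestPolicyFactory; it is handed to a client's pipeline
// (e.g. a requestPolicyFactories array) rather than invoked directly.
```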
+import { __awaiter, __extends, __generator } from "tslib"; +import { isStreamOperation } from "../operationSpec"; +import { RestError } from "../restError"; +import { MapperType } from "../serializer"; +import * as utils from "../util/utils"; +import { parseXML } from "../util/xml"; +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} +export var defaultJsonContentTypes = ["application/json", "text/json"]; +export var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +var DeserializationPolicy = /** @class */ (function (_super) { + __extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; +}(BaseRequestPolicy)); +export { DeserializationPolicy }; +function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +export function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = 
hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? "Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = utils.stripRequest(parsedResponse.request); + error.response = utils.stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = utils.stripRequest(parsedResponse.request); + restError.response = utils.stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map new file mode 100644 index 0000000..44123c3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"deserializationPolicy.js","sourceRoot":"","sources":["../../../lib/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAI/F,OAAO,EAAiB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAU,UAAU,EAAE,MAAM,eAAe,CAAC;AACnD,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AACvC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAoBzB;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CACnC,2BAAyD;IAEzD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,MAAM,CAAC,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAElF;;;GAGG;AACH;IAA2C,yCAAiB;IAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;QAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;QAJC,KAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,CAAC,IAAI,uBAAuB,CAAC;QAC/F,KAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,CAAC,IAAI,sBAAsB,CAAC;;IAC/F,CAAC;IAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;gBAC/C,sBAAO,IAAI,CAAC,WAAW;yBACpB,WAAW,CAAC,OAAO,CAAC;yBACpB,IAAI,CAAC,UAAC,QAA+B;wBACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;oBAA9E,CAA8E,CAC/E,EAAC;;;KACL;IACH,4BAAC;AAAD,CAAC,AAxBD,CAA2C,iBAAiB,GAwB3D;;AAED,SAAS,oBAAoB,CAC3B,cAAqC;IAErC,IAAI,MAAqC,CAAC;IAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;IACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;IACvE,IAAI,aAAa,EAAE;QACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;SACzD;aAAM;YACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;SACjE;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC;IACtE,IAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAC3C,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;KACf;SAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;KAC5B;SAAM;QACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;KAC5C;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;IAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;QAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;QAC7E,IAAI,iBAAiB,EAAE;YACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;YACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;gBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;gBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;gBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;oBAChC,CAAC,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;gBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;gBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;oBAC5D,CAAC,CAAC,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;oBACvC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;gBACnB,IAAI,CAAC,oBAAoB,EAAE;oBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;oBAC/E,IAAI,mBAAmB,EAAE;wBACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;4BAClE,CAAC,CAAC,6BAA2B,UAAY;4BACzC,CAAC,CAAE,cAAc,CAAC,UAAqB,CAAC;wBAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;wBACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;wBAC9B,KAAK,CAAC,OAAO,GAAG,KAAK,CAAC,YAAY,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;wBAC3D,KAAK,CAAC,QAAQ,GAAG,KAAK,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC;wBAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;wBAC5E,IAAI;4BACF,IAAI,mBAAmB,EAAE;gCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;gCACjC,IACE,yBAAyB;oCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;oCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCACjD;oCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;wCAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;qCAC
vC;oCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;wCAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;qCAC7C;iCACF;qCAAM;oCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;oCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCAC3C;oCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;oCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;wCACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;qCACvC;iCACF;gCAED,IAAI,yBAAyB,EAAE;oCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;oCAClD,IACE,aAAa,CAAC,KAAK;wCACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;wCACA,kBAAkB;4CAChB,OAAO,mBAAmB,KAAK,QAAQ;gDACrC,CAAC,CAAC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;gDAChE,CAAC,CAAC,EAAE,CAAC;qCACV;oCACD,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;iCACH;6BACF;yBACF;wBAAC,OAAO,YAAY,EAAE;4BACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;yBAC7J;wBACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;qBAC9B;iBACF;qBAAM,IAAI,YAAY,EAAE;oBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;wBAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;wBACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpF,kBAAkB;gCAChB,OAAO,kBAAkB,KAAK,QAAQ;oCACpC,CAAC,CAAC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;oCAC7D,CAAC,CAAC,EAAE,CAAC;yBACV;wBACD,IAAI;4BACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;yBACH;wBAAC,OAAO,KAAK,EAAE;4BACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;4BACF,SAAS,CAAC,OAAO,GAAG,KAAK,CAAC,YAAY,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC/D,SAAS,CAAC,QAAQ,GAAG,KAAK,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC;4BACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;yBAClC;qBACF;yBAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;wBAC9C,uGAAuG;wBACvG,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;qBAC7E;oBAED,IAAI,YAAY,CAAC,aAAa,EAAE;wBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;qBACH;iBACF;aACF;SACF;QACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;IACzC,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;IAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;QACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;QACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;QAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC3B,CAAC,CAAC;IAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;QACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;QAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;YAC9C,CAAC,CAAC,EAAE;YACJ,CAAC,CAAC,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,EAAvB,CAAuB,CAAC,CAAC;QACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;YAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAA1C,CAA0C,CAAC,EACjF;YACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;gBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAzC,CAAyC,CAAC,EAAE;YAC3F,OAAO,QAAQ,CAAC,MAAI,CAAC;iBAClB,IAAI,CAAC,UAAC,IAAI;gBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;gBACpC,OAAO,iBAAiB,CAAC;YAC3B,CAAC,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C,CAAC"} \ No 
newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 0000000..3442911 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,48 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The minimum retry interval in milliseconds. + */ + minRetryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. 
+ */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map new file mode 100644 index 0000000..a972a4c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAGzB,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAOD;;;GAGG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IACzB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IAEzB;;;;;;;;OAQG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAgBpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js new file mode 100644 index 0000000..82029bf --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { RestError } from "../restError"; +export function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +var DEFAULT_CLIENT_RETRY_COUNT = 3; +var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +var ExponentialRetryPolicy = /** @class */ (function (_super) { + __extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. 
+ * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; +}(BaseRequestPolicy)); +export { ExponentialRetryPolicy }; +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return utils + .delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map new file mode 100644 index 0000000..f424d43 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map @@ -0,0 +1 @@ 
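Aside on the back-off arithmetic in updateRetryData above, using the defaults declared earlier in this file (retry interval 30s, minimum 3s, maximum 90s, 3 retries): the delay grows with 2^retryCount - 1, scaled by a jitter term between 0.8x and 1.2x of the base interval, and is capped at the maximum. A hedged restatement:

```ts
// Restates updateRetryData's delay computation with the default constants shown above.
const retryInterval = 30 * 1000;     // DEFAULT_CLIENT_RETRY_INTERVAL
const minRetryInterval = 3 * 1000;   // DEFAULT_CLIENT_MIN_RETRY_INTERVAL
const maxRetryInterval = 90 * 1000;  // DEFAULT_CLIENT_MAX_RETRY_INTERVAL

function nextDelayMs(retryCount: number): number {
  const incrementDelta = Math.pow(2, retryCount) - 1;
  const boundedRandDelta =
    retryInterval * 0.8 +
    Math.floor(Math.random() * (retryInterval * 1.2 - retryInterval * 0.8));
  return Math.min(minRetryInterval + incrementDelta * boundedRandDelta, maxRetryInterval);
}

// First retry: roughly 27-39s; by the third retry the 90s cap is always hit,
// so maxRetryInterval is the practical ceiling.
console.log(nextDelayMs(1), nextDelayMs(2), nextDelayMs(3));
```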
+{"version":3,"file":"exponentialRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAczC,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;AACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEnD;;;GAGG;AACH;IAA4C,0CAAiB;IAkB3D;;;;;;;;OAQG;IACH,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAXC,SAAS,QAAQ,CAAC,CAAM;YACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;QAC/B,CAAC;QACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;QACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;;IACxC,CAAC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAKC;QAJC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAA9B,CAA8B,CAAC;aAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,EAAtD,CAAsD,CAAC,CAAC;IAC9E,CAAC;IACH,6BAAC;AAAD,CAAC,AAvDD,CAA4C,iBAAiB,GAuD5D;;AAED;;;;;;;GAOG;AACH,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;IAEpB,IACE,UAAU,IAAI,SAAS;QACvB,CAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;QACxC,UAAU,KAAK,GAAG;QAClB,UAAU,KAAK,GAAG,EAClB;QACA,OAAO,KAAK,CAAC;KACd;IAED,IAAI,YAAoB,CAAC;IACzB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;GAMG;AACH,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;QAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;IAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;QAC7E,OAAO,KAAK;aACT,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC;aAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAA/C,CAA+C,CAAC;aAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,EAAjD,CAAiD,CAAC;aAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC
,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,EAAhD,CAAgD,CAAC,CAAC;KACrE;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;QACjD,qFAAqF;QACrF,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;QACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;SAAM;QACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAClC;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 0000000..d08160d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map new file mode 100644 index 0000000..fb5c7a1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,6BAA6B,CAC3C,mBAAmB,SAA2B,GAC7C,oBAAoB,CAMtB;AAED,qBAAa,6BAA8B,SAAQ,iBAAiB;IAIhE,OAAO,CAAC,oBAAoB;gBAF5B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACzB,oBAAoB,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js new file mode 100644 index 0000000..d7c3118 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + __extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, utils.generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; +}(BaseRequestPolicy)); +export { GenerateClientRequestIdPolicy }; +//# sourceMappingURL=generateClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map new file mode 100644 index 0000000..56227fa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.js","sourceRoot":"","sources":["../../../lib/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,6BAA6B,CAC3C,mBAA8C;IAA9C,oCAAA,EAAA,8CAA8C;IAE9C,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAmD,iDAAiB;IAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;QAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;IAGtC,CAAC;IAEM,mDAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;SACtE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,oCAAC;AAAD,CAAC,AAfD,CAAmD,iBAAiB,GAenE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts new file mode 100644 index 0000000..6239967 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function logPolicy(logger?: any): RequestPolicyFactory; +export declare class LogPolicy extends BaseRequestPolicy { + logger?: any; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, logger?: any); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=logPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map 
b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map new file mode 100644 index 0000000..4108f58 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/logPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,SAAS,CAAC,MAAM,GAAE,GAAiB,GAAG,oBAAoB,CAMzE;AAED,qBAAa,SAAU,SAAQ,iBAAiB;IAC9C,MAAM,CAAC,EAAE,GAAG,CAAC;gBAGX,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,MAAM,GAAE,GAAiB;IAMpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js new file mode 100644 index 0000000..cfa3e24 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} +var LogPolicy = /** @class */ (function (_super) { + __extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; +}(BaseRequestPolicy)); +export { LogPolicy }; +function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map new file mode 100644 index 0000000..ef5ba96 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map @@ -0,0 +1 @@ 
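Aside on how the factories added in this diff fit together: each one returns a RequestPolicyFactory, and a client composes them into a pipeline. A sketch under the assumption that these factories and ServiceClient are re-exported from the package root and that ServiceClientOptions accepts a requestPolicyFactories array, as in @azure/ms-rest-js v2; the arguments are purely illustrative.

```ts
import {
  ServiceClient,
  deserializationPolicy,
  exponentialRetryPolicy,
  generateClientRequestIdPolicy,
  logPolicy,
} from "@azure/ms-rest-js";

const client = new ServiceClient(undefined, {
  requestPolicyFactories: [
    generateClientRequestIdPolicy("x-ms-client-request-id"), // stamp a request id if missing
    deserializationPolicy(),                                  // parse JSON/XML response bodies
    exponentialRetryPolicy(3, 30_000),                        // retry count and base interval (ms)
    logPolicy(console.log),                                   // dump request/response to the console
  ],
});
```

Order is significant here, since each policy wraps the one after it; supplying an explicit array like this is generally understood to replace the client's default pipeline rather than extend it.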
+{"version":3,"file":"logPolicy.js","sourceRoot":"","sources":["../../../lib/policies/logPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAI/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,SAAS,CAAC,MAAyB;IAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;IACjD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QACpD,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAA+B,6BAAiB;IAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;IACvB,CAAC;IAEM,+BAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAEC;QADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,EAA3B,CAA2B,CAAC,CAAC;IAC/F,CAAC;IACH,gBAAC;AAAD,CAAC,AAfD,CAA+B,iBAAiB,GAe/C;;AAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;IAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;IAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,CAAC,CAAC;IAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;IACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAAc,CAAC,CAAC;IAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 0000000..3ff8522 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map new file mode 100644 index 0000000..39d10a8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAOlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAQzD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js new file mode 100644 index 0000000..2fe6588 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export function getDefaultUserAgentKey() { + return "x-ms-command-name"; +} +export function getPlatformSpecificData() { + var navigator = self.navigator; + var osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map new file mode 100644 index 0000000..d96a097 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAc/F,MAAM,UAAU,sBAAsB;IACpC,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,IAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;IAChD,IAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;KAChE,CAAC;IAEF,OAAO,CAAC,MAAM,CAAC,CAAC;AAClB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 0000000..1fac888 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map new file mode 100644 index 0000000..9ad4183 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAGlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAYzD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js new file mode 100644 index 0000000..4062243 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import * as os from "os"; +import { Constants } from "../util/constants"; +export function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +export function getPlatformSpecificData() { + var runtimeInfo = { + key: "Node", + value: process.version, + }; + var osInfo = { + key: "OS", + value: "(" + os.arch() + "-" + os.type() + "-" + os.release() + ")", + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map new file mode 100644 index 0000000..8db5ec5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,MAAM,UAAU,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,IAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,IAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,MAAI,EAAE,CAAC,IAAI,EAAE,SAAI,EAAE,CAAC,IAAI,EAAE,SAAI,EAAE,CAAC,OAAO,EAAE,MAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts new file mode 100644 index 0000000..5c2b574 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { ProxySettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map new file mode 100644 index 0000000..ad17d1c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,uBAAuB,CAAC,SAAS,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAErF;AAED,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB;IAKjE,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js new file mode 100644 index 0000000..c8144de --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +var proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); +export function getDefaultProxySettings(_proxyUrl) { + return undefined; +} +export function proxyPolicy(_proxySettings) { + return { + create: function (_nextPolicy, _options) { + throw proxyNotSupportedInBrowser; + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw proxyNotSupportedInBrowser; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (_request) { + throw proxyNotSupportedInBrowser; + }; + return ProxyPolicy; +}(BaseRequestPolicy)); +export { ProxyPolicy }; +//# sourceMappingURL=proxyPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map new file mode 100644 index 0000000..7fe9c53 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,MAAM,UAAU,uBAAuB,CAAC,SAAkB;IACxD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;YACrE,MAAM,0BAA0B,CAAC;QACnC,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAChD,qBAAY,UAAyB,EAAE,OAAiC;QAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,MAAM,0BAA0B,CAAC;;IACnC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,QAAyB;QAC1C,MAAM,0BAA0B,CAAC;IACnC,CAAC;IACH,kBAAC;AAAD,CAAC,AATD,CAAiC,iBAAiB,GASjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts new file mode 100644 index 0000000..565fce8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts @@ -0,0 +1,24 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * @internal + */ +export declare const noProxyList: string[]; +/** + * @internal + */ +export declare function getEnvironmentValue(name: string): string | undefined; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + constructor(nextPolicy: 
RequestPolicy, options: RequestPolicyOptionsLike, proxySettings: ProxySettings); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map new file mode 100644 index 0000000..ab11973 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD;;GAEG;AACH,eAAO,MAAM,WAAW,EAAE,MAAM,EAAkB,CAAC;AAGnD;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAOpE;AA+CD;;GAEG;AACH,wBAAgB,WAAW,IAAI,MAAM,EAAE,CAUtC;AA4BD,wBAAgB,uBAAuB,CAAC,QAAQ,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAiBpF;AAED,wBAAgB,WAAW,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAS/E;AAED,qBAAa,WAAY,SAAQ,iBAAiB;IAChD,aAAa,EAAE,aAAa,CAAC;gBAG3B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,aAAa,EAAE,aAAa;IAMvB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js new file mode 100644 index 0000000..2f0ef86 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; +/** + * @internal + */ +export var noProxyList = loadNoProxy(); +var byPassedList = new Map(); +/** + * @internal + */ +export function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. 
+// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri) { + if (noProxyList.length === 0) { + return false; + } + var host = URLBuilder.parse(uri).getHost(); + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + var isBypassedFlag = false; + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var pattern = noProxyList_1[_i]; + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +export function loadNoProxy() { + var noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map(function (item) { return item.trim(); }) + .filter(function (item) { return item.length; }); + } + return []; +} +/** + * @internal + */ +function extractAuthFromUrl(url) { + var atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + var schemeIndex = url.indexOf("://"); + var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + var auth = url.substring(authStart, atIndex); + var colonIndex = auth.indexOf(":"); + var hasPassword = colonIndex !== -1; + var username = hasPassword ? auth.substring(0, colonIndex) : auth; + var password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username: username, + password: password, + urlWithoutAuth: urlWithoutAuth, + }; +} +export function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth; + var parsedUrl = URLBuilder.parse(urlWithoutAuth); + var schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username: username, + password: password, + }; +} +export function proxyPolicy(proxySettings) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: function (nextPolicy, options) { + return new ProxyPolicy(nextPolicy, options, proxySettings); + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options, proxySettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.proxySettings = proxySettings; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (request) { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + }; + return ProxyPolicy; +}(BaseRequestPolicy)); +export { ProxyPolicy }; +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map new file mode 100644 index 0000000..86f8ba6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.js","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC;;GAEG;AACH,MAAM,CAAC,IAAM,WAAW,GAAa,WAAW,EAAE,CAAC;AACnD,IAAM,YAAY,GAAyB,IAAI,GAAG,EAAE,CAAC;AAErD;;GAEG;AACH,MAAM,UAAU,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,IAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,IAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,IAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED,iEAAiE;AACjE,2FAA2F;AAC3F,mMAAmM;AACnM,SAAS,UAAU,CAAC,GAAW;IAC7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,IAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC/B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAsB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA9B,IAAM,OAAO,oBAAA;QAChB,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YACtB,mEAAmE;YACnE,mDAAmD;YACnD,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,IAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,IAAI,EAAE,EAAX,CAAW,CAAC;aAC1B,MAAM,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,MAAM,EAAX,CAAW,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;GAEG;AACH,S
AAS,kBAAkB,CACzB,GAAW;IAEX,IAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,IAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,IAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3D,IAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,IAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,IAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,IAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACpE,IAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1E,IAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,cAAc,gBAAA;KACf,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAEK,IAAA,KAAyC,kBAAkB,CAAC,QAAQ,CAAC,EAAnE,QAAQ,cAAA,EAAE,QAAQ,cAAA,EAAE,cAAc,oBAAiC,CAAC;IAC5E,IAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,IAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ,UAAA;QACR,QAAQ,UAAA;KACT,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,aAA6B;IACvD,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;IACrC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACtD,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,kBAAC;AAAD,CAAC,AAlBD,CAAiC,iBAAiB,GAkBjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts new file mode 100644 index 0000000..308149b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts @@ -0,0 +1,18 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +/** + * Options for how redirect responses are handled. 
+ */ +export interface RedirectOptions { + handleRedirects: boolean; + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map new file mode 100644 index 0000000..94f16bb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"redirectPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/redirectPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB;;GAEG;AACH,MAAM,WAAW,eAAe;IAI9B,eAAe,EAAE,OAAO,CAAC;IAMzB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,eAAO,MAAM,sBAAsB,EAAE,eAGpC,CAAC;AAEF,wBAAgB,cAAc,CAAC,cAAc,SAAK,GAAG,oBAAoB,CAMxE;AAED,qBAAa,cAAe,SAAQ,iBAAiB;IAIjD,QAAQ,CAAC,UAAU;gBAFnB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACxB,UAAU,SAAK;IAKnB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js new file mode 100644 index 0000000..f6eda4b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import { URLBuilder } from "../url"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +export function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +var RedirectPolicy = /** @class */ (function (_super) { + __extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; +}(BaseRequestPolicy)); +export { RedirectPolicy }; +function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); +} +function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map new file mode 100644 index 0000000..af97d7c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"redirectPolicy.js","sourceRoot":"","sources":["../../../lib/policies/redirectPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAkBzB,MAAM,CAAC,IAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF,MAAM,UAAU,cAAc,CAAC,cAAmB;IAAnB,+BAAA,EAAA,mBAAmB;IAChD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QACjE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAoC,kCAAiB;IACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;QAAf,2BAAA,EAAA,eAAe;QAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,gBAAU,GAAV,UAAU,CAAK;;IAG1B,CAAC;IAEM,oCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,EAAjC,CAAiC,CAAC,CAAC;IAC3D,CAAC;IACH,qBAAC;AAAD,CAAC,AAdD,CAAoC,iBAAiB,GAcpD;;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;IACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;QACd,CAAC,MAAM,KAAK,GAAG;YACb,CAAC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACpE,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;QACjB,CAAC,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa,CAAC;YAC9E,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;QACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;QAEjC,uEAAuE;QACvE,+EAA+E;QAC/E,+GAA+G;QAC/G,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;YACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,EAA/C,CAA+C,CAAC;aAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,EAAhC,CAAgC,CAAC,CAAC;KACpD;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;IACvE,oEAAoE;IACpE,wCAAwC;IACxC,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;QACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;QAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;KACzB;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts new file mode 100644 index 0000000..ff3e61d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts @@ -0,0 +1,71 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. 
+ */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +export interface RequestPolicy { + sendRequest(httpRequest: WebResourceLike): Promise; +} +export declare abstract class BaseRequestPolicy implements RequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptionsLike; + protected constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike); + abstract sendRequest(webResource: WebResourceLike): Promise; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions implements RequestPolicyOptionsLike { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map new file mode 100644 index 0000000..41e5568 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/requestPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAC/D,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,oBAAoB,GAAG;IACjC,MAAM,CAAC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,GAAG,aAAa,CAAC;CACrF,CAAC;AAEF,MAAM,WAAW,aAAa;IAC5B,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC,CAAC;CAC3E;AAED,8BAAsB,iBAAkB,YAAW,aAAa;IAE5D,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAF7C,SAAS,aACE,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,wBAAwB;aAG7B,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAEzF;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAIzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAGlE;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;OAIG;IACH,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO,CAAC;IAEnD;;;;;OAKG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,oBAAqB,YAAW,wBAAwB;IACvD,OAAO,CAAC,OAAO,CAAC;gBAAR,OAAO,CAAC,gCAAoB;IAEhD;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAQzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAKlE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js new file mode 100644 index 0000000..16fb9b3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; +}()); +export { BaseRequestPolicy }; +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. 
+ * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; +}()); +export { RequestPolicyOptions }; +//# sourceMappingURL=requestPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map new file mode 100644 index 0000000..506b5bb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.js","sourceRoot":"","sources":["../../../lib/policies/requestPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAI/F,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAc/D;IACE,2BACW,WAA0B,EAC1B,QAAkC;QADlC,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAA0B;IAC1C,CAAC;IAIJ;;;;OAIG;IACI,qCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACI,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;IACH,wBAAC;AAAD,CAAC,AA1BD,IA0BC;;AAsBD;;GAEG;AACH;IACE,8BAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;IAAG,CAAC;IAEpD;;;;OAIG;IACI,wCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,CACL,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAK,oBAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,CACzC,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;IACH,CAAC;IACH,2BAAC;AAAD,CAAC,AA3BD,IA2BC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 0000000..305f617 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map new file mode 100644 index 0000000..51bbc8a --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,oBAAoB,CAAC,YAAY,SAAK,GAAG,oBAAoB,CAM5E;AAED,qBAAa,oBAAqB,SAAQ,iBAAiB;IAIvD,QAAQ,CAAC,aAAa;gBAFtB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACxB,aAAa,SAAK;IAKtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js new file mode 100644 index 0000000..ad7107f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js @@ -0,0 +1,163 @@ +import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +var RPRegistrationPolicy = /** @class */ (function (_super) { + __extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; +}(BaseRequestPolicy)); +export { RPRegistrationPolicy }; +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. 
+ */ +function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; +} +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. 
+ * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ +function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return utils + .delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); +} +//# sourceMappingURL=rpRegistrationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map new file mode 100644 index 0000000..848ce2f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.js","sourceRoot":"","sources":["../../../lib/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":";AAGA,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,oBAAoB,CAAC,YAAiB;IAAjB,6BAAA,EAAA,iBAAiB;IACpD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAA0C,wCAAiB;IACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;QAAlB,8BAAA,EAAA,kBAAkB;QAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,mBAAa,GAAb,aAAa,CAAK;;IAG7B,CAAC;IAEM,0CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAzC,CAAyC,CAAC,CAAC;IACnE,CAAC;IACH,2BAAC;AAAD,CAAC,AAdD,CAA0C,iBAAiB,GAc1D;;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,OAAO,CACL,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;gBAC5C,wFAAwF;gBACxF,kFAAkF;gBAClF,uFAAuF;iBACtF,KAAK,CAAC,cAAM,OAAA,KAAK,EAAL,CAAK,CAAC;iBAClB,IAAI,CAAC,UAAC,kBAAkB;gBACvB,IAAI,kBAAkB,EAAE;oBACtB,2EAA2E;oBAC3E,0EAA0E;oBAC1E,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC,CACL,CAAC;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;GAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;IAAnB,4BAAA,EAAA,mBAAmB;IAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;IAED,wEAAwE;IACxE,iDAAiD;IACjD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;IAEvE,uCAAuC;IACvC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;GAKG;AACH,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAG,EAAE;YACZ,cAAc;SACf;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CAAC,IAA
I,KAAK,iCAAiC,EAC7D;YACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,IAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;IAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;IACpF,IAAM,MAAM,GAAM,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;IAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;QAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;SAC7F;QACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;IAChE,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;QACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;QAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;YACrF,OAAO,IAAI,CAAC;SACb;aAAM;YACL,OAAO,KAAK;iBACT,KAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;iBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,EAAnD,CAAmD,CAAC,CAAC;SACpE;IACH,CAAC,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts new file mode 100644 index 0000000..b4b2f2d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts @@ -0,0 +1,12 @@ +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicyFactory, RequestPolicy, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map new file mode 100644 index 0000000..91ae8e8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"signingPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/signingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,wBAAwB,EAAE,MAAM,yCAAyC,CAAC;AACnF,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,aAAa,EACb,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,aAAa,CAC3B,sBAAsB,EAAE,wBAAwB,GAC/C,oBAAoB,CAMtB;AAED,qBAAa,aAAc,SAAQ,iBAAiB;IAIzC,sBAAsB,EAAE,wBAAwB;gBAFvD,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EAC1B,sBAAsB,EAAE,wBAAwB;IAKzD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIxD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js new file mode 100644 index 0000000..6a3be13 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +var SigningPolicy = /** @class */ (function (_super) { + __extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; +}(BaseRequestPolicy)); +export { SigningPolicy }; +//# sourceMappingURL=signingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map new file mode 100644 index 0000000..1cc0247 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.js","sourceRoot":"","sources":["../../../lib/policies/signingPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAK/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;QACxE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAmC,iCAAiB;IAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;QAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;IAGzD,CAAC;IAED,mCAAW,GAAX,UAAY,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC1D,CAAC;IAEM,mCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;YAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;QAAzC,CAAyC,CAC1C,CAAC;IACJ,CAAC;IACH,oBAAC;AAAD,CAAC,AAlBD,CAAmC,iBAAiB,GAkBnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts 
new file mode 100644 index 0000000..153e560 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + DEFAULT_CLIENT_RETRY_INTERVAL: number; + DEFAULT_CLIENT_RETRY_COUNT: number; + DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; + DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map new file mode 100644 index 0000000..c38e714 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAED;;;;;;;;;GASG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;IACzB,gBAAgB,EAAE,MAAM,CAAC;IACzB,6BAA6B,SAAa;IAC1C,0BAA0B,SAAK;IAC/B,iCAAiC,SAAa;IAC9C,iCAAiC,SAAY;gBAG3C,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAgBpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js new file mode 100644 index 0000000..06b7c64 --- 
/dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __extends, __generator } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +var SystemErrorRetryPolicy = /** @class */ (function (_super) { + __extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; +}(BaseRequestPolicy)); +export { SystemErrorRetryPolicy }; +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, operationResponse, err, retryData) { + return __awaiter(this, void 0, void 0, function () { + var error_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData(policy, retryData, err); + if (!(err && + err.code && + shouldRetry(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, utils.delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map new file mode 100644 index 0000000..3225fab --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"systemErrorRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAczB,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;;;;;GASG;AACH;IAA4C,0CAAiB;IAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;QAC1C,gCAA0B,GAAG,CAAC,CAAC;QAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;QAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;QAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAI,CAAC,0BAA0B,CAAC;QAChG,KAAI,CAAC,aAAa;YAChB,OAAO,aAAa,KAAK,QAAQ,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,KAAI,CAAC,6BAA6B,CAAC;QACzF,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;gBAClC,CAAC,CAAC,gBAAgB;gBAClB,CAAC,CAAC,KAAI,CAAC,iCAAiC,CAAC;QAC7C,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;gBAClC,CAAC,CAAC,gBAAgB;gBAClB,CAAC,CAAC,KAAI,CAAC,iCAAiC,CAAC;;IAC/C,CAAC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,EAA3C,CAA2C,CAAC,CAAC;IACnE,CAAC;IACH,6BAAC;AAAD,CAAC,AArCD,CAA4C,iBAAiB,GAqC5D;;AAED;;;;;;GAMG;AACH,SAAS,WAAW,CAAC,MAA8B,EAAE,SAAoB;IACvE,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;GAKG;AACH,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAe,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;oBAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;yBAElD,CAAA,GAAG;wBACH,GAAG,CAAC,IAAI;wBACR,WAAW,CAAC,MAAM,EAAE,SAAS,CAAC;wBAC9B,CAAC,GAAG,CAAC,IAAI,KAAK,WAAW;4BACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;4BAC9B,GAAG,CAAC,IAAI,KAAK,cAAc;4BAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;4BACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;oBAItB,qBAAM,KAAK,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;oBAA1C,SAA0C,CAAC;oBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;oBAEvD,sBAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;oBAGrE,IAAI,GAAG,EAAE;wBACP,qFAAqF;wBACrF,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;qBACxC;oBACD,sBAAO,iBAAiB,EAAC;;;;;CAE5B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 
0000000..41ea640 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyOptionsLike, RequestPolicyFactory } from "./requestPolicy"; +import { WebResourceLike } from "../webResource"; +import { HttpOperationResponse } from "../httpOperationResponse"; +/** + * Options that control how to retry on response status code 429. + */ +export interface ThrottlingRetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; +} +export declare function throttlingRetryPolicy(maxRetries?: number): RequestPolicyFactory; +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private retryLimit; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number); + sendRequest(httpRequest: WebResourceLike): Promise; + private retry; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map new file mode 100644 index 0000000..124aa8e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,wBAAwB,EACxB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAOjE;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,wBAAgB,qBAAqB,CACnC,UAAU,GAAE,MAA4B,GACvC,oBAAoB,CAMtB;AAED;;;;;GAKG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,OAAO,CAAC,UAAU,CAAS;gBAEf,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,EAAE,UAAU,EAAE,MAAM;IAK/E,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;YAMxE,KAAK;WA2BL,qBAAqB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;WAS9D,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;CAWjF"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js new file mode 100644 index 0000000..9768e05 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __awaiter, __extends, __generator } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { delay } from "../util/utils"; +var StatusCodes = Constants.HttpConstants.StatusCodes; +var DEFAULT_RETRY_COUNT = 3; +export function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +var ThrottlingRetryPolicy = /** @class */ (function (_super) { + __extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return __awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; +}(BaseRequestPolicy)); +export { ThrottlingRetryPolicy }; +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map new file mode 100644 index 0000000..91fa369 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,MAAM,UAAU,qBAAqB,CACnC,UAAwC;IAAxC,2BAAA,EAAA,gCAAwC;IAExC,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;QACpE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;GAKG;AACH;IAA2C,yCAAiB;IAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;QAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;IAC/B,CAAC;IAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;gBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;wBACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;oBAC9C,CAAC,CAAC,EAAC;;;KACJ;IAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;wBAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;4BACvD,sBAAO,YAAY,EAAC;yBACrB;wBAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;6BAEE,CAAA,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;wBAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;6BACE,SAAS,EAAT,wBAAS;wBACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;wBAAtB,SAAsB,CAAC;wBACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;wBAArD,GAAG,GAAG,SAA+C;wBAC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;4BAIxD,sBAAO,YAAY,EAAC;;;;KACrB;IAEa,2CAAqB,GAAnC,UAAoC,WAAmB;QACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;IACH,CAAC;IAEa,+CAAyB,GAAvC,UAAwC,WAAmB;QACzD,IAAI;YACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;IACH,4BAAC;AAAD,CAAC,AA7DD,CAA2C,iBAAiB,GA6D3D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts new file mode 100644 index 0000000..2ba12d4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts @@ -0,0 +1,21 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +export declare const getDefaultUserAgentHeaderName: typeof 
getDefaultUserAgentKey; +export declare function getDefaultUserAgentValue(): string; +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptionsLike; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map new file mode 100644 index 0000000..4a3c4c1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,sBAAsB,EAA2B,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,oBAAY,aAAa,GAAG;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAwB7D,eAAO,MAAM,6BAA6B,+BAAyB,CAAC;AAEpE,wBAAgB,wBAAwB,IAAI,MAAM,CAKjD;AAED,wBAAgB,eAAe,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAanF;AAED,qBAAa,eAAgB,SAAQ,iBAAiB;IAElD,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAC3C,SAAS,CAAC,SAAS,EAAE,MAAM;IAC3B,SAAS,CAAC,WAAW,EAAE,MAAM;gBAHpB,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,wBAAwB,EACjC,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAKrE,kBAAkB,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI;CASnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js new file mode 100644 index 0000000..21a4391 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { BaseRequestPolicy, } from "./requestPolicy"; +function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? 
"" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); +} +export var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +export function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +export function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +var UserAgentPolicy = /** @class */ (function (_super) { + __extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; +}(BaseRequestPolicy)); +export { UserAgentPolicy }; +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map new file mode 100644 index 0000000..3e2fb85 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"userAgentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,SAAS,cAAc;IACrB,IAAM,aAAa,GAAG;QACpB,GAAG,EAAE,YAAY;QACjB,KAAK,EAAE,SAAS,CAAC,aAAa;KAC/B,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;IADpB,6BAAA,EAAA,kBAAkB;IAClB,+BAAA,EAAA,oBAAoB;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,UAAC,IAAI;QACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;IAC/B,CAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,MAAM,CAAC,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,MAAM,UAAU,wBAAwB;IACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,eAAe,CAAC,aAA6B;IAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,CAAC,CAAC,CAAC,sBAAsB,EAAE,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC;IAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;QAChD,CAAC,CAAC,wBAAwB,EAAE;QAC5B,CAAC,CAAC,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAqC,mCAAiB;IACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,WAAmB;QAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;QANU,iBAAW,GAAX,WAAW,CAAe;QAC1B,cAAQ,GAAR,QAAQ,CAA0B;QACjC,eAAS,GAAT,SAAS,CAAQ;QACjB,iBAAW,GAAX,WAAW,CAAQ;;IAG/B,CAAC;IAED,qCAAW,GAAX,UAAY,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IAED,4CAAkB,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;IACH,CAAC;IACH,sBAAC;AAAD,CAAC,AAxBD,CAAqC,iBAAiB,GAwBrD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts new file mode 100644 index 0000000..3ac8405 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts @@ -0,0 +1,33 @@ +/// +import * as http from "http"; +import * as https from "https"; +import { ProxySettings } from "./serviceClient"; +import { HttpHeadersLike } from "./httpHeaders"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export interface HttpsProxyOptions { + host: string; + port: number; + localAddress?: string; + proxyAuth?: string; + headers?: { + [key: string]: any; + }; + ca?: Buffer[]; + servername?: string; + key?: Buffer; + cert?: Buffer; +} +interface HttpsOverHttpsOptions { + maxSockets?: number; + ca?: Buffer[]; + key?: Buffer; + cert?: Buffer; + proxy?: HttpsProxyOptions; +} +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: HttpsOverHttpsOptions): http.Agent | https.Agent; +export {}; +//# sourceMappingURL=proxyAgent.d.ts.map \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map new file mode 100644 index 0000000..13222d6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.d.ts","sourceRoot":"","sources":["../../lib/proxyAgent.ts"],"names":[],"mappings":";AAGA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAG/B,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAEhD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,oBAAY,UAAU,GAAG;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAA;CAAE,CAAC;AAC/E,wBAAgB,gBAAgB,CAC9B,UAAU,EAAE,MAAM,EAClB,aAAa,EAAE,aAAa,EAC5B,OAAO,CAAC,EAAE,eAAe,GACxB,UAAU,CA0BZ;AAID,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC;IACd,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,UAAU,qBAAqB;IAC7B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC;IACd,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,iBAAiB,CAAC;CAC3B;AAED,wBAAgB,YAAY,CAC1B,cAAc,EAAE,OAAO,EACvB,YAAY,EAAE,OAAO,EACrB,aAAa,EAAE,qBAAqB,GACnC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAU1B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js new file mode 100644 index 0000000..c832234 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import * as tunnel from "tunnel"; +import { URLBuilder } from "./url"; +export function createProxyAgent(requestUrl, proxySettings, headers) { + var tunnelOptions = { + proxy: { + host: URLBuilder.parse(proxySettings.host).getHost(), + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = proxySettings.username + ":" + proxySettings.password; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = "" + proxySettings.username; + } + var requestScheme = URLBuilder.parse(requestUrl).getScheme() || ""; + var isRequestHttps = requestScheme.toLowerCase() === "https"; + var proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || ""; + var isProxyHttps = proxyScheme.toLowerCase() === "https"; + var proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +export function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} +//# sourceMappingURL=proxyAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map new file mode 100644 index 0000000..fe48db5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.js","sourceRoot":"","sources":["../../lib/proxyAgent.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAI/F,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAGjC,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AAInC,MAAM,UAAU,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,IAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY;YAC9D,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAM,aAAa,CAAC,QAAQ,SAAI,aAAa,CAAC,QAAU,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,KAAG,aAAa,CAAC,QAAU,CAAC;KAC9D;IAED,IAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IACrE,IAAM,cAAc,GAAG,aAAa,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAC/D,IAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC3E,IAAM,YAAY,GAAG,WAAW,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAE3D,IAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAwBD,MAAM,UAAU,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAAoC;IAEpC,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAO,MAAM,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAO,MAAM,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts new file mode 100644 index 0000000..a235f85 --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts @@ -0,0 +1,11 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + Csv = ",", + Ssv = " ", + Tsv = "\t", + Pipes = "|", + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map new file mode 100644 index 0000000..7ad8dc8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.d.ts","sourceRoot":"","sources":["../../lib/queryCollectionFormat.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,qBAAqB;IAC/B,GAAG,MAAM;IACT,GAAG,MAAM;IACT,GAAG,OAAO;IACV,KAAK,MAAM;IACX,KAAK,UAAU;CAChB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js new file mode 100644 index 0000000..6128d01 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export var QueryCollectionFormat; +(function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; +})(QueryCollectionFormat || (QueryCollectionFormat = {})); +//# sourceMappingURL=queryCollectionFormat.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map new file mode 100644 index 0000000..29c806c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.js","sourceRoot":"","sources":["../../lib/queryCollectionFormat.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,MAAM,CAAN,IAAY,qBAMX;AAND,WAAY,qBAAqB;IAC/B,kCAAS,CAAA;IACT,kCAAS,CAAA;IACT,mCAAU,CAAA;IACV,oCAAW,CAAA;IACX,wCAAe,CAAA;AACjB,CAAC,EANW,qBAAqB,KAArB,qBAAqB,QAMhC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts new file mode 100644 index 0000000..7b889c4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts @@ -0,0 +1,14 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class RestError extends Error { + static readonly REQUEST_SEND_ERROR: string; + static readonly REQUEST_ABORTED_ERROR: string; + static readonly PARSE_ERROR: string; + code?: string; + statusCode?: number; + request?: WebResourceLike; + response?: HttpOperationResponse; + body?: any; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse, body?: any); +} +//# sourceMappingURL=restError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map 
b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map new file mode 100644 index 0000000..d058e5a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.d.ts","sourceRoot":"","sources":["../../lib/restError.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,qBAAa,SAAU,SAAQ,KAAK;IAClC,MAAM,CAAC,QAAQ,CAAC,kBAAkB,EAAE,MAAM,CAAwB;IAClE,MAAM,CAAC,QAAQ,CAAC,qBAAqB,EAAE,MAAM,CAA2B;IACxE,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAiB;IAEpD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,eAAe,CAAC;IAC1B,QAAQ,CAAC,EAAE,qBAAqB,CAAC;IACjC,IAAI,CAAC,EAAE,GAAG,CAAC;gBAET,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,EAChC,IAAI,CAAC,EAAE,GAAG;CAWb"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.js b/node_modules/@azure/ms-rest-js/es/lib/restError.js new file mode 100644 index 0000000..9062520 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +var RestError = /** @class */ (function (_super) { + __extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; +}(Error)); +export { RestError }; +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.js.map b/node_modules/@azure/ms-rest-js/es/lib/restError.js.map new file mode 100644 index 0000000..5772499 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.js","sourceRoot":"","sources":["../../lib/restError.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAK/F;IAA+B,6BAAK;IAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;QANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;QAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;IACnD,CAAC;IAzBe,4BAAkB,GAAW,oBAAoB,CAAC;IAClD,+BAAqB,GAAW,uBAAuB,CAAC;IACxD,qBAAW,GAAW,aAAa,CAAC;IAwBtD,gBAAC;CAAA,AA3BD,CAA+B,KAAK,GA2BnC;SA3BY,SAAS"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts new file mode 100644 index 0000000..2fb084f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts @@ -0,0 +1,130 @@ +export declare class Serializer { + readonly modelMappers: { + [key: string]: any; + }; + readonly isXML?: boolean | undefined; + constructor(modelMappers?: { + [key: string]: any; + }, isXML?: boolean | undefined); + validateConstraints(mapper: Mapper, value: any, objectName: string): void; + /** + * 
Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + serialize(mapper: Mapper, object: any, objectName?: string): any; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + deserialize(mapper: Mapper, responseBody: any, objectName: string): any; +} +export interface MapperConstraints { + InclusiveMaximum?: number; + ExclusiveMaximum?: number; + InclusiveMinimum?: number; + ExclusiveMinimum?: number; + MaxLength?: number; + MinLength?: number; + Pattern?: RegExp; + MaxItems?: number; + MinItems?: number; + UniqueItems?: true; + MultipleOf?: number; +} +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +export interface SimpleMapperType { + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +export interface CompositeMapperType { + name: "Composite"; + className?: string; + modelProperties?: { + [propertyName: string]: Mapper; + }; + additionalProperties?: Mapper; + uberParent?: string; + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +export interface SequenceMapperType { + name: "Sequence"; + element: Mapper; +} +export interface DictionaryMapperType { + name: "Dictionary"; + value: Mapper; +} +export interface EnumMapperType { + name: "Enum"; + allowedValues: any[]; +} +export interface BaseMapper { + xmlName?: string; + xmlIsAttribute?: boolean; + xmlElementName?: string; + xmlIsWrapped?: boolean; + readOnly?: boolean; + isConstant?: boolean; + required?: boolean; + nullable?: boolean; + serializedName?: string; + type: MapperType; + defaultValue?: any; + constraints?: MapperConstraints; +} +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +export interface PolymorphicDiscriminator { + serializedName: string; + clientName: string; + [key: string]: string; +} +export interface CompositeMapper extends BaseMapper { + type: CompositeMapperType; +} +export interface SequenceMapper extends BaseMapper { + type: SequenceMapperType; +} +export interface DictionaryMapper extends BaseMapper { + type: DictionaryMapperType; + headerCollectionPrefix?: string; +} +export interface EnumMapper extends BaseMapper { + type: EnumMapperType; +} +export interface UrlParameterValue { + value: string; + skipUrlEncoding: boolean; +} +export declare function serializeObject(toSerialize: any): any; +export declare const MapperType: { + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + Date: "Date"; + DateTime: "DateTime"; + DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + 
String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map new file mode 100644 index 0000000..2516371 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.d.ts","sourceRoot":"","sources":["../../lib/serializer.ts"],"names":[],"mappings":"AAMA,qBAAa,UAAU;aAEH,YAAY,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE;aACpC,KAAK,CAAC;gBADN,YAAY,GAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAO,EACzC,KAAK,CAAC,qBAAS;IAGjC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,EAAE,UAAU,EAAE,MAAM,GAAG,IAAI;IA8DzE;;;;;;;;;;OAUG;IACH,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,UAAU,CAAC,EAAE,MAAM,GAAG,GAAG;IAmEhE;;;;;;;;;;OAUG;IACH,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,EAAE,UAAU,EAAE,MAAM,GAAG,GAAG;CA4ExE;AAipBD,MAAM,WAAW,iBAAiB;IAChC,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,IAAI,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,oBAAY,UAAU,GAClB,gBAAgB,GAChB,mBAAmB,GACnB,kBAAkB,GAClB,oBAAoB,GACpB,cAAc,CAAC;AAEnB,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EACA,WAAW,GACX,SAAS,GACT,WAAW,GACX,MAAM,GACN,UAAU,GACV,iBAAiB,GACjB,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,UAAU,GACV,UAAU,GACV,MAAM,GACN,QAAQ,GACR,KAAK,CAAC;CACX;AAED,MAAM,WAAW,mBAAmB;IAClC,IAAI,EAAE,WAAW,CAAC;IAKlB,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,eAAe,CAAC,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IACrD,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAE9B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,wBAAwB,CAAC,EAAE,wBAAwB,CAAC;CACrD;AAED,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,UAAU,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,YAAY,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,aAAa,EAAE,GAAG,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,IAAI,EAAE,UAAU,CAAC;IACjB,YAAY,CAAC,EAAE,GAAG,CAAC;IACnB,WAAW,CAAC,EAAE,iBAAiB,CAAC;CACjC;AAED,oBAAY,MAAM,GAAG,UAAU,GAAG,eAAe,GAAG,cAAc,GAAG,gBAAgB,GAAG,UAAU,CAAC;AAEnG,MAAM,WAAW,wBAAwB;IACvC,cAAc,EAAE,MAAM,CAAC;IACvB,UAAU,EAAE,MAAM,CAAC;IACnB,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,eAAgB,SAAQ,UAAU;IACjD,IAAI,EAAE,mBAAmB,CAAC;CAC3B;AAED,MAAM,WAAW,cAAe,SAAQ,UAAU;IAChD,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED,MAAM,WAAW,gBAAiB,SAAQ,UAAU;IAClD,IAAI,EAAE,oBAAoB,CAAC;IAC3B,sBAAsB,CAAC,EAAE,MAAM,CAAC;CACjC;AAED,MAAM,WAAW,UAAW,SAAQ,UAAU;IAC5C,IAAI,EAAE,cAAc,CAAC;CACtB;AAED,MAAM,WAAW,iBAAiB;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,eAAe,EAAE,OAAO,CAAC;CAC1B;AAGD,wBAAgB,eAAe,CAAC,WAAW,EAAE,GAAG,GAAG,GAAG,CAqBrD;AAaD,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;CAiBrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.js b/node_modules/@azure/ms-rest-js/es/lib/serializer.js new file mode 100644 index 0000000..9e60d91 --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/serializer.js @@ -0,0 +1,789 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; +var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} 
objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; +}()); +export { Serializer }; +function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. 
+ str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. + return base64.decodeString(str); +} +function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". 
The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!utils.isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. 
Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var 
propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; +} +function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." 
+ serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. + for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
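+            // In short: when the discriminator property is missing from the response, fall back to this mapper's serializedName so the polymorphic type can still be resolved.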
+ if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); 
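+    // Looks up the concrete mapper in modelMappers.discriminators, keyed by the discriminator value itself when it names the base type, otherwise by "<uberParent or className>.<value>".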
+ if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +// TODO: why is this here? +export function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = base64.encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; +} +export var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map b/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map new file mode 100644 index 0000000..cfeb735 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../../lib/serializer.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,MAAM,MAAM,eAAe,CAAC;AACxC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AAEtC;IACE,oBACkB,YAAyC,EACzC,KAAe;QADf,6BAAA,EAAA,iBAAyC;QAAzC,iBAAY,GAAZ,YAAY,CAA6B;QACzC,UAAK,GAAL,KAAK,CAAU;IAC9B,CAAC;IAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;QAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;YACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;QACJ,CAAC,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;gBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAtB,CAAsB,CAAC,EAC5E;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACH,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;QACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;SAC9B;QAED,mDAAmD;QACnD,sDAAsD;QACtD,mDAAmD;QACnD,wBAAwB;QACxB,iCAAiC;QACjC,0CAA0C;QAC1C,0CAA0C;QAC1C,qCAAqC;QACrC,0CAA0C;QAElC,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;YACL,8BAA8B;YAC9B,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBACxC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;gBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAe,MAA
oB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACrF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACzF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACvF;SACF;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;;;;;OAUG;IACH,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;QAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACzE,sEAAsE;gBACtE,qDAAqD;gBACrD,qEAAqE;gBACrE,YAAY,GAAG,EAAE,CAAC;aACnB;YACD,+FAA+F;YAC/F,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;YAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;SAC/F;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd;;;;mBAIG;gBACH,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;oBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBAClC;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;gBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;gBACzF,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;gBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;aAClC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;aACxC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC;aAC7C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;aAC9C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC7F;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IACH,iBAAC;AAAD,CAAC,AAzOD,IAyOC;;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,CAAC,CAAC,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;IACD,wBAAwB;IACxB,IAAM,
GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;IAC3C,uBAAuB;IACvB,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;IACD,uBAAuB;IACvB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;IAClD,wBAAwB;IACxB,OAAO,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,IAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;YAAxB,IAAM,IAAI,iBAAA;YACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YAC9C,IAAI,CAAC,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,KAAK,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;aACH;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,CAAC,CAAC,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,CAAC,CAAC,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;aACH;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;KACH;IACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;QACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;IACxB,CAAC,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,MAAM,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;KACvC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EA
AE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAClC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YACvC,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAAY,IAAI;oBACnB,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;oBACtC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;YACzD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;aACH;YACD,KAAK,GAAG,KAAK,CAAC;SACf;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;IAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;KACzD;IACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,SAAS,GAAG,EAAE,CAAC;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;KACzE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;IAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;KAC1D;IACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IACpC,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;QAAlC,IAAM,GAAG,SAAA;QACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;KAC5F;IACD,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;GAIG;AACH,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;QACxC,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACb,4BAAyB,U
AAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;SACH;QAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;QACvD,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;SACnF;QACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,qDAAqD;iBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;SACH;KACF;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;KACzE;IAED,IAAI,MAAM,IAAI,SAAS,EAAE;QACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;aACV;YAED,IAAI,QAAQ,SAAoB,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;iBACnC;qBAAM;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;iBACpE;aACF;iBAAM;gBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;gBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;gBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAAzB,IAAM,QAAQ,cAAA;oBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;qBAC7B;oBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;iBACvC;aACF;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;oBAClC,CAAC,CAAC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;oBAClD,CAAC,CAAC,UAAU,CAAC;gBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;gBAC5F,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;oBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;iBACrC;gBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;gBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;oBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;wBACjC,oDAAoD;wBACpD,2DAA2D;wBAC3D,gCAAgC;wBAChC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;wBACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC5C;yBAAM,IAAI,cAAc,CAAC,YAAY,EAAE;wBACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;qBAChF;yBAAM;wBACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC1C;iBACF;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;oCAC/B,cAAc;gBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,EAArB,CAAqB,CAAC,CAAC;gBAC5E,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;iBACH;;YARH,KAAK,IAAM,cAAc,IAAI,MAAM;wBAAxB,cAAc;aASxB;SACF;QAED,OAAO,OAAO,CAAC;KAChB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB;IAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAC3C,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;KACnF;IAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;QAAtC,IA
AM,GAAG,SAAA;QACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;QACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;YACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;SACxD;QAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;QAC3F,IAAI,sBAAsB,EAAE;YAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAA9C,IAAM,SAAS,SAAA;gBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;oBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;iBACH;gBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;aACtC;YACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;SAC5B;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;gBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;aACH;iBAAM;gBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;gBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;oBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;oBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;oBAC3D,IAAI,kBAAkB,EAAE;wBACtB,iBAAiB,GAAG,EAAE,CAAC;qBACxB;iBACF;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;aACH;SACF;aAAM;YACL,kFAAkF;YAClF,IAAI,gBAAgB,SAAA,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;YACvB,sCAAsC;YACtC,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;gBAArB,IAAM,IAAI,cAAA;gBACb,IAAI,CAAC,GAAG;oBAAE,MAAM;gBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;aACjB;YACD,gBAAgB,GAAG,GAAG,CAAC;YACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;YACtE,sEAAsE;YACtE,yEAAyE;YACzE,kFAAkF;YAClF,kFAAkF;YAClF,gGAAgG;YAChG,8FAA8F;YAC9F,qFAAqF;YACrF,mFAAmF;YACnF,sFAAsF;YACtF,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;gBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;aAC1C;YAED,IAAI,eAAe,SAAA,CAAC;YACpB,SAAS;YACT,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;gBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;gBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,yFAAyF;gBACzF,6CAA6C;gBAC7C,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;oBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;wBACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;qBAC5B;iBACF;gBACD,QAAQ,GAAG,aAAa,CAAC;aAC1B;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;gBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;aACjC;SACF;KACF;IAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IACpE,IAAI,0BAA0B,EAAE;QAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;YACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;gBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;oBACjC,OAAO,KAAK,CAAC;iBACd;aACF;YACD,OAAO,IAAI,CAAC;QACd,CAAC,CAAC;QAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;YAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;aACH;SACF;KACF;SAAM,IAAI,YAAY,EAAE;QACvB,KAAkB,UAAyB,E
AAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;gBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;gBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;aACnC;SACF;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;IAElB,2BAA2B;IAC3B,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;SACpF;QACD,OAAO,cAAc,CAAC;KACvB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;IAElB,2BAA2B;IAC3B,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;YAChC,+FAA+F;YAC/F,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;SACxF;QACD,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;IAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5F,IAAI,wBAAwB,EAAE;QAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;YAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;gBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;gBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;oBAC7B,CAAC,CAAC,kBAAkB;oBACpB,CAAC,CAAC,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;gBACrF,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;iBAC5B;aACF;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;IAEvB,OAAO,CACL,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CACrE,CAAC;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;IAClF,OAAO,CACL,QAAQ;QACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,CAChE,CAAC;AACJ,CAAC;AAoHD,0BAA0B;AAC1B,MAAM,UAAU,eAAe,CAAC,WAAgB;IAC9C,IAAI,WAAW,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;QACrC,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAClD,OAAO,WAAW,CAAC;KACpB;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;QACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;KAClC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,IAAM,KAAK,GAAG,EAAE,CAAC;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QACD,OAAO,KAAK,CAAC;KACd;SAAM,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;SAC/D;QACD,OAAO,UAAU,CAAC;KACnB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,SAAS,OAAO,CAAmB,CAAW;IAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;QAAhB,IAAM,GAAG,UAAA;Q
ACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,CAAC,IAAM,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;CACX,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts new file mode 100644 index 0000000..0a54a86 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts @@ -0,0 +1,168 @@ +/// +import { TokenCredential } from "@azure/core-auth"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { OperationArguments } from "./operationArguments"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { Mapper, Serializer } from "./serializer"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +import { ServiceCallback } from "./util/utils"; +import { Agent } from "http"; +/** + * HTTP proxy settings (Node.js only) + */ +export interface ProxySettings { + host: string; + port: number; + username?: string; + password?: string; +} +/** + * HTTP and HTTPS agents (Node.js only) + */ +export interface AgentSettings { + http: Agent; + https: Agent; +} +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. 
+ */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-command-name" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only). + */ + agentSettings?: AgentSettings; + /** + * If specified: + * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient. + * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. Otherwise, the scope would default to "https://management.azure.com/.default". + * + * If it is not specified: + * - All OperationSpecs must contain a baseUrl property. + * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be "https://management.azure.com/.default". + */ + baseUri?: string; +} +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +export declare class ServiceClient { + /** + * The base URI against which requests will be made when using this ServiceClient instance. + * + * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient. + * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default "https://management.azure.com/.default" + * + * If it is not specified, all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + constructor(credentials?: ServiceClientCredentials | TokenCredential, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. 
+ * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. + */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map new file mode 100644 index 0000000..2668428 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.d.ts","sourceRoot":"","sources":["../../lib/serviceClient.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAqB,MAAM,kBAAkB,CAAC;AACtE,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAElF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC9E,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAIL,aAAa,EACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAqB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AACnE,OAAO,EAEL,2BAA2B,EAC5B,MAAM,kCAAkC,CAAC;AAQ1C,OAAO,EAA0B,eAAe,EAAkB,MAAM,2BAA2B,CAAC;AACpG,OAAO,EAEL,oBAAoB,EAGrB,MAAM,0BAA0B,CAAC;AAKlC,OAAO,EAAqC,MAAM,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AAIjG,OAAO,EAEL,qBAAqB,EACrB,eAAe,EAGhB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,eAAe,EAAE,MAAM,cAAc,CAAC;AAI/C,OAAO,EAAE,KAAK,EAAE,MAAM,MAAM,CAAC;AAM7B;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,KAAK,CAAC;IACZ,KAAK,EAAE,KAAK,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,sBAAsB,CAAC,EACnB,oBAAoB,EAAE,GACtB,CAAC,CAAC,6BAA6B,EAAE,oBAAoB,EAAE,KAAK,IAAI,GAAG,oBAAoB,EAAE,CAAC,CAAC;IAC/F;;OAEG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB;;OAEG;IACH,kBAAkB,CAAC,EAAE,kBAAkB,CAAC;IACxC;;OAEG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB;;OAEG;IACH,0BAA0B,CAAC,EAAE,MAAM,CAAC;IACpC;;OAEG;IACH,6BAA6B,CAAC,EAAE,OAAO,CAAC;IACxC;;;OAGG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;;OAGG;IACH,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC;;OAEG;IACH,2BAA2B,CAAC,EAAE,2BAA2B,CAAC;IAC1D;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,0BAA0B,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAChF;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,gBAAgB,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAC5D;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,CAAC,EAAE,eAAe,CAAC;IAClC;;OAEG;IACH,aAAa,CAAC,EAAE
,aAAa,CAAC;IAC9B;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;;GAGG;AACH,qBAAa,aAAa;IACxB;;;;;;;OAOG;IACH,SAAS,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE3B;;;OAGG;IACH,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAEtC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,qBAAqB,CAA2B;IAEjE,OAAO,CAAC,QAAQ,CAAC,uBAAuB,CAAyB;IACjE,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAU;IAE3C;;;;;OAKG;gBAED,WAAW,CAAC,EAAE,wBAAwB,GAAG,eAAe,EACxD,OAAO,CAAC,EAAE,oBAAoB;IAiDhC;;OAEG;IACH,WAAW,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IA8B7F;;;;;OAKG;IACH,oBAAoB,CAClB,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,QAAQ,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,GAC9B,OAAO,CAAC,YAAY,CAAC;CAkMzB;AAED,wBAAgB,oBAAoB,CAClC,aAAa,EAAE,aAAa,EAC5B,WAAW,EAAE,eAAe,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,GAC3B,IAAI,CAsEN;AAkFD,oBAAY,cAAc,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAAC;AAE7D;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,cAAc,CAYhG;AAiBD,wBAAgB,0CAA0C,CACxD,aAAa,EAAE,aAAa,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,eAAe,EAAE,MAAM,EACvB,UAAU,EAAE,UAAU,GACrB,GAAG,CAkEL;AA6BD,wBAAgB,eAAe,CAC7B,SAAS,EAAE,qBAAqB,EAChC,YAAY,EAAE,iBAAiB,GAAG,SAAS,GAC1C,YAAY,CAqEd"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js new file mode 100644 index 0000000..89b329b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js @@ -0,0 +1,488 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __assign, __spreadArrays } from "tslib"; +import { isTokenCredential } from "@azure/core-auth"; +import { DefaultHttpClient } from "./defaultHttpClient"; +import { getPathStringFromParameter, getPathStringFromParameterPath, } from "./operationParameter"; +import { isStreamOperation } from "./operationSpec"; +import { deserializationPolicy, } from "./policies/deserializationPolicy"; +import { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { userAgentPolicy, getDefaultUserAgentHeaderName, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +import { DefaultRedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { RequestPolicyOptions, } from "./policies/requestPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { MapperType } from "./serializer"; +import { URLBuilder } from "./url"; +import * as utils from "./util/utils"; +import { stringifyXML } from "./util/xml"; +import { isWebResourceLike, WebResource, } from "./webResource"; +import { agentPolicy } from "./policies/agentPolicy"; +import { proxyPolicy, getDefaultProxySettings } from "./policies/proxyPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { AzureIdentityCredentialAdapter, azureResourceManagerEndpoints, } from "./credentials/azureIdentityTokenCredentialAdapter"; +/** + * @class + * Initializes a new instance of the ServiceClient. 
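+ * Typical (illustrative) usage: construct with credentials and ServiceClientOptions (e.g. a baseUri), then call sendRequest or sendOperationRequest.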
+ */ +var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new DefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
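+ * The returned Promise resolves with the flattened RestResponse produced by flattenResponse from the operationSpec.responses entry that matches the response status.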
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) { return cb(err); }); + } + return 
result; + }; + return ServiceClient; +}()); +export { ServiceClient }; +export function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(utils.prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } +} +function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = __assign(__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; +} +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export function getPropertyParent(parent, propertyPath) { + if (parent && propertyPath) { + var propertyPathLength = propertyPath.length; + for (var i = 0; i < propertyPathLength - 1; ++i) { + var propertyName = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +export function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. 
+ var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +export function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : []; + var arrayResponse = __spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(__assign(__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map new file mode 100644 index 0000000..fd4afe6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.js","sourceRoot":"","sources":["../../lib/serviceClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAmB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AAEtE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAKxD,OAAO,EACL,0BAA0B,EAC1B,8BAA8B,GAG/B,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAiB,MAAM,iBAAiB,CAAC;AACnE,OAAO,EACL,qBAAqB,GAEtB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EACL,eAAe,EACf,6BAA6B,EAC7B,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,sBAAsB,EAAmB,cAAc,EAAE,MAAM,2BAA2B,CAAC;AACpG,OAAO,EAGL,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAA6C,UAAU,EAAc,MAAM,cAAc,CAAC;AACjG,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACnC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAC1C,OAAO,EAIL,iBAAiB,EACjB,WAAW,GACZ,MAAM,eAAe,CAAC;AAGvB,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,WAAW,EAAE,uBAAuB,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AAEzE,OAAO,EACL,8BAA8B,EAC9B,6BAA6B,GAC9B,MAAM,mDAAmD,CAAC;AAqG3D;;;GAGG;AACH;IA0BE;;;;;OAKG;IACH,uBACE,WAAwD,EACxD,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;QAED,IAAI,wBAA8D,CAAC;QACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;YAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;YAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;gBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;aACvC;YACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;SACnF;aAAM;YACL,wBAAwB,GAAG,WAAW,CAAC;SACxC;QAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;YACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAI,iBAAiB,EAAE,CAAC;QACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB
,CAAC,EAAE;YACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;YACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACpF,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;IACxD,CAAC;IAED;;OAEG;IACH,mCAAW,GAAX,UAAY,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;QAEtC,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;oBAAnD,IAAM,YAAY,SAAA;oBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;oBAAvD,IAAM,cAAc,SAAA;oBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;wBACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;wBACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;4BAChD,IAAI,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,mBAAmB,GAAG,EAAE,CAAC;iCAC1B;qCAAM;oCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;qCACvE;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;oCACvC,IACE
,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;4BAC5C,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,EAAE;gBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;oBAAzD,IAAM,eAAe,SAAA;oBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,IAAI,SAAS,EAAE;wBAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;wBACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;gCAAvC,IAAM,GAAG,SAAA;gCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;gBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;aACnE;YAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;gBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAAzD,CAAyD,CAC1D,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAChC;QAED,IAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;gBACJ,2CAA2C;iBAC1C,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,EAAxE,CAAwE,CAAC;iBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,EAAP,CAAO,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,oBAAC;AAAD,CAAC,AAhUD,IAgUC;;AAED,MAAM,UAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;IAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;QACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI;YACF,IAAI,WAAW
,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;gBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;gBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7B,KAAK,CAAC,kBAAkB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;4BAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;yBACpC,CAAC,CAAC;qBACJ;iBACF;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAgC,UAAgC,EAAhC,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAAE;YAA7D,IAAM,iBAAiB,SAAA;YAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;gBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED,SAAS,sBAAsB,CAAC,QAAa;IAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;AAC/C,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;IAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,IAAI,WAAW,EAAE;QACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;YACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC7B;aAAM;YACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;SAC5C;KACF;IAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IAED,IAAM,eAAe,yBAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;IACF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KAC5D;IAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;IACzE,IAAI,aAAa,EAAE;QACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC,CAAC;KAC5C;IAED,IAAI,OAAO,CAAC,aAAa,EAAE;QACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAID;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAAC,MAAsB,EAAE,YAAsB;IAC9E,IAAI,MAAM,IAAI,YAAY,EAAE;QAC1B,IAAM,kBAAkB,GAAW,YAAY,CAAC,MAAM,CAAC;QACvD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE;YAC/C,IAAM,YAAY,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;YAC7C,IAAI,CAAC
,MAAM,CAAC,YAAY,CAAC,EAAE;gBACzB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;aAC3B;YACD,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,CAAC;SAC/B;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;IAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;4BACxB,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC;aAC7F;YAED,0CAA0C;YAC1C,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;SACnE;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;YACxC,IAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;YACF,0CAA0C;YAC1C,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;YACxE,IAAI,aAAa,KAAK,SAAS,EAAE;gBAC/B,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;QACnD,8EAA8E;QAC9E,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;YACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;IAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;QACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YACtC,KAAK,EAAE,SAAS;SACjB,CAAC;IAFF,CAEE,CAAC;IAEL,IAAI,UAAU,EAAE;QACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,uBACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC;QAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,EAAxC,CAAwC,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;YACjD,8EAA8E;YAC9E,mDAAmD;YACnD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC;YACnF,IAAM,aAAa,GAAG,eAAI,UAAU,CAAyB,CAAC;YAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAA3C,IAAM,GAAG,SAAA;gBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC
,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;oBAAzC,IAAM,GAAG,SAAA;oBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;YACpC,OAAO,aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnC,KAAK,CAAC,eAAe,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;QACA,yCAAyC;QACzC,OAAO,oBAAoB,uBACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.d.ts b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts new file mode 100644 index 0000000..710361f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts @@ -0,0 +1,149 @@ +/** + * A class that handles the query portion of a URLBuilder. + */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: any): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. + */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. 
+ */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: any): void; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. 
+ */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map new file mode 100644 index 0000000..0c8174a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"url.d.ts","sourceRoot":"","sources":["../../lib/url.ts"],"names":[],"mappings":"AAOA;;GAEG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,QAAQ,CAAC,SAAS,CAA2D;IAErF;;OAEG;IACI,GAAG,IAAI,OAAO;IAIrB;;;;OAIG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,EAAE,cAAc,EAAE,GAAG,GAAG,IAAI;IAW5D;;;OAGG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIhE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAoBzB;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CA0D5C;AAED;;GAEG;AACH,qBAAa,UAAU;IACrB,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,MAAM,CAAuB;IAErC;;;OAGG;IACI,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQlD;;OAEG;IACI,SAAS,IAAI,MAAM,GAAG,SAAS;IAItC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQ9C;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,IAAI;IAQvD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAgB9C;;;OAGG;IACI,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAkBjD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;OAEG;IACI,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQhD;;;;OAIG;IACI,iBAAiB,CAAC,kBAAkB,EAAE,MAAM,EAAE,mBAAmB,EAAE,GAAG,GAAG,IAAI;IASpF;;;OAGG;IACI,sBAAsB,CAAC,kBAAkB,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIxF;;OAEG;IACI,QAAQ,IAAI,MAAM,GAAG,SAAS;IAIrC;;OAEG;IACH,OAAO,CAAC,GAAG;IAqCJ,QAAQ,IAAI,MAAM;IA6BzB;;;OAGG;IACI,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,IAAI;WAUpD,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,UAAU;CAK9C;AAED,aAAK,iBAAiB,GAAG,QAAQ,GAAG,gBAAgB,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,CAAC;AAEnG,aAAK,YAAY,GAAG,QAAQ,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;AAElE,qBAAa,QAAQ;aACgB,IAAI,EAAE,MAAM;aAAkB,IAAI,EAAE,YAAY;gBAAhD,IAAI,EAAE,MAAM,EAAkB,IAAI,EAAE,YAAY;WAErE,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI9B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CAG5C;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAOlE;AAED;;GAEG;AACH,qBAAa,YAAY;IAMJ,QAAQ,CAAC,KAAK,EAAE,MAAM;IALzC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,aAAa,EAAE,iBAAiB,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,QAAQ,GAAG,SAAS,CAAC;gBAER,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,iBAAiB;IAMpE;;;OAGG;IACI,OAAO,IAAI,QAAQ,GAAG,SAAS;IAItC;;OAEG;IACI,IAAI,IAAI,OAAO;CAmCvB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.js b/node_modules/@azure/ms-rest-js/es/lib/url.js new file mode 100644 index 0000000..0de4c16 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.js @@ -0,0 +1,595 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { replaceAll } from "./util/utils"; +/** + * A class that handles the query portion of a URLBuilder. + */ +var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. 
+ */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. + */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; +}()); +export { URLQuery }; +/** + * A class that handles creating, modifying, and parsing URLs. + */ +var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. 
If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. 
+ */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" + this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; +}()); +export { URLBuilder }; +var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; +}()); +export { URLToken }; +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. 
+ */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; +}()); +export { URLTokenizer }; +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
+ */ +function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); +} +function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} +//# sourceMappingURL=url.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.js.map b/node_modules/@azure/ms-rest-js/es/lib/url.js.map new file mode 100644 index 0000000..68d9e43 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"url.js","sourceRoot":"","sources":["../../lib/url.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C;;GAEG;AACH;IAAA;QACmB,cAAS,GAAwD,EAAE,CAAC;IAqHvF,CAAC;IAnHC;;OAEG;IACI,sBAAG,GAAV;QACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;IAChD,CAAC;IAED;;;;OAIG;IACI,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;QACnD,IAAI,aAAa,EAAE;YACjB,IAAI,cAAc,IAAI,SAAS,EAAE;gBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;gBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;IACH,CAAC;IAED;;;OAGG;IACI,sBAAG,GAAV,UAAW,aAAqB;QAC9B,OAAO,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACnE,CAAC;IAED;;OAEG;IACI,2BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;oBAA/C,IAAM,qBAAqB,uBAAA;oBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACW,cAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY,EAAE;oBACpB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,eAAC;AAAD,CAAC,AAtHD,IAsHC;;AAED;;GAEG;AACH;IAAA;IAiPA,CAAC;IA1OC;;;OAGG;IACI,8BAAS,GAAhB,UAAiB,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAEG;IACI,8BAAS,GAAhB;QACE,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAiC;QAC9C,IAAI,IAAI,IAAI,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;YACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK
,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;gBACvD,0FAA0F;gBAC1F,2FAA2F;gBAC3F,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;IACH,CAAC;IAED;;;OAGG;IACI,+BAAU,GAAjB,UAAkB,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GAAG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,6BAAQ,GAAf,UAAgB,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;IACH,CAAC;IAED;;;;OAIG;IACI,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;QAC3E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;IACH,CAAC;IAED;;;OAGG;IACI,2CAAsB,GAA7B,UAA8B,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,6BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1D,CAAC;IAED;;OAEG;IACK,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;QACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI,EAAE;oBAClB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;iBAC/D;aACF;SACF;IACH,CAAC;IAEM,6BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;OAGG;IACI,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,I
AAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;IACH,CAAC;IAEa,gBAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,iBAAC;AAAD,CAAC,AAjPD,IAiPC;;AAMD;IACE,kBAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;IAAG,CAAC;IAEzE,eAAM,GAApB,UAAqB,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IACtC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IACrC,CAAC;IACH,eAAC;AAAD,CAAC,AAtBD,IAsBC;;AAED;;;GAGG;AACH,MAAM,UAAU,uBAAuB,CAAC,SAAiB;IACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,OAAO,CACL,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,CAAC,SAAS,CAClE,CAAC;AACJ,CAAC;AAED;;GAEG;AACH;IAME,sBAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,gBAAgB,CAAC;QACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IACzB,CAAC;IAED;;;OAGG;IACI,8BAAO,GAAd;QACE,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;OAEG;IACI,2BAAI,GAAX;QACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa,EAAE;gBAC1B,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;IACH,mBAAC;AAAD,CAAC,AA1DD,IA0DC;;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;GAGG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;KACjC;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,S
AAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,EAAlC,CAAkC,CAAC,CAAC;AACzF,CAAC;AAED;;;GAGG;AACH,SAAS,kBAAkB,CAAC,SAAuB;IAAE,+BAAkC;SAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;QAAlC,8CAAkC;;IACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAA/C,CAA+C,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts new file mode 100644 index 0000000..5c40c44 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. 
+ * @param value the Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map new file mode 100644 index 0000000..89074b2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.d.ts","sourceRoot":"","sources":["../../../lib/util/base64.browser.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAMzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAOtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js new file mode 100644 index 0000000..1c76f7b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export function encodeString(value) { + return btoa(value); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value) { + var str = ""; + for (var i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} +/** + * Decodes a base64 string into a byte array. 
+ * @param value the base64 string to decode + */ +export function decodeString(value) { + var byteString = atob(value); + var arr = new Uint8Array(byteString.length); + for (var i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} +//# sourceMappingURL=base64.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map new file mode 100644 index 0000000..47aea45 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.js","sourceRoot":"","sources":["../../../lib/util/base64.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;AACrB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACtC;IACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AACnB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;IAC/B,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;KACnC;IACD,OAAO,GAAG,CAAC;AACb,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts new file mode 100644 index 0000000..1dfd806 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map new file mode 100644 index 0000000..3af7f47 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.d.ts","sourceRoot":"","sources":["../../../lib/util/base64.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAKzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAEtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.js b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js new file mode 100644 index 0000000..6626510 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. 
+ * @param value the string to encode + */ +export function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + var bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value) { + return Buffer.from(value, "base64"); +} +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map new file mode 100644 index 0000000..0ca9f54 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.js","sourceRoot":"","sources":["../../../lib/util/base64.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,kGAAkG;IAClG,mGAAmG;IACnG,IAAM,WAAW,GAAG,KAAK,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts new file mode 100644 index 0000000..aee2743 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts @@ -0,0 +1,94 @@ +export declare const Constants: { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: string; + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: string; + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: string; + /** + * The UserAgent header. 
+ * + * @const + * @type {string} + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map new file mode 100644 index 0000000..4184fb1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../lib/util/constants.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,SAAS;IACpB;;;;OAIG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;OAEG;;IAGH;;OAEG;;;QAID;;;;;WAKG;;;;;;;;;;;;;;IAgBL;;OAEG;;QAED;;;;;WAKG;;;QAKH;;;;;;;WAOG;;QAGH;;;;;WAKG;;;CAGN,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.js b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js new file mode 100644 index 0000000..0806b4a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +export var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.7.0", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. 
+ * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map new file mode 100644 index 0000000..3118dae --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../lib/util/constants.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,MAAM,CAAC,IAAM,SAAS,GAAG;IACvB;;;;OAIG;IACH,aAAa,EAAE,OAAO;IAEtB;;;;;OAKG;IACH,IAAI,EAAE,OAAO;IAEb;;;;;OAKG;IACH,KAAK,EAAE,QAAQ;IAEf;;;;;OAKG;IACH,UAAU,EAAE,YAAY;IAExB;;;;;OAKG;IACH,WAAW,EAAE,aAAa;IAE1B;;OAEG;IACH,QAAQ,EAAE,UAAU;IAEpB;;OAEG;IACH,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;QACb;;;;;WAKG;QACH,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;SACrB;KACF;IAED;;OAEG;IACH,eAAe,EAAE;QACf;;;;;WAKG;QACH,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;QAE9B;;;;;;;WAOG;QACH,WAAW,EAAE,aAAa;QAE1B;;;;;WAKG;QACH,UAAU,EAAE,YAAY;KACzB;CACF,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts new file mode 100644 index 0000000..56eabdc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts @@ -0,0 +1,157 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export declare const isNode: boolean; +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Provides an array of values of an object. For example + * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"]. + * + * @param {object} obj An object whose properties need to be enumerated so that it"s values can be provided as an array + * + * @return {any[]} An array of values of the given object. 
+ */ +export declare function objectValues(obj: { + [key: string]: any; +}): any[]; +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array, kickstart: any): Promise; +/** + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} Returns the merged target object. + */ +export declare function mergeObjects(source: { + [key: string]: any; +}, target: { + [key: string]: any; +}): { + [key: string]: any; +}; +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +export declare function delay(t: number, value?: T): Promise; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. + * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null. + * @param {TResult} [result] The deserialized response body if an error did not occur. + * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur. + * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur. + */ + (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void; +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +export declare function promiseToCallback(promise: Promise): Function; +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +export declare function promiseToServiceCallback(promise: Promise): Function; +export declare function prepareXMLRootList(obj: any, elementName: string): { + [x: string]: any; +}; +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +export declare function applyMixins(targetCtor: any, sourceCtors: any[]): void; +/** + * Indicates whether the given string is in ISO 8601 format. 
+ * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +export declare function isDuration(value: string): boolean; +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined; +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +export declare function isPrimitiveType(value: any): boolean; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map new file mode 100644 index 0000000..ebb7618 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../lib/util/utils.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;GAEG;AACH,eAAO,MAAM,MAAM,SAIM,CAAC;AAE1B;;;;;GAKG;AACH,wBAAgB,UAAU,CAAC,UAAU,EAAE;IAAE,QAAQ,EAAE,MAAM,CAAA;CAAE,GAAG,OAAO,CAEpE;AAED;;;;;GAKG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAO7C;AAED;;;;;;;GAOG;AACH,wBAAgB,aAAa,CAAC,QAAQ,EAAE,qBAAqB,GAAG,GAAG,CAMlE;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,eAAe,GAAG,eAAe,CAMtE;AAED;;;;;;GAMG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAMjD;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,GAAG,GAAG,EAAE,CAkB/D;AAED;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAErC;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,2BAA2B,CAAC,gBAAgB,EAAE,KAAK,CAAC,GAAG,CAAC,EAAE,SAAS,EAAE,GAAG,gBAMvF;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,MAAM,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,EAAE,MAAM,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE;;EAK1F;AAED;;;;;GAKG;AACH,wBAAgB,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAEzD;AAED;;GAEG;AACH,MAAM,WAAW,eAAe,CAAC,OAAO;IACtC;;;;;;OAMG;IACH,CACE,GAAG,EAAE,KAAK,GAAG,SAAS,GAAG,IAAI,EAC7B,MAAM,CAAC,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,GAC/B,IAAI,CAAC;CACT;AAED;;;;;GAKG;AACH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,OAAO,CAAC,GAAG,CAAC,GAAG,QAAQ,CAcjE;AAED;;;;GAIG;AACH,wBAAgB,wBAAwB,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,qBAAqB,CAAC,GAAG,QAAQ,CAc7F;AAED,wBAAgB,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,WAAW,EAAE,MAAM;;EAK/D;AAED;;;;GAIG;AACH,wBAAgB,WAAW,CAAC,UAAU,EAAE,GAAG,EAAE,WAAW,EAAE,GAAG,EAAE,GAAG,IAAI,CAMrE;AAID;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;;;GAMG;AACH,wBAAgB,UAAU,CACxB,KAAK,EAAE,MAAM,GAAG,SAAS,EACzB,WAAW,EAAE,MAAM,EACnB,YAAY,EAAE,MAAM,GACnB,MAAM,GAAG,SAAS,CAEpB;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,GAAG,GAAG,OAAO,CAEnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.js b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js new 
file mode 100644 index 0000000..2487768 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js @@ -0,0 +1,229 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { v4 as uuidv4 } from "uuid"; +import { Constants } from "./constants"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck) { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); +} +/** + * Provides an array of values of an object. For example + * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"]. + * + * @param {object} obj An object whose properties need to be enumerated so that it"s values can be provided as an array + * + * @return {any[]} An array of values of the given object. + */ +export function objectValues(obj) { + var result = []; + if (obj && obj instanceof Object) { + for (var key in obj) { + if (obj.hasOwnProperty(key)) { + result.push(obj[key]); + } + } + } + else { + throw new Error("The provided object " + JSON.stringify(obj, undefined, 2) + " is not a valid object that can be " + "enumerated to provide its values as an array."); + } + return result; +} +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +export function generateUuid() { + return uuidv4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! 
+ * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +export function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} Returns the merged target object. + */ +export function mergeObjects(source, target) { + Object.keys(source).forEach(function (key) { + target[key] = source[key]; + }); + return target; +} +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +export function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +export function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +export function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; +} +export function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +export function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); +} +var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. 
+ * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +export function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +export function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map new file mode 100644 index 0000000..a59b702 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../lib/util/utils.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,EAAE,IAAI,MAAM,EAAE,MAAM,MAAM,CAAC;AAIpC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH,MAAM,CAAC,IAAM,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,UAAgC;IACzD,OAAO,UAAU,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,SAAS,CAAC,KAAK,CAAC;AAC/D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,aAAa,CAAC,QAA+B;IAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,OAAwB;IACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;IACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,GAA2B;IACtD,IAAM,MAAM,GAAU,EAAE,CAAC;IACzB,IAAI,GAAG,IAAI,GAAG,YAAY,MAAM,EAAE;QAChC,KAAK,IAAM,GAAG,IAAI,GAAG,EAAE;YACrB,IAAI,GAAG,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;gBAC3B,MAAM,CAAC,IAAI,CAAO,GAAI,CAAC,GAAG,CAAC,CAAC,CAAC;aAC9B;SACF;KACF;SAAM;QACL,MAAM,IAAI,KAAK,CACb,yBAAuB,IAAI,CAAC,SAAS,CACnC,GAAG,EACH,SAAS,EACT,CAAC,CACF,wCAAqC,GAAG,+CAA+C,CACzF,CAAC;KACH;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,OAAO,MAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,UAAU,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;IACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,
SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;QACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,MAA8B,EAAE,MAA8B;IACzF,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,UAAC,GAAG;QAC9B,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;IAC5B,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,KAAK,CAAI,CAAS,EAAE,KAAS;IAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,EAAd,CAAc,EAAE,CAAC,CAAC,EAAnC,CAAmC,CAAC,CAAC;AACvE,CAAC;AAqBD;;;;;GAKG;AACH,MAAM,UAAU,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAY;QAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;YACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;QACtB,CAAC,EACD,UAAC,GAAU;YACT,EAAE,CAAC,GAAG,CAAC,CAAC;QACV,CAAC,CACF,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,wBAAwB,CAAI,OAAuC;IACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAsB;QAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;YAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAC5E,CAAC,EACD,UAAC,GAAU;YACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;QAC5B,CAAC,CACF,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;IAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;AAChC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAAC,UAAe,EAAE,WAAkB;IAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;QAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;YAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QAC3D,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;AAElM;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,KAAa;IACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,KAAU;IACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,CAAC,IAAI,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts new file mode 100644 index 0000000..a9d4674 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts @@ -0,0 +1,5 @@ +export declare function parseXML(str: string): Promise; +export declare function stringifyXML(obj: any, opts?: { + rootName?: string; +}): string; +//# sourceMappingURL=xml.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map new file mode 100644 index 0000000..bdc6fc6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.browser.d.ts","sourceRoot":"","sources":["../../../lib/util/xml.browser.ts"],"names":[],"mappings":"AAsBA,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAUlD;AAwFD,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,UAMlE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js 
b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js new file mode 100644 index 0000000..18dbadc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var _a, _b; +var parser = new DOMParser(); +// Policy to make our code Trusted Types compliant. +// https://github.com/w3c/webappsec-trusted-types +// We are calling DOMParser.parseFromString() to parse XML payload from Azure services. +// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing +// according to the spec. There are no HTML/XSS security concerns on the usage of +// parseFromString() here. +var ttPolicy; +try { + if (typeof self.trustedTypes !== "undefined") { + ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", { + createHTML: function (s) { return s; }, + }); + } +} +catch (e) { + console.warn('Could not create trusted types policy "@azure/ms-rest-js#xml.browser"'); +} +export function parseXML(str) { + var _a; + try { + var dom = parser.parseFromString(((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML(str)) !== null && _a !== void 0 ? _a : str), "application/xml"); + throwIfError(dom); + var obj = domToObject(dom.childNodes[0]); + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } +} +var errorNS = ""; +try { + var invalidXML = ((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML("INVALID")) !== null && _a !== void 0 ? _a : "INVALID"); + errorNS = (_b = parser.parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI) !== null && _b !== void 0 ? _b : ""; +} +catch (ignored) { + // Most browsers will return a document containing , but IE will throw. +} +function throwIfError(dom) { + if (errorNS) { + var parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror"); + if (parserErrors.length) { + throw new Error(parserErrors.item(0).innerHTML); + } + } +} +function isElement(node) { + return !!node.attributes; +} +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ? 
node : undefined; +} +function domToObject(node) { + var result = {}; + var childNodeCount = node.childNodes.length; + var firstChildNode = node.childNodes[0]; + var onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + var elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result["$"] = {}; + for (var i = 0; i < elementWithAttributes.attributes.length; i++) { + var attr = elementWithAttributes.attributes[i]; + result["$"][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result["_"] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (var i = 0; i < childNodeCount; i++) { + var child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + var childObject = domToObject(child); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; +} +// tslint:disable-next-line:no-null-keyword +var doc = document.implementation.createDocument(null, null, null); +var serializer = new XMLSerializer(); +export function stringifyXML(obj, opts) { + var rootName = (opts && opts.rootName) || "root"; + var dom = buildNode(obj, rootName)[0]; + return ('' + serializer.serializeToString(dom)); +} +function buildAttributes(attrs) { + var result = []; + for (var _i = 0, _a = Object.keys(attrs); _i < _a.length; _i++) { + var key = _a[_i]; + var attr = doc.createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} +function buildNode(obj, elementName) { + if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { + var elem = doc.createElement(elementName); + elem.textContent = obj.toString(); + return [elem]; + } + else if (Array.isArray(obj)) { + var result = []; + for (var _i = 0, obj_1 = obj; _i < obj_1.length; _i++) { + var arrayElem = obj_1[_i]; + for (var _a = 0, _b = buildNode(arrayElem, elementName); _a < _b.length; _a++) { + var child = _b[_a]; + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + var elem = doc.createElement(elementName); + for (var _c = 0, _d = Object.keys(obj); _c < _d.length; _c++) { + var key = _d[_c]; + if (key === "$") { + for (var _e = 0, _f = buildAttributes(obj[key]); _e < _f.length; _e++) { + var attr = _f[_e]; + elem.attributes.setNamedItem(attr); + } + } + else { + for (var _g = 0, _h = buildNode(obj[key], key); _g < _h.length; _g++) { + var child = _h[_g]; + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error("Illegal value passed to buildObject: " + obj); + } +} +//# sourceMappingURL=xml.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map new file mode 100644 index 0000000..55f34fb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xml.browser.js","sourceRoot":"","sources":["../../../lib/util/xml.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,IAAM,MAAM,GAAG,IAAI,SAAS,EAAE,CAAC;AAE/B,mDAAmD;AACnD,mDAAmD;AACnD,uFAAuF;AACvF,qFAAqF;AACrF,kFAAkF;AAClF,0BAA0B;AAC1B,IAAI,QAA2D,CAAC;AAChE,IAAI;IACF,IAAI,OAAO,IAAI,CAAC,YAAY,KAAK,WAAW,EAAE;QAC5C,QAAQ,GAAG,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,+BAA+B,EAAE;YACzE,UAAU,EAAE,UAAC,CAAM,IAAK,OAAA,CAAC,EAAD,CAAC;SAC1B,CAAC,CAAC;KACJ;CACF;AAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,IAAI,CAAC,uEAAuE,CAAC,CAAC;CACvF;AAED,MAAM,UAAU,QAAQ,CAAC,GAAW;;IAClC,IAAI;QACF,IAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,OAAC,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,GAAG,oCAAK,GAAG,CAAW,EAAE,iBAAiB,CAAC,CAAC;QACpG,YAAY,CAAC,GAAG,CAAC,CAAC;QAElB,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3C,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;KAC7B;IAAC,OAAO,GAAG,EAAE;QACZ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;AACH,CAAC;AAED,IAAI,OAAO,GAAG,EAAE,CAAC;AACjB,IAAI;IACF,IAAM,UAAU,GAAG,OAAC,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,SAAS,oCAAK,SAAS,CAAW,CAAC;IAC5E,OAAO,SACL,MAAM,CAAC,eAAe,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;SAClF,YAAa,mCAAI,EAAE,CAAC;CAC1B;AAAC,OAAO,OAAO,EAAE;IAChB,oFAAoF;CACrF;AAED,SAAS,YAAY,CAAC,GAAa;IACjC,IAAI,OAAO,EAAE;QACX,IAAM,YAAY,GAAG,GAAG,CAAC,sBAAsB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QACxE,IAAI,YAAY,CAAC,MAAM,EAAE;YACvB,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAE,CAAC,SAAS,CAAC,CAAC;SAClD;KACF;AACH,CAAC;AAED,SAAS,SAAS,CAAC,IAAU;IAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,SAAS,uBAAuB,CAAC,IAAU;IACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;AACpE,CAAC;AAED,SAAS,WAAW,CAAC,IAAU;IAC7B,IAAI,MAAM,GAAQ,EAAE,CAAC;IAErB,IAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAEtD,IAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAChD,IAAM,kBAAkB,GACtB,CAAC,cAAc;QACb,cAAc,KAAK,CAAC;QACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;QAC1C,cAAc,CAAC,SAAS,CAAC;QAC3B,SAAS,CAAC;IAEZ,IAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;IACjF,IAAI,qBAAqB,EAAE;QACzB,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,IAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjD,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;SAC7C;QAED,IAAI,kBAAkB,EAAE;YACtB,MAAM,CAAC,GAAG,CAAC,GAAG,kBAAkB,CAAC;SAClC;KACF;SAAM,IAAI,cAAc,KAAK,CAAC,EAAE;QAC/B,MAAM,GAAG,EAAE,CAAC;KACb;SAAM,IAAI,kBAAkB,EAAE;QAC7B,MAAM,GAAG,kBAAkB,CAAC;KAC7B;IAED,IAAI,CAAC,kBAAkB,EAAE;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;YACvC,IAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjC,2CAA2C;YAC3C,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;gBACrC,IAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,CAAC,CAAC;gBAC5C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;oBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,WAAW,CAAC;iBACtC;qBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;oBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;iBAC1C;qBAAM;oBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;iBAChE;aACF;SACF;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,2CAA2C;AAC3C,IAAM,GAAG,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AACrE,IAAM,UAAU,GAAG,IAAI,aAAa,EAAE,CAAC;AAEvC,MAAM,UAAU,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,QAAQ,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,MAAM,CAAC;IACnD,IAAM,GAAG,GAAG,SAAS,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CA
AC,CAAC,CAAC;IACxC,OAAO,CACL,yDAAyD,GAAG,UAAU,CAAC,iBAAiB,CAAC,GAAG,CAAC,CAC9F,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CAAC,KAAgD;IACvE,IAAM,MAAM,GAAG,EAAE,CAAC;IAClB,KAAkB,UAAkB,EAAlB,KAAA,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAlB,cAAkB,EAAlB,IAAkB,EAAE;QAAjC,IAAM,GAAG,SAAA;QACZ,IAAM,IAAI,GAAG,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;QACtC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB;IAC9C,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,SAAS,EAAE;QAClF,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QAC5C,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,QAAQ,EAAE,CAAC;QAClC,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QAC7B,IAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAwB,UAAG,EAAH,WAAG,EAAH,iBAAG,EAAH,IAAG,EAAE;YAAxB,IAAM,SAAS,YAAA;YAClB,KAAoB,UAAiC,EAAjC,KAAA,SAAS,CAAC,SAAS,EAAE,WAAW,CAAC,EAAjC,cAAiC,EAAjC,IAAiC,EAAE;gBAAlD,IAAM,KAAK,SAAA;gBACd,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACpB;SACF;QACD,OAAO,MAAM,CAAC;KACf;SAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAClC,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QAC5C,KAAkB,UAAgB,EAAhB,KAAA,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAhB,cAAgB,EAAhB,IAAgB,EAAE;YAA/B,IAAM,GAAG,SAAA;YACZ,IAAI,GAAG,KAAK,GAAG,EAAE;gBACf,KAAmB,UAAyB,EAAzB,KAAA,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;oBAAzC,IAAM,IAAI,SAAA;oBACb,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;iBACpC;aACF;iBAAM;gBACL,KAAoB,UAAwB,EAAxB,KAAA,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAAzC,IAAM,KAAK,SAAA;oBACd,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;iBACzB;aACF;SACF;QACD,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,0CAAwC,GAAK,CAAC,CAAC;KAChE;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts new file mode 100644 index 0000000..3290810 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts @@ -0,0 +1,5 @@ +export declare function stringifyXML(obj: any, opts?: { + rootName?: string; +}): string; +export declare function parseXML(str: string): Promise; +//# sourceMappingURL=xml.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map new file mode 100644 index 0000000..e8c5349 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.d.ts","sourceRoot":"","sources":["../../../lib/util/xml.ts"],"names":[],"mappings":"AAKA,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,UAQlE;AAED,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAmBlD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.js b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js new file mode 100644 index 0000000..3e3bd25 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import * as xml2js from "xml2js"; +export function stringifyXML(obj, opts) { + var builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} +export function parseXML(str) { + var xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise(function (resolve, reject) { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, function (err, res) { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map new file mode 100644 index 0000000..6cf4bd0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.js","sourceRoot":"","sources":["../../../lib/util/xml.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC,MAAM,UAAU,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,OAAO,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC;QACjC,QAAQ,EAAE,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,QAAQ;QAC/B,UAAU,EAAE;YACV,MAAM,EAAE,KAAK;SACd;KACF,CAAC,CAAC;IACH,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,MAAM,UAAU,QAAQ,CAAC,GAAW;IAClC,IAAM,SAAS,GAAG,IAAI,MAAM,CAAC,MAAM,CAAC;QAClC,aAAa,EAAE,KAAK;QACpB,eAAe,EAAE,KAAK;QACtB,YAAY,EAAE,KAAK;KACpB,CAAC,CAAC;IACH,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAQ,EAAE,GAAQ;gBAC5C,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;YACH,CAAC,CAAC,CAAC;SACJ;IACH,CAAC,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts new file mode 100644 index 0000000..7eb39af --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts @@ -0,0 +1,337 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { OperationSpec } from "./operationSpec"; +import { Mapper } from "./serializer"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { AgentSettings, ProxySettings } from "./serviceClient"; +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + readonly aborted: boolean; + dispatchEvent: (event: Event) => boolean; + onabort: ((this: AbortSignalLike, ev: Event) => any) | null; + addEventListener: (type: "abort", listener: (this: AbortSignalLike, ev: Event) => any, options?: any) => void; + removeEventListener: (type: "abort", listener: (this: AbortSignalLike, ev: Event) => any, options?: any) => void; +} +/** + * An abstraction over a REST call. 
+ */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * HTTP(S) agent configuration. + */ + agentSettings?: AgentSettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + /** + * Used to abort the request later. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: any): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ +export declare class WebResource { + url: string; + method: HttpMethods; + body?: any; + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. 
+ */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + formData?: any; + query?: { + [key: string]: any; + }; + operationSpec?: OperationSpec; + withCredentials: boolean; + timeout: number; + proxySettings?: ProxySettings; + keepAlive?: boolean; + agentSettings?: AgentSettings; + redirectLimit?: number; + abortSignal?: AbortSignalLike; + /** Callback which fires upon upload progress. */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + constructor(url?: string, method?: HttpMethods, body?: any, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, agentSettings?: AgentSettings, redirectLimit?: number); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: { "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } } + * - query-parameter-value in "string" format: { "query-parameter-name": "query-parameter-value"}. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. 
Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}" + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: { "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } } + * - path-parameter-value in "string" format: { "path-parameter-name": "path-parameter-value" }. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: object; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + abortSignal?: AbortSignalLike; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. 
+ */ + redirectLimit?: number; + onUploadProgress?: (progress: TransferProgressEvent) => void; + onDownloadProgress?: (progress: TransferProgressEvent) => void; + streamResponseBody?: boolean; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + value: any; + skipUrlEncoding: boolean; + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * @property {object} [customHeaders] User defined custom request headers that + * will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + [key: string]: any; +} +//# sourceMappingURL=webResource.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map new file mode 100644 index 0000000..3943c0b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.d.ts","sourceRoot":"","sources":["../../lib/webResource.ts"],"names":[],"mappings":";AAGA,OAAO,EAAe,eAAe,EAAqB,MAAM,eAAe,CAAC;AAChF,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,MAAM,EAAc,MAAM,cAAc,CAAC;AAElD,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAE/D,oBAAY,WAAW,GACnB,KAAK,GACL,KAAK,GACL,MAAM,GACN,QAAQ,GACR,OAAO,GACP,MAAM,GACN,SAAS,GACT,OAAO,CAAC;AACZ,oBAAY,eAAe,GACvB,IAAI,GACJ,MAAM,GACN,WAAW,GACX,eAAe,GACf,CAAC,MAAM,MAAM,CAAC,cAAc,CAAC,CAAC;AAElC;;GAEG;AACH,oBAAY,qBAAqB,GAAG;IAClC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAEF;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,aAAa,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IACzC,OAAO,EAAE,CAAC,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,CAAC;IAC5D,gBAAgB,EAAE,CAChB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,EACnD,OAAO,CAAC,EAAE,GAAG,KACV,IAAI,CAAC;IACV,mBAAmB,EAAE,CACnB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,EACnD,OAAO,CAAC,EAAE,GAAG,KACV,IAAI,CAAC;CACX;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;I
AEvB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;;;OAIG;IACH,yBAAyB,IAAI,IAAI,CAAC;IAElC;;OAEG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,CAAC;IACzD;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,GAAG,GAAG,MAAM,IAAI,eAAe,CAgBxE;AAED;;;;;;;GAOG;AACH,qBAAa,WAAW;IACtB,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,WAAW,CAAC;IACpB,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,EAAE,eAAe,CAAC;IACzB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,eAAe,EAAE,OAAO,CAAC;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B,iDAAiD;IACjD,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;gBAG7D,GAAG,CAAC,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,WAAW,EACpB,IAAI,CAAC,EAAE,GAAG,EACV,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,EAC9B,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,GAAG,eAAe,EAClD,kBAAkB,CAAC,EAAE,OAAO,EAC5B,eAAe,CAAC,EAAE,OAAO,EACzB,WAAW,CAAC,EAAE,eAAe,EAC7B,OAAO,CAAC,EAAE,MAAM,EAChB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC5D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC9D,aAAa,CAAC,EAAE,aAAa,EAC7B,SAAS,CAAC,EAAE,OAAO,EACnB,aAAa,CAAC,EAAE,aAAa,EAC7B,aAAa,CAAC,EAAE,MAAM;IAoBxB;;;;OAIG;IACH,yBAAyB,IAAI,IAAI;IASjC;;;;OAIG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,WAAW;IA6MpD;;;OAGG;IACH,KAAK,IAAI,WAAW;CAqCrB;AAED,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;IACb;;;;;;;;;;OAUG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IAC1D;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;;OAIG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;;;;;;;;OASG;IACH,cAAc,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IACzD,QAAQ,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAClC;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC;;OAEG;IACH,sBAAsB,CAAC,EAAE,OAAO,CAAC;IACjC;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAC;IACrC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC7D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D,kBAAkB,CAAC,EAAE,OAAO,CAAC;CAC9B;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,KAAK,EAAE,GAAG,CAAC;IACX,eAAe,EAAE,OAAO,CAAC;IACzB,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D,CAAC,GAAG,EAAE,MAAM,G
AAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.js b/node_modules/@azure/ms-rest-js/es/lib/webResource.js new file mode 100644 index 0000000..8fd1460 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.js @@ -0,0 +1,257 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders, isHttpHeadersLike } from "./httpHeaders"; +import { Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +export function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ +var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
" + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). + this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; +}()); +export { WebResource }; +//# sourceMappingURL=webResource.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map b/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map new file mode 100644 index 0000000..8cd8e4d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.js","sourceRoot":"","sources":["../../lib/webResource.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAmB,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAEhF,OAAO,EAAU,UAAU,EAAE,MAAM,cAAc,CAAC;AAClD,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AA2J5C,MAAM,UAAU,iBAAiB,CAAC,MAAW;IAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,OAAO,KAAK,CAAC;KACd;IACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;QAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;QACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;QAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;QACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;QACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;QACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;GAOG;AACH;IAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;QAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,+CAAyB,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;IACH,CAAC;IAED;;;;OAIG;IACH,6BAAO,GAAP,UAAQ,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC;YACzF,CAAC,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,iCAAiC;QACjC,IAAI,OA
AO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;QAED,iBAAiB;QACjB,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;QAE1D,iDAAiD;QACjD,IAAI,OAAO,CAAC,YAAY,EAAE;YAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;YACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,KAAG,GACL,OAAO;gBACP,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;gBAClC,CAAC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAY,CAAC,CAAC;YACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;YAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,gBAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,CAAC,CAAC,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;6BAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;6BACJ,8EAA0E,aAAa,kCAA6B,CAAA;6BACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;SAChB;QAED,iHAAiH;QACjH,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,yFAAqF;oBACrF,mJAA2I,CAC9I,CAAC;aACH;YACD,uDAAuD;YACvD,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;YACD,wBAAwB;YACxB,IAAM,WAAW,GAAG,EAAE,CAAC;YACvB,4GAA4G;YAC5G,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CAAC,cAA
c,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF,CAAC,aAAa;YACf,yBAAyB;YACzB,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;QAED,kDAAkD;QAClD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAAlD,IAAM,UAAU,SAAA;gBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;QACD,0CAA0C;QAC1C,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;QACD,yCAAyC;QACzC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;SAC5D;QAED,UAAU;QACV,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;QAED,0HAA0H;QAC1H,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,IAAI,SAAS,EAAE;YAC7B,6HAA6H;YAC7H,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;QAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QAErD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACH,2BAAK,GAAL;QACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,kBAAC;AAAD,CAAC,AArVD,IAqVC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts new file mode 100644 index 0000000..8a823d6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpClient } from "./httpClient"; +import { HttpHeaders } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeaders; +//# sourceMappingURL=xhrHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map new file mode 100644 index 0000000..b1aa40f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xhrHttpClient.d.ts","sourceRoot":"","sources":["../../lib/xhrHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,eAAe,EAAyB,MAAM,eAAe,CAAC;AACvE,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAGhE;;GAEG;AACH,qBAAa,aAAc,YAAW,UAAU;IACvC,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAuG7E;AAgBD,wBAAgB,YAAY,CAAC,GAAG,EAAE,cAAc,eAa/C"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js new file mode 100644 index 0000000..4fa6828 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "./httpHeaders"; +import { RestError } from "./restError"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +var XhrHttpClient = /** @class */ (function () { + function XhrHttpClient() { + } + XhrHttpClient.prototype.sendRequest = function (request) { + var xhr = new XMLHttpRequest(); + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + var abortSignal = request.abortSignal; + if (abortSignal) { + var listener_1 = function () { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener_1); + xhr.addEventListener("readystatechange", function () { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener_1); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + var formData = request.formData; + var requestForm_1 = new FormData(); + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (var _i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + var formKey = _a[_i]; + var formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm_1; + request.formData = undefined; + var contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = 
request.withCredentials; + for (var _b = 0, _c = request.headers.headersArray(); _b < _c.length; _b++) { + var header = _c[_b]; + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? null : request.body); + if (request.streamResponseBody) { + return new Promise(function (resolve, reject) { + xhr.addEventListener("readystatechange", function () { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + var blobBody = new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody: blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + return resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + }; + return XhrHttpClient; +}()); +export { XhrHttpClient }; +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", function (rawEvent) { + return listener({ + loadedBytes: rawEvent.loaded, + }); + }); + } +} +// exported locally for testing +export function parseHeaders(xhr) { + var responseHeaders = new HttpHeaders(); + var headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (var _i = 0, headerLines_1 = headerLines; _i < headerLines_1.length; _i++) { + var line = headerLines_1[_i]; + var index = line.indexOf(":"); + var headerName = line.slice(0, index); + var headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", function () { + return reject(new RestError("Failed to send request to " + request.url, RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + xhr.addEventListener("abort", function () { + return reject(new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request)); + }); + xhr.addEventListener("timeout", function () { + return reject(new RestError("timeout of " + xhr.timeout + "ms exceeded", RestError.REQUEST_SEND_ERROR, undefined, request)); + }); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map new file mode 100644 index 0000000..1c00438 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xhrHttpClient.js","sourceRoot":"","sources":["../../lib/xhrHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAG/F,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAG5C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH;IAAA;IAwGA,CAAC;IAvGQ,mCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;QAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;SAC7E;QAED,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACxC,IAAI,WAAW,EAAE;YACf,IAAM,UAAQ,GAAG;gBACf,GAAG,CAAC,KAAK,EAAE,CAAC;YACd,CAAC,CAAC;YACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;YAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;gBACvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;oBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;iBACpD;YACH,CAAC,CAAC,CAAC;SACJ;QAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;QAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;YACpB,IAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YAClC,IAAM,aAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,IAAM,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;gBAC9C,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oBAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAsB,UAAqB,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gBAAxC,IAAM,OAAO,SAAA;gBAChB,IAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,OAAO,CAAC,IAAI,GAAG,aAAW,CAAC;YAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC7B,IAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,kEAAkE;gBAClE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;aACxC;SACF;QAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;QAC9C,KAAqB,UAA8B,EAA9B,KAAA,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;YAAhD,IAAM,MAAM,SAAA;YACf,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACjD;QACD,GAAG,CAAC,YAAY,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;QAEhE,2CAA2C;QAC3C,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE3D,IAAI,OAAO,CAAC,kBAAkB,EAAE;YAC9B,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACjC,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;oBACvC,wCAAwC;oBACxC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;wBACtD,IAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,UAAC,OAAO,EAAE,MAAM;4BACjD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;gCAC3B,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;4BACxB,CAAC,CAAC,CAAC;4BACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;wBAC9C,CAAC,CAAC,CAAC;wBACH,OAAO,CAAC;4BACN,OAAO,SAAA;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;4BAC1B,QAAQ,UAAA;yBACT,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;gBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;aAAM;YACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;gBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;oBAC3B,OAAA,OAAO,CAAC;wBACN,OAAO,SAAA;wBACP,MAAM,EAAE,GAAG,CAAC,MAAM;wBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;wBAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;qBAC7B,CAAC;gBALF,CAKE,CA
CH,CAAC;gBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;IACH,oBAAC;AAAD,CAAC,AAxGD,IAwGC;;AAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;IAEpD,IAAI,QAAQ,EAAE;QACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,UAAC,QAAQ;YACxC,OAAA,QAAQ,CAAC;gBACP,WAAW,EAAE,QAAQ,CAAC,MAAM;aAC7B,CAAC;QAFF,CAEE,CACH,CAAC;KACH;AACH,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,YAAY,CAAC,GAAmB;IAC9C,IAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;IAC1C,IAAM,WAAW,GAAG,GAAG;SACpB,qBAAqB,EAAE;SACvB,IAAI,EAAE;SACN,KAAK,CAAC,SAAS,CAAC,CAAC;IACpB,KAAmB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA3B,IAAM,IAAI,oBAAA;QACb,IAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;QACxC,IAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;KAC9C;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;IAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;QAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,+BAA6B,OAAO,CAAC,GAAK,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;IAPD,CAOC,CACF,CAAC;IACF,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;QAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CAAC,yBAAyB,EAAE,SAAS,CAAC,qBAAqB,EAAE,SAAS,EAAE,OAAO,CAAC,CAC9F;IAFD,CAEC,CACF,CAAC;IACF,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC9B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,gBAAc,GAAG,CAAC,OAAO,gBAAa,EACtC,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;IAPD,CAOC,CACF,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts new file mode 100644 index 0000000..2368e05 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { + CommonRequestInfo, + CommonRequestInit, + CommonResponse, + FetchHttpClient, +} from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; + +export class BrowserFetchHttpClient extends FetchHttpClient { + prepareRequest(_httpRequest: WebResourceLike): Promise> { + return Promise.resolve({}); + } + + processRequest(_operationResponse: HttpOperationResponse): Promise { + return Promise.resolve(); + } + + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise { + return fetch(input, init); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts new file mode 100644 index 0000000..cface5a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders } from "../httpHeaders"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; + +/** + * @interface ApiKeyCredentialOptions + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { [x: string]: any }; + /** + * A key value pair of the query parameters that need to be applied to the request. 
+ */ + inQuery?: { [x: string]: any }; +} + +/** + * Authenticates to a service using an API key. + */ +export class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?: { [x: string]: any }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?: { [x: string]: any }; + + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error( + `options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.` + ); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise { + if (!webResource) { + return Promise.reject( + new Error(`webResource cannot be null or undefined and must be of type "object".`) + ); + } + + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts b/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts new file mode 100644 index 0000000..33992d8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { Constants as MSRestConstants } from "../util/constants"; +import { WebResource } from "../webResource"; + +import { TokenCredential } from "@azure/core-auth"; +import { TokenResponse } from "./tokenResponse"; + +const DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +export const azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; + +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. 
+ */ +export class AzureIdentityCredentialAdapter implements ServiceClientCredentials { + private azureTokenCredential: TokenCredential; + private scopes: string | string[]; + constructor( + azureTokenCredential: TokenCredential, + scopes: string | string[] = "https://management.azure.com/.default" + ) { + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + + public async getToken(): Promise { + const accessToken = await this.azureTokenCredential.getToken(this.scopes); + if (accessToken !== null) { + const result: TokenResponse = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return result; + } else { + throw new Error("Could find token for scope"); + } + } + + public async signRequest(webResource: WebResource) { + const tokenResponse = await this.getToken(); + webResource.headers.set( + MSRestConstants.HeaderConstants.AUTHORIZATION, + `${tokenResponse.tokenType} ${tokenResponse.accessToken}` + ); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts new file mode 100644 index 0000000..e92242d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders } from "../httpHeaders"; +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; + +export class BasicAuthenticationCredentials implements ServiceClientCredentials { + userName: string; + password: string; + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME; + + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor( + userName: string, + password: string, + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME + ) { + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. 
+ */ + signRequest(webResource: WebResourceLike) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`; + if (!webResource.headers) webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts new file mode 100644 index 0000000..7e9096b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export type Authenticator = (challenge: object) => Promise; diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts new file mode 100644 index 0000000..bf6615b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ApiKeyCredentials, ApiKeyCredentialOptions } from "./apiKeyCredentials"; + +export class DomainCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + constructor(domainKey: string) { + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + const options: ApiKeyCredentialOptions = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + super(options); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts new file mode 100644 index 0000000..9a0247d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { WebResourceLike } from "../webResource"; + +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike/request to be signed. + * @returns {Promise} The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts new file mode 100644 index 0000000..0151630 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; + +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +export class TokenCredentials implements ServiceClientCredentials { + token: string; + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME; + + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) { + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike) { + if (!webResource.headers) webResource.headers = new HttpHeaders(); + webResource.headers.set( + HeaderConstants.AUTHORIZATION, + `${this.authorizationScheme} ${this.token}` + ); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts b/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts new file mode 100644 index 0000000..aef8969 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * TokenResponse is defined in `@azure/ms-rest-nodeauth` and is copied here to not + * add an unnecessary dependency. + */ +export interface TokenResponse { + readonly tokenType: string; + readonly accessToken: string; + readonly [x: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts new file mode 100644 index 0000000..55d435a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ApiKeyCredentials, ApiKeyCredentialOptions } from "./apiKeyCredentials"; + +export class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + constructor(topicKey: string) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options: ApiKeyCredentialOptions = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts new file mode 100644 index 0000000..5ac7a79 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; diff --git a/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts new file mode 100644 index 0000000..2f1c29d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; diff --git a/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts new file mode 100644 index 0000000..d28b011 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import AbortController from "abort-controller"; +import FormData from "form-data"; + +import { HttpClient } from "./httpClient"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { HttpHeaders, HttpHeadersLike } from "./httpHeaders"; +import { RestError } from "./restError"; +import { Readable, Transform } from "stream"; + +interface FetchError extends Error { + code?: string; + errno?: string; + type?: string; +} + +export type CommonRequestInfo = string; // we only call fetch() on string urls. + +export type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; + +export type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; + +export abstract class FetchHttpClient implements HttpClient { + async sendRequest(httpRequest: WebResourceLike): Promise { + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error( + "'httpRequest' (WebResource) cannot be null or undefined and must be of type object." 
+ ); + } + + const abortController = new AbortController(); + let abortListener: ((event: any) => void) | undefined; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError( + "The request was aborted", + RestError.REQUEST_ABORTED_ERROR, + undefined, + httpRequest + ); + } + + abortListener = (event: Event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + + if (httpRequest.timeout) { + setTimeout(() => { + abortController.abort(); + }, httpRequest.timeout); + } + + if (httpRequest.formData) { + const formData: any = httpRequest.formData; + const requestForm = new FormData(); + const appendFormValue = (key: string, value: any) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm.append(key, value.value, value.options); + } else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } else { + appendFormValue(formKey, formValue); + } + } + + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set( + "Content-Type", + `multipart/form-data; boundary=${requestForm.getBoundary()}` + ); + } else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + let loadedBytes = 0; + const uploadReportStream = new Transform({ + transform: (chunk: string | Buffer, _encoding, callback) => { + loadedBytes += chunk.length; + httpRequest.onUploadProgress!({ loadedBytes }); + callback(undefined, chunk); + }, + }); + + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } else { + uploadReportStream.end(body); + } + + body = uploadReportStream; + } + + const platformSpecificRequestInit: Partial = await this.prepareRequest( + httpRequest + ); + + const requestInit: RequestInit = { + body: body, + headers: httpRequest.headers.rawHeaders(), + method: httpRequest.method, + signal: abortController.signal, + redirect: "manual", + ...platformSpecificRequestInit, + }; + + let operationResponse: HttpOperationResponse | undefined; + try { + const response: CommonResponse = await this.fetch(httpRequest.url, requestInit); + + const headers = parseHeaders(response.headers); + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? ((response.body as unknown) as NodeJS.ReadableStream) + : undefined, + bodyAsText: !httpRequest.streamResponseBody ? 
await response.text() : undefined, + redirected: response.redirected, + url: response.url, + }; + + const onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + const responseBody: ReadableStream | undefined = response.body || undefined; + + if (isReadableStream(responseBody)) { + let loadedBytes = 0; + const downloadReportStream = new Transform({ + transform: (chunk: string | Buffer, _encoding, callback) => { + loadedBytes += chunk.length; + onDownloadProgress({ loadedBytes }); + callback(undefined, chunk); + }, + }); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } else { + const length = parseInt(headers.get("Content-Length")!) || undefined; + if (length) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length }); + } + } + } + + await this.processRequest(operationResponse); + + return operationResponse; + } catch (error) { + const fetchError: FetchError = error; + if (fetchError.code === "ENOTFOUND") { + throw new RestError( + fetchError.message, + RestError.REQUEST_SEND_ERROR, + undefined, + httpRequest + ); + } else if (fetchError.type === "aborted") { + throw new RestError( + "The request was aborted", + RestError.REQUEST_ABORTED_ERROR, + undefined, + httpRequest + ); + } + + throw fetchError; + } finally { + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse?.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse!.readableStreamBody); + } + + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + httpRequest.abortSignal?.removeEventListener("abort", abortListener!); + return; + }) + .catch((_e) => {}); + } + } + } + + abstract async prepareRequest(httpRequest: WebResourceLike): Promise>; + abstract async processRequest(operationResponse: HttpOperationResponse): Promise; + abstract async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; +} + +function isReadableStream(body: any): body is Readable { + return body && typeof body.pipe === "function"; +} + +function isStreamComplete(stream: Readable): Promise { + return new Promise((resolve) => { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} + +export function parseHeaders(headers: Headers): HttpHeadersLike { + const httpHeaders = new HttpHeaders(); + + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + + return httpHeaders; +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpClient.ts b/node_modules/@azure/ms-rest-js/lib/httpClient.ts new file mode 100644 index 0000000..0d01984 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpClient.ts @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { RequestPolicy } from "./policies/requestPolicy"; + +/** + * An interface that can send HttpRequests and receive promised HttpResponses. 
+ */ +export interface HttpClient extends RequestPolicy {} diff --git a/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts b/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts new file mode 100644 index 0000000..698279a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts @@ -0,0 +1,230 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName: string) { + return headerName.toLowerCase(); +} + +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + + /** + * The value of the header. + */ + value: string; +} + +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export type RawHttpHeaders = { [headerName: string]: string }; + +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(): RawHttpHeaders; +} + +export function isHttpHeadersLike(object?: any): object is HttpHeadersLike { + if (!object || typeof object !== "object") { + return false; + } + + if ( + typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function" + ) { + return true; + } + return false; +} + +/** + * A collection of HTTP header key/value pairs. 
+ */ +export class HttpHeaders { + private readonly _headersMap: { [headerKey: string]: HttpHeader }; + + constructor(rawHeaders?: RawHttpHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + public set(headerName: string, headerValue: string | number): void { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + public get(headerName: string): string | undefined { + const header: HttpHeader = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + public contains(headerName: string): boolean { + return !!this._headersMap[getHeaderKey(headerName)]; + } + + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + public remove(headerName: string): boolean { + const result: boolean = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + + /** + * Get the headers that are contained this collection as an object. + */ + public rawHeaders(): RawHttpHeaders { + const result: RawHttpHeaders = {}; + for (const headerKey in this._headersMap) { + const header: HttpHeader = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + } + + /** + * Get the headers that are contained in this collection as an array. + */ + public headersArray(): HttpHeader[] { + const headers: HttpHeader[] = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + + /** + * Get the header names that are contained in this collection. + */ + public headerNames(): string[] { + const headerNames: string[] = []; + const headers: HttpHeader[] = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + + /** + * Get the header names that are contained in this collection. + */ + public headerValues(): string[] { + const headerValues: string[] = []; + const headers: HttpHeader[] = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + + /** + * Get the JSON object representation of this HTTP header collection. + */ + public toJson(): RawHttpHeaders { + return this.rawHeaders(); + } + + /** + * Get the string representation of this HTTP header collection. + */ + public toString(): string { + return JSON.stringify(this.toJson()); + } + + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + public clone(): HttpHeaders { + return new HttpHeaders(this.rawHeaders()); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts b/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts new file mode 100644 index 0000000..c61dfd5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { WebResourceLike } from "./webResource"; +import { HttpHeadersLike } from "./httpHeaders"; + +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + + /** + * The HTTP response status (e.g. 200) + */ + status: number; + + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} + +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob {} +} + +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { [key: string]: any }; + + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + + /** + * The redirected property indicates whether the response is the result of a request which was redirected. + */ + redirected?: boolean; + + /** + * The url property contains the URL of the response. The value will be the final URL obtained after any redirects. + */ + url?: string; +} + +/** + * The flattened response to a REST call. + * Contains the underlying HttpOperationResponse as well as + * the merged properties of the parsedBody, parsedHeaders, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: HttpOperationResponse; + + [key: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts new file mode 100644 index 0000000..a8cf3a5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF, + + /** + * An error log. + */ + ERROR, + + /** + * A warning log. + */ + WARNING, + + /** + * An information log. 
+ */ + INFO, +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts new file mode 100644 index 0000000..47929c4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; + +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} + +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + constructor(public minimumLogLevel: HttpPipelineLogLevel) {} + + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void { + const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/msRest.ts b/node_modules/@azure/ms-rest-js/lib/msRest.ts new file mode 100644 index 0000000..0b0ea24 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/msRest.ts @@ -0,0 +1,114 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +/// + +export { + WebResource, + WebResourceLike, + HttpRequestBody, + RequestPrepareOptions, + HttpMethods, + ParameterValue, + RequestOptionsBase, + TransferProgressEvent, + AbortSignalLike, +} from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { CommonRequestInfo, CommonRequestInit, CommonResponse } from "./fetchHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { + OperationParameter, + OperationQueryParameter, + OperationURLParameter, + ParameterPath, +} from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { + AgentSettings, + ProxySettings, + ServiceClient, + ServiceClientOptions, + flattenResponse, +} from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptions, + RequestPolicyOptionsLike, +} from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { + TelemetryInfo, + userAgentPolicy, + getDefaultUserAgentValue, +} from "./policies/userAgentPolicy"; +export { + DeserializationContentTypes, + deserializationPolicy, + deserializeResponseBody, +} from "./policies/deserializationPolicy"; +export { + MapperType, + SimpleMapperType, + CompositeMapperType, + DictionaryMapperType, + SequenceMapperType, + EnumMapperType, + Mapper, + BaseMapper, + CompositeMapper, + SequenceMapper, + DictionaryMapper, + EnumMapper, + MapperConstraints, + PolymorphicDiscriminator, + Serializer, + UrlParameterValue, + serializeObject, +} from "./serializer"; +export { + stripRequest, + stripResponse, + delay, + executePromisesSequentially, + generateUuid, + encodeUri, + ServiceCallback, + promiseToCallback, + promiseToServiceCallback, + isValidUuid, + applyMixins, + isNode, + isDuration, +} from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; + +// Credentials +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { TokenResponse } from "./credentials/tokenResponse"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from 
"./credentials/domainCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; diff --git a/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts new file mode 100644 index 0000000..065ecdf --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as http from "http"; +import * as https from "https"; +import node_fetch from "node-fetch"; + +import { + CommonRequestInfo, + CommonRequestInit, + CommonResponse, + FetchHttpClient, +} from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +import { createProxyAgent, ProxyAgent } from "./proxyAgent"; + +export class NodeFetchHttpClient extends FetchHttpClient { + async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise { + return (node_fetch(input, init) as unknown) as Promise; + } + + async prepareRequest(httpRequest: WebResourceLike): Promise> { + const requestInit: Partial = {}; + + if (httpRequest.agentSettings) { + const { http: httpAgent, https: httpsAgent } = httpRequest.agentSettings; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } else if (httpAgent) { + requestInit.agent = httpAgent; + } + } else if (httpRequest.proxySettings) { + const tunnel: ProxyAgent = createProxyAgent( + httpRequest.url, + httpRequest.proxySettings, + httpRequest.headers + ); + requestInit.agent = tunnel.agent; + } + + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } else { + const options: http.AgentOptions | https.AgentOptions = { keepAlive: true }; + const agent = httpRequest.url.startsWith("https") + ? new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + + return requestInit; + } + + async processRequest(_operationResponse: HttpOperationResponse): Promise { + /* no_op */ + return; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationArguments.ts b/node_modules/@azure/ms-rest-js/lib/operationArguments.ts new file mode 100644 index 0000000..55090db --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationArguments.ts @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { RequestOptionsBase } from "./webResource"; + +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationParameter.ts b/node_modules/@azure/ms-rest-js/lib/operationParameter.ts new file mode 100644 index 0000000..150b18a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationParameter.ts @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { Mapper } from "./serializer"; + +export type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath }; + +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} + +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} + +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} + +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export function getPathStringFromParameter(parameter: OperationParameter): string { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} + +export function getPathStringFromParameterPath( + parameterPath: ParameterPath, + mapper: Mapper +): string { + let result: string; + if (typeof parameterPath === "string") { + result = parameterPath; + } else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } else { + result = mapper.serializedName!; + } + return result; +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationResponse.ts b/node_modules/@azure/ms-rest-js/lib/operationResponse.ts new file mode 100644 index 0000000..0fbea88 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationResponse.ts @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { Mapper } from "./serializer"; + +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + + /** + * The mapper that will be used to deserialize the response body. + */ + bodyMapper?: Mapper; +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationSpec.ts b/node_modules/@azure/ms-rest-js/lib/operationSpec.ts new file mode 100644 index 0000000..fe8b84a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationSpec.ts @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { + OperationParameter, + OperationQueryParameter, + OperationURLParameter, +} from "./operationParameter"; +import { OperationResponse } from "./operationResponse"; +import { MapperType, Serializer } from "./serializer"; +import { HttpMethods } from "./webResource"; + +/** + * A specification that defines an operation. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. + */ + readonly headerParameters?: ReadonlyArray; + + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + + /** + * The different types of responses that this operation can return based on what status code is + * returned. + */ + readonly responses: { [responseCode: string]: OperationResponse }; +} + +export function isStreamOperation(operationSpec: OperationSpec): boolean { + let result = false; + for (const statusCode in operationSpec.responses) { + const operationResponse: OperationResponse = operationSpec.responses[statusCode]; + if ( + operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream + ) { + result = true; + break; + } + } + return result; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts new file mode 100644 index 0000000..cca7184 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { AgentSettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +const agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); + +export function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory { + return { + create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => { + throw agentNotSupportedInBrowser; + }, + }; +} + +export class AgentPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) { + super(nextPolicy, options); + throw agentNotSupportedInBrowser; + } + + public sendRequest(_request: WebResourceLike): Promise { + throw agentNotSupportedInBrowser; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts new file mode 100644 index 0000000..624a14b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { AgentSettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +export function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new AgentPolicy(nextPolicy, options, agentSettings!); + }, + }; +} + +export class AgentPolicy extends BaseRequestPolicy { + agentSettings: AgentSettings; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + agentSettings: AgentSettings + ) { + super(nextPolicy, options); + this.agentSettings = agentSettings; + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts new file mode 100644 index 0000000..f14f78c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts @@ -0,0 +1,294 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { OperationResponse } from "../operationResponse"; +import { OperationSpec, isStreamOperation } from "../operationSpec"; +import { RestError } from "../restError"; +import { Mapper, MapperType } from "../serializer"; +import * as utils from "../util/utils"; +import { parseXML } from "../util/xml"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. 
+ */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} + +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export function deserializationPolicy( + deserializationContentTypes?: DeserializationContentTypes +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} + +export const defaultJsonContentTypes = ["application/json", "text/json"]; +export const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; + +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export class DeserializationPolicy extends BaseRequestPolicy { + public readonly jsonContentTypes: string[]; + public readonly xmlContentTypes: string[]; + + constructor( + nextPolicy: RequestPolicy, + deserializationContentTypes: DeserializationContentTypes | undefined, + options: RequestPolicyOptionsLike + ) { + super(nextPolicy, options); + + this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + } + + public async sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request) + .then((response: HttpOperationResponse) => + deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response) + ); + } +} + +function getOperationResponse( + parsedResponse: HttpOperationResponse +): undefined | OperationResponse { + let result: OperationResponse | undefined; + const request: WebResourceLike = parsedResponse.request; + const operationSpec: OperationSpec | undefined = request.operationSpec; + if (operationSpec) { + const operationResponseGetter: + | undefined + | (( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse) = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} + +function shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean { + const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) = + parsedResponse.request.shouldDeserialize; + let result: boolean; + if (shouldDeserialize === undefined) { + result = true; + } else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } else { + result = shouldDeserialize(parsedResponse); + } + return result; +} + +export function deserializeResponseBody( + jsonContentTypes: string[], + xmlContentTypes: string[], + response: HttpOperationResponse +): Promise { + return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => { + const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + 
const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + const statusCode: number = parsedResponse.status; + + const expectedStatusCodes: string[] = Object.keys(operationSpec.responses); + + const hasNoExpectedStatusCodes: boolean = + expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + + const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse); + + const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + const defaultResponseSpec: OperationResponse = operationSpec.responses.default; + if (defaultResponseSpec) { + const initialErrorMessage: string = isStreamOperation(operationSpec) + ? `Unexpected status code: ${statusCode}` + : (parsedResponse.bodyAsText as string); + + const error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = utils.stripRequest(parsedResponse.request); + error.response = utils.stripResponse(parsedResponse); + + let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + const defaultResponseBodyMapper: Mapper | undefined = + defaultResponseSpec.bodyMapper; + if ( + defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError" + ) { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } else { + let internalError: any = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + + if (defaultResponseBodyMapper) { + let valueToDeserialize: any = parsedErrorResponse; + if ( + operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence + ) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!] + : []; + } + error.body = operationSpec.serializer.deserialize( + defaultResponseBodyMapper, + valueToDeserialize, + "error.body" + ); + } + } + } catch (defaultError) { + error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`; + } + return Promise.reject(error); + } + } else if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize: any = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!] 
+ : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize( + responseSpec.bodyMapper, + valueToDeserialize, + "operationRes.parsedBody" + ); + } catch (error) { + const restError = new RestError( + `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}` + ); + restError.request = utils.stripRequest(parsedResponse.request); + restError.response = utils.stripResponse(parsedResponse); + return Promise.reject(restError); + } + } else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize( + responseSpec.headersMapper, + parsedResponse.headers.rawHeaders(), + "operationRes.parsedHeaders" + ); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} + +function parse( + jsonContentTypes: string[], + xmlContentTypes: string[], + operationResponse: HttpOperationResponse +): Promise { + const errorHandler = (err: Error & { code: string }) => { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError( + msg, + errCode, + operationResponse.status, + operationResponse.request, + operationResponse, + operationResponse.bodyAsText + ); + return Promise.reject(e); + }; + + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType: string = operationResponse.headers.get("Content-Type") || ""; + const contentComponents: string[] = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + if ( + contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1) + ) { + return new Promise((resolve) => { + operationResponse.parsedBody = JSON.parse(text); + resolve(operationResponse); + }).catch(errorHandler); + } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + return parseXML(text) + .then((body) => { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + + return Promise.resolve(operationResponse); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts new file mode 100644 index 0000000..74aeb60 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts @@ -0,0 +1,220 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
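/*
 * [Editor's note: illustrative sketch only, not part of the vendored @azure/ms-rest-js diff.]
 * The deserializationPolicy() factory completed in the hunk above takes an optional
 * DeserializationContentTypes override. A minimal sketch, assuming the package root
 * re-exports the factory (as the @azure/ms-rest-js index does); the extra media type
 * "application/vnd.contoso+json" is a made-up example value.
 */
import { deserializationPolicy } from "@azure/ms-rest-js";

// Treat a vendor-specific JSON media type as JSON, in addition to the defaults
// ("application/json", "text/json").
const deserializerFactory = deserializationPolicy({
  json: ["application/json", "text/json", "application/vnd.contoso+json"],
});
// A ServiceClient-style pipeline calls deserializerFactory.create(nextPolicy, options)
// to obtain a DeserializationPolicy that parses JSON/XML bodies on the way back up.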
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { RestError } from "../restError"; + +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} + +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} + +export function exponentialRetryPolicy( + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ExponentialRetryPolicy( + nextPolicy, + options, + retryCount, + retryInterval, + minRetryInterval, + maxRetryInterval + ); + }, + }; +} + +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +const DEFAULT_CLIENT_RETRY_COUNT = 3; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The minimum retry interval in milliseconds. + */ + minRetryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number + ) { + super(nextPolicy, options); + function isNumber(n: any): n is number { + return typeof n === "number"; + } + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry(this, request, response)) + .catch((error) => retry(this, request, error.response, undefined, error)); + } +} + +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. 
+ */ +function shouldRetry( + policy: ExponentialRetryPolicy, + statusCode: number | undefined, + retryData: RetryData +): boolean { + if ( + statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505 + ) { + return false; + } + + let currentCount: number; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } else { + currentCount = retryData && retryData.retryCount; + } + + return currentCount < policy.retryCount; +} + +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. + */ +function updateRetryData( + policy: ExponentialRetryPolicy, + retryData?: RetryData, + err?: RetryError +): RetryData { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + + retryData.error = err; + } + + // Adjust retry count + retryData.retryCount++; + + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount) - 1; + const boundedRandDelta = + policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + + retryData.retryInterval = Math.min( + policy.minRetryInterval + incrementDelta, + policy.maxRetryInterval + ); + + return retryData; +} + +function retry( + policy: ExponentialRetryPolicy, + request: WebResourceLike, + response?: HttpOperationResponse, + retryData?: RetryData, + requestError?: RetryError +): Promise { + retryData = updateRetryData(policy, retryData, requestError); + const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return utils + .delay(retryData.retryInterval) + .then(() => policy._nextPolicy.sendRequest(request.clone())) + .then((res) => retry(policy, request, res, retryData, undefined)) + .catch((err) => retry(policy, request, response, retryData, err)); + } else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = + retryData.error || + new RestError( + "Failed to send the request.", + RestError.REQUEST_SEND_ERROR, + response && response.status, + response && response.request, + response + ); + return Promise.reject(err); + } else { + return Promise.resolve(response); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts new file mode 100644 index 0000000..aa17a31 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
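/*
 * [Editor's note: illustrative sketch only, not part of the vendored diff.]
 * Configuring the exponentialRetryPolicy() factory defined above. All four arguments are
 * optional; the defaults are 3 retries and a 30 s base interval, clamped between 3 s and
 * 90 s (the DEFAULT_CLIENT_* constants). The values below are arbitrary examples.
 */
import { exponentialRetryPolicy } from "@azure/ms-rest-js";

// 5 retries, 1 s base interval, waits clamped between 500 ms and 10 s.
const retryFactory = exponentialRetryPolicy(5, 1000, 500, 10 * 1000);
// Each failed attempt grows the (jittered) wait roughly exponentially via updateRetryData()
// until either the request succeeds or retryCount is exhausted.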
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function generateClientRequestIdPolicy( + requestIdHeaderName = "x-ms-client-request-id" +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} + +export class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + private _requestIdHeaderName: string + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, utils.generateUuid()); + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts new file mode 100644 index 0000000..90d44ca --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function logPolicy(logger: any = console.log): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} + +export class LogPolicy extends BaseRequestPolicy { + logger?: any; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + logger: any = console.log + ) { + super(nextPolicy, options); + this.logger = logger; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response)); + } +} + +function logResponse( + policy: LogPolicy, + response: HttpOperationResponse +): Promise { + policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`); + policy.logger(`>> Response status code: ${response.status}`); + const responseBody = response.bodyAsText; + policy.logger(`>> Body: ${responseBody}`); + return Promise.resolve(response); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts new file mode 100644 index 0000000..9b688a4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/* + * NOTE: When moving this file, please update "browser" section in package.json + * and "plugins" section in webpack.testconfig.ts. 
+ */ + +import { TelemetryInfo } from "./userAgentPolicy"; + +interface NavigatorEx extends Navigator { + // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2 + readonly oscpu: string; +} + +export function getDefaultUserAgentKey(): string { + return "x-ms-command-name"; +} + +export function getPlatformSpecificData(): TelemetryInfo[] { + const navigator = self.navigator as NavigatorEx; + const osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + + return [osInfo]; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts new file mode 100644 index 0000000..ea523dd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as os from "os"; +import { TelemetryInfo } from "./userAgentPolicy"; +import { Constants } from "../util/constants"; + +export function getDefaultUserAgentKey(): string { + return Constants.HeaderConstants.USER_AGENT; +} + +export function getPlatformSpecificData(): TelemetryInfo[] { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + + const osInfo = { + key: "OS", + value: `(${os.arch()}-${os.type()}-${os.release()})`, + }; + + return [runtimeInfo, osInfo]; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts new file mode 100644 index 0000000..a1c194d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ProxySettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +const proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); + +export function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined { + return undefined; +} + +export function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory { + return { + create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => { + throw proxyNotSupportedInBrowser; + }, + }; +} + +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) { + super(nextPolicy, options); + throw proxyNotSupportedInBrowser; + } + + public sendRequest(_request: WebResourceLike): Promise { + throw proxyNotSupportedInBrowser; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts new file mode 100644 index 0000000..bda5960 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
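/*
 * [Editor's note: illustrative sketch only, not part of the vendored diff.]
 * The two small factories above take plain values rather than option bags. The header name
 * and logger below are arbitrary example values; the imports assume the package root
 * re-exports both factories, as the @azure/ms-rest-js index does.
 */
import { generateClientRequestIdPolicy, logPolicy } from "@azure/ms-rest-js";

// Stamp every outgoing request with a correlation header (default is "x-ms-client-request-id").
const requestIdFactory = generateClientRequestIdPolicy("x-ms-correlation-id");

// Send the ">> Request / >> Response" dumps somewhere other than console.log.
const logFactory = logPolicy((message: string) => process.stderr.write(message + "\n"));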
+ +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; + +/** + * @internal + */ +export const noProxyList: string[] = loadNoProxy(); +const byPassedList: Map = new Map(); + +/** + * @internal + */ +export function getEnvironmentValue(name: string): string | undefined { + if (process.env[name]) { + return process.env[name]; + } else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} + +function loadEnvironmentProxyValue(): string | undefined { + if (!process) { + return undefined; + } + + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + + return httpsProxy || allProxy || httpProxy; +} + +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. +// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri: string): boolean | undefined { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost()!; + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} + +/** + * @internal + */ +export function loadNoProxy(): string[] { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + + return []; +} + +/** + * @internal + */ +function extractAuthFromUrl( + url: string +): { username?: string; password?: string; urlWithoutAuth: string } { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? 
auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} + +export function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} + +export function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ProxyPolicy(nextPolicy, options, proxySettings!); + }, + }; +} + +export class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + proxySettings: ProxySettings + ) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts new file mode 100644 index 0000000..fd158d1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { URLBuilder } from "../url"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /* + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + + /* + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. 
+ */ + maxRetries?: number; +} + +export const DefaultRedirectOptions: RedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; + +export function redirectPolicy(maximumRetries = 20): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} + +export class RedirectPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + readonly maxRetries = 20 + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} + +function handleRedirect( + policy: RedirectPolicy, + response: HttpOperationResponse, + currentRetries: number +): Promise { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if ( + locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries)) + ) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)) + .then((res) => recordRedirect(res, request.url)); + } + + return Promise.resolve(response); +} + +function recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts new file mode 100644 index 0000000..d25d314 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { WebResourceLike } from "../webResource"; + +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. 
+ */ +export type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; + +export interface RequestPolicy { + sendRequest(httpRequest: WebResourceLike): Promise; +} + +export abstract class BaseRequestPolicy implements RequestPolicy { + protected constructor( + readonly _nextPolicy: RequestPolicy, + readonly _options: RequestPolicyOptionsLike + ) {} + + public abstract sendRequest(webResource: WebResourceLike): Promise; + + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + public shouldLog(logLevel: HttpPipelineLogLevel): boolean { + return this._options.shouldLog(logLevel); + } + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + public log(logLevel: HttpPipelineLogLevel, message: string): void { + this._options.log(logLevel, message); + } +} + +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} + +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export class RequestPolicyOptions implements RequestPolicyOptionsLike { + constructor(private _logger?: HttpPipelineLogger) {} + + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + public shouldLog(logLevel: HttpPipelineLogLevel): boolean { + return ( + !!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel + ); + } + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + public log(logLevel: HttpPipelineLogLevel, message: string): void { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts new file mode 100644 index 0000000..47f11f3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
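/*
 * [Editor's note: illustrative sketch only, not part of the vendored diff.]
 * getDefaultProxySettings() from the proxyPolicy.ts hunk above parses an explicit proxy URL
 * (or falls back to HTTPS_PROXY / ALL_PROXY / HTTP_PROXY) and splits embedded credentials
 * out of it. The URL below is a placeholder.
 */
import { getDefaultProxySettings, proxyPolicy } from "@azure/ms-rest-js";

const settings = getDefaultProxySettings("http://user:secret@127.0.0.1:8080");
// -> { host: "http://127.0.0.1", port: 8080, username: "user", password: "secret" }

const proxyFactory = proxyPolicy(settings);
// ProxyPolicy.sendRequest() attaches these settings to every request whose host is not
// excluded by the NO_PROXY list.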
+import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} + +export class RPRegistrationPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + readonly _retryTimeout = 30 + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} + +function registerIfNeeded( + policy: RPRegistrationPolicy, + request: WebResourceLike, + response: HttpOperationResponse +): Promise { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText as string); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return ( + registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + }) + ); + } + } + + return Promise.resolve(response); +} + +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ +function getRequestEssentials( + originalRequest: WebResourceLike, + reuseUrlToo = false +): WebResourceLike { + const reqOptions: WebResourceLike = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + + return reqOptions; +} + +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. 
+ */ +function checkRPNotRegisteredError(body: string): string { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } catch (err) { + // do nothing; + } + if ( + responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration" + ) { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} + +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url: string): string { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} + +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP( + policy: RPRegistrationPolicy, + urlPrefix: string, + provider: string, + originalRequest: WebResourceLike +): Promise { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + + return policy._nextPolicy.sendRequest(reqOptions).then((response) => { + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} + +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. 
+ */ +function getRegistrationStatus( + policy: RPRegistrationPolicy, + url: string, + originalRequest: WebResourceLike +): Promise { + const reqOptions: any = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + + return policy._nextPolicy.sendRequest(reqOptions).then((res) => { + const obj = res.parsedBody as any; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } else { + return utils + .delay(policy._retryTimeout * 1000) + .then(() => getRegistrationStatus(policy, url, originalRequest)); + } + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts new file mode 100644 index 0000000..91b3184 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicyFactory, + RequestPolicy, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function signingPolicy( + authenticationProvider: ServiceClientCredentials +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} + +export class SigningPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + public authenticationProvider: ServiceClientCredentials + ) { + super(nextPolicy, options); + } + + signRequest(request: WebResourceLike): Promise { + return this.authenticationProvider.signRequest(request); + } + + public sendRequest(request: WebResourceLike): Promise { + return this.signRequest(request).then((nextRequest) => + this._nextPolicy.sendRequest(nextRequest) + ); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts new file mode 100644 index 0000000..9edc5ee --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts @@ -0,0 +1,187 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
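/*
 * [Editor's note: illustrative sketch only, not part of the vendored diff.]
 * signingPolicy() (previous hunk) wraps anything with the ServiceClientCredentials shape,
 * which - assuming the usual single-method definition in this library - is a signRequest()
 * that mutates and returns the request. The bearer token below is a placeholder, and the
 * WebResourceLike import assumes the package root re-exports that type.
 */
import { signingPolicy, WebResourceLike } from "@azure/ms-rest-js";

const staticTokenCredentials = {
  signRequest(webResource: WebResourceLike): Promise<WebResourceLike> {
    webResource.headers.set("Authorization", "Bearer <token>"); // placeholder value
    return Promise.resolve(webResource);
  },
};

// SigningPolicy.sendRequest() awaits signRequest() before forwarding to the next policy.
const signingFactory = signingPolicy(staticTokenCredentials);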
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} + +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} + +export function systemErrorRetryPolicy( + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new SystemErrorRetryPolicy( + nextPolicy, + options, + retryCount, + retryInterval, + minRetryInterval, + maxRetryInterval + ); + }, + }; +} + +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +export class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + DEFAULT_CLIENT_RETRY_COUNT = 3; + DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number + ) { + super(nextPolicy, options); + this.retryCount = typeof retryCount === "number" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = + typeof retryInterval === "number" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} + +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean { + let currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} + +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData( + policy: SystemErrorRetryPolicy, + retryData?: RetryData, + err?: RetryError +): RetryData { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + + retryData.error = err; + } + + // Adjust retry count + retryData.retryCount++; + + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount) - 1; + const boundedRandDelta = + policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + + retryData.retryInterval = Math.min( + policy.minRetryInterval + incrementDelta, + policy.maxRetryInterval + ); + + return retryData; +} + +async function retry( + policy: SystemErrorRetryPolicy, + request: WebResourceLike, + operationResponse: HttpOperationResponse, + err?: RetryError, + retryData?: RetryData +): Promise { + retryData = updateRetryData(policy, retryData, err); + if ( + err && + err.code && + shouldRetry(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT") + ) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await utils.delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } catch (error) { + return retry(policy, request, operationResponse, error, retryData); + } + } else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts new file mode 100644 index 0000000..20d77cd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyOptionsLike, + RequestPolicyFactory, +} from "./requestPolicy"; +import { WebResourceLike } from "../webResource"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Constants } from "../util/constants"; +import { delay } from "../util/utils"; + +const StatusCodes = Constants.HttpConstants.StatusCodes; +const DEFAULT_RETRY_COUNT = 3; + +/** + * Options that control how to retry on response status code 429. + */ +export interface ThrottlingRetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. 
+ */ + maxRetries?: number; +} + +export function throttlingRetryPolicy( + maxRetries: number = DEFAULT_RETRY_COUNT +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} + +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export class ThrottlingRetryPolicy extends BaseRequestPolicy { + private retryLimit: number; + + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) { + super(nextPolicy, options); + this.retryLimit = retryLimit; + } + + public async sendRequest(httpRequest: WebResourceLike): Promise { + return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => { + return this.retry(httpRequest, response, 0); + }); + } + + private async retry( + httpRequest: WebResourceLike, + httpResponse: HttpOperationResponse, + retryCount: number + ): Promise { + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return httpResponse; + } + + const retryAfterHeader: string | undefined = httpResponse.headers.get( + Constants.HeaderConstants.RETRY_AFTER + ); + + if (retryAfterHeader && retryCount < this.retryLimit) { + const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader( + retryAfterHeader + ); + if (delayInMs) { + await delay(delayInMs); + const res = await this._nextPolicy.sendRequest(httpRequest); + return this.retry(httpRequest, res, retryCount + 1); + } + } + + return httpResponse; + } + + public static parseRetryAfterHeader(headerValue: string): number | undefined { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } else { + return retryAfterInSeconds * 1000; + } + } + + public static parseDateRetryAfterHeader(headerValue: string): number | undefined { + try { + const now: number = Date.now(); + const date: number = Date.parse(headerValue); + const diff = date - now; + + return Number.isNaN(diff) ? undefined : diff; + } catch (error) { + return undefined; + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts new file mode 100644 index 0000000..c4e70db --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
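/*
 * [Editor's note: illustrative sketch only, not part of the vendored diff.]
 * The static helpers on ThrottlingRetryPolicy above convert a Retry-After header into a
 * delay in milliseconds: a plain number is treated as seconds, anything else is parsed as
 * an HTTP date and diffed against Date.now(). Assumes the class and factory are re-exported
 * from the package root.
 */
import { throttlingRetryPolicy, ThrottlingRetryPolicy } from "@azure/ms-rest-js";

const throttleFactory = throttlingRetryPolicy(5); // retry 429 responses at most 5 times

const fromSeconds = ThrottlingRetryPolicy.parseRetryAfterHeader("120");
// -> 120000 (120 seconds expressed in milliseconds)

const fromDate = ThrottlingRetryPolicy.parseRetryAfterHeader("Wed, 01 Jan 2025 00:00:00 GMT");
// -> milliseconds until that date (may be <= 0 for past dates, undefined if unparseable)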
+ +import { HttpHeaders } from "../httpHeaders"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export type TelemetryInfo = { key?: string; value?: string }; + +function getRuntimeInfo(): TelemetryInfo[] { + const msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + + return [msRestRuntime]; +} + +function getUserAgentString( + telemetryInfo: TelemetryInfo[], + keySeparator = " ", + valueSeparator = "/" +): string { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} + +export const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; + +export function getDefaultUserAgentValue(): string { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} + +export function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory { + const key: string = + !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + const value: string = + !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} + +export class UserAgentPolicy extends BaseRequestPolicy { + constructor( + readonly _nextPolicy: RequestPolicy, + readonly _options: RequestPolicyOptionsLike, + protected headerKey: string, + protected headerValue: string + ) { + super(_nextPolicy, _options); + } + + sendRequest(request: WebResourceLike): Promise { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + + addUserAgentHeader(request: WebResourceLike): void { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts b/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts new file mode 100644 index 0000000..93dd369 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
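/*
 * [Editor's note: illustrative sketch only, not part of the vendored diff.]
 * userAgentPolicy() above defaults both the header name (platform-specific, via
 * getDefaultUserAgentKey) and the value; either can be overridden with a TelemetryInfo.
 * "my-app/1.0" is an arbitrary example prefix.
 */
import { userAgentPolicy, getDefaultUserAgentValue } from "@azure/ms-rest-js";

// Roughly "ms-rest-js/<version> Node/<process.version> OS/(<arch>-<type>-<release>)" on Node.
const defaultValue = getDefaultUserAgentValue();

// Prepend an application identifier to the default telemetry string.
const uaFactory = userAgentPolicy({ value: `my-app/1.0 ${defaultValue}` });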
+ +import * as http from "http"; +import * as https from "https"; +import * as tunnel from "tunnel"; + +import { ProxySettings } from "./serviceClient"; +import { URLBuilder } from "./url"; +import { HttpHeadersLike } from "./httpHeaders"; + +export type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent }; +export function createProxyAgent( + requestUrl: string, + proxySettings: ProxySettings, + headers?: HttpHeadersLike +): ProxyAgent { + const tunnelOptions: tunnel.HttpsOverHttpsOptions = { + proxy: { + host: URLBuilder.parse(proxySettings.host).getHost() as string, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } else if (proxySettings.username) { + tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`; + } + + const requestScheme = URLBuilder.parse(requestUrl).getScheme() || ""; + const isRequestHttps = requestScheme.toLowerCase() === "https"; + const proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || ""; + const isProxyHttps = proxyScheme.toLowerCase() === "https"; + + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + + return proxyAgent; +} + +// Duplicate tunnel.HttpsOverHttpsOptions to avoid exporting createTunnel() with dependency on @types/tunnel +// createIunnel() is only imported by tests. +export interface HttpsProxyOptions { + host: string; + port: number; + localAddress?: string; + proxyAuth?: string; + headers?: { [key: string]: any }; + ca?: Buffer[]; + servername?: string; + key?: Buffer; + cert?: Buffer; +} + +interface HttpsOverHttpsOptions { + maxSockets?: number; + ca?: Buffer[]; + key?: Buffer; + cert?: Buffer; + proxy?: HttpsProxyOptions; +} + +export function createTunnel( + isRequestHttps: boolean, + isProxyHttps: boolean, + tunnelOptions: HttpsOverHttpsOptions +): http.Agent | https.Agent { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } else { + return tunnel.httpOverHttp(tunnelOptions); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts b/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts new file mode 100644 index 0000000..ba6cca8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export enum QueryCollectionFormat { + Csv = ",", + Ssv = " ", + Tsv = "\t", + Pipes = "|", + Multi = "Multi", +} diff --git a/node_modules/@azure/ms-rest-js/lib/restError.ts b/node_modules/@azure/ms-rest-js/lib/restError.ts new file mode 100644 index 0000000..88479c4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/restError.ts @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
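/*
 * [Editor's note: illustrative sketch only, not part of the vendored diff.]
 * QueryCollectionFormat (previous hunk) is just the separator used when an array-valued
 * query parameter is flattened into a single string; "Multi" is a sentinel the request
 * serializer presumably handles by repeating the parameter once per value.
 */
import { QueryCollectionFormat } from "@azure/ms-rest-js";

const tags = ["alpha", "beta", "gamma"];
const csv = tags.join(QueryCollectionFormat.Csv);     // "alpha,beta,gamma"
const pipes = tags.join(QueryCollectionFormat.Pipes); // "alpha|beta|gamma"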
+ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; + +export class RestError extends Error { + static readonly REQUEST_SEND_ERROR: string = "REQUEST_SEND_ERROR"; + static readonly REQUEST_ABORTED_ERROR: string = "REQUEST_ABORTED_ERROR"; + static readonly PARSE_ERROR: string = "PARSE_ERROR"; + + code?: string; + statusCode?: number; + request?: WebResourceLike; + response?: HttpOperationResponse; + body?: any; + constructor( + message: string, + code?: string, + statusCode?: number, + request?: WebResourceLike, + response?: HttpOperationResponse, + body?: any + ) { + super(message); + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + this.body = body; + + Object.setPrototypeOf(this, RestError.prototype); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/serializer.ts b/node_modules/@azure/ms-rest-js/lib/serializer.ts new file mode 100644 index 0000000..9ea94fc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/serializer.ts @@ -0,0 +1,1063 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; + +export class Serializer { + constructor( + public readonly modelMappers: { [key: string]: any } = {}, + public readonly isXML?: boolean + ) {} + + validateConstraints(mapper: Mapper, value: any, objectName: string): void { + const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => { + throw new Error( + `"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.` + ); + }; + if (mapper.constraints && value != undefined) { + const { + ExclusiveMaximum, + ExclusiveMinimum, + InclusiveMaximum, + InclusiveMinimum, + MaxItems, + MaxLength, + MinItems, + MinLength, + MultipleOf, + Pattern, + UniqueItems, + } = mapper.constraints; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern: RegExp = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if ( + UniqueItems && + value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i) + ) { + failValidation("UniqueItems", UniqueItems); + } + } + } + + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + serialize(mapper: Mapper, object: any, objectName?: string): any { + let payload: any = {}; + const mapperType = mapper.type.name as string; + if (!objectName) { + objectName = mapper.serializedName!; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + + if (mapper.isConstant) { + object = mapper.defaultValue; + } + + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + + const { required, nullable } = mapper; + + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + + if (object == undefined) { + payload = object; + } else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } else if (mapperType.match(/^Enum$/gi) !== null) { + const enumMapper: EnumMapper = mapper as EnumMapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } else if ( + mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null + ) { + payload = serializeDateTypes(mapperType, object, objectName); + } else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName); + } else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName); + } else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName); + } + } + return payload; + } + + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + deserialize(mapper: Mapper, responseBody: any, objectName: string): any { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + + let payload: any; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName!; + } + + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName); + } else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } else if (responseBody === "false") { + payload = false; + } else { + payload = responseBody; + } + } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = base64.decodeString(responseBody); + } else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName); + } else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType( + this, + mapper as DictionaryMapper, + responseBody, + objectName + ); + } + } + + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + + return payload; + } +} + +function trimEnd(str: string, ch: string) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} + +function bufferToBase64Url(buffer: any): string | undefined { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. 
+ return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} + +function base64UrlToByteArray(str: string): Uint8Array | undefined { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. + return base64.decodeString(str); +} + +function splitSerializeName(prop: string | undefined): string[] { + const classes: string[] = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + + return classes; +} + +function dateToUnixTime(d: string | Date): number | undefined { + if (!d) { + return undefined; + } + + if (typeof d.valueOf() === "string") { + d = new Date(d as string); + } + return Math.floor((d as Date).getTime() / 1000); +} + +function unixTimeToDate(n: number): Date | undefined { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} + +function serializeBasicTypes(typeName: string, objectName: string, value: any): any { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error( + `${objectName} with value "${value}" must be of type string and a valid uuid.` + ); + } + } else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } else if (typeName.match(/^Stream$/gi) !== null) { + const objectType = typeof value; + if ( + objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob) + ) { + throw new Error( + `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.` + ); + } + } + } + return value; +} + +function serializeEnumType(objectName: string, allowedValues: Array, value: any): any { + if (!allowedValues) { + throw new Error( + `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.` + ); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error( + `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify( + allowedValues + )}.` + ); + } + return value; +} + +function serializeByteArrayType(objectName: string, value: any): any { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} + +function serializeBase64UrlType(objectName: string, value: any): any { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} + +function serializeDateTypes(typeName: string, value: any, objectName: string) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } else if (typeName.match(/^DateTime$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } else if (typeName.match(/^UnixTime$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error( + `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.` + ); + } + value = dateToUnixTime(value); + } else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!utils.isDuration(value)) { + throw new Error( + `${objectName} must be a string in ISO 8601 format. 
Instead was "${value}".` + ); + } + value = value; + } + } + return value; +} + +function serializeSequenceType( + serializer: Serializer, + mapper: SequenceMapper, + object: any, + objectName: string +) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error( + `element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.` + ); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} + +function serializeDictionaryType( + serializer: Serializer, + mapper: DictionaryMapper, + object: any, + objectName: string +) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error( + `"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.` + ); + } + const tempDictionary: { [key: string]: any } = {}; + for (const key of Object.keys(object)) { + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} + +/** + * Resolves a composite mapper's modelProperties. + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties( + serializer: Serializer, + mapper: CompositeMapper, + objectName: string +): { [propertyName: string]: Mapper } { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const className = mapper.type.className; + if (!className) { + throw new Error( + `Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify( + mapper, + undefined, + 2 + )}".` + ); + } + + const modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${className}".`); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error( + `modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify( + modelMapper + )}" of type "${className}" for object "${objectName}".` + ); + } + } + + return modelProps; +} + +function serializeCompositeType( + serializer: Serializer, + mapper: CompositeMapper, + object: any, + objectName: string +) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + + if (object != undefined) { + const payload: any = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + + let propName: string | undefined; + let parentObject: any = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } else { + const paths = splitSerializeName(propertyMapper.serializedName!); + propName = paths.pop(); + + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + 
parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + + if (parentObject != undefined) { + const propertyObjectName = + propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if ( + polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined + ) { + toSerialize = mapper.serializedName; + } + + const serializedValue = serializer.serialize( + propertyMapper, + toSerialize, + propertyObjectName + ); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue }; + } else { + parentObject[propName] = serializedValue; + } + } + } + } + + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize( + additionalPropertiesMapper, + object[clientPropName], + objectName + '["' + clientPropName + '"]' + ); + } + } + } + + return payload; + } + return object; +} + +function isSpecialXmlProperty(propertyName: string): boolean { + return ["$", "_"].includes(propertyName); +} + +function deserializeCompositeType( + serializer: Serializer, + mapper: CompositeMapper, + responseBody: any, + objectName: string +): any { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance: { [key: string]: any } = {}; + const handledPropertyNames: string[] = []; + + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName!); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." 
+ serializedName; + } + + const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary: any = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize( + (propertyMapper as DictionaryMapper).type.value, + responseBody[headerKey], + propertyObjectName + ); + } + + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize( + propertyMapper, + responseBody.$[xmlName!], + propertyObjectName + ); + } else { + const propertyName = xmlElementName || xmlName || serializedName; + let unwrappedProperty = responseBody[propertyName!]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName!]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!]; + + const isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize( + propertyMapper, + unwrappedProperty, + propertyObjectName + ); + } + } else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if ( + polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined + ) { + propertyInstance = mapper.serializedName; + } + + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize( + propertyMapper, + propertyInstance, + propertyObjectName + ); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [key, value] of Object.entries(instance)) { + if (!arrayInstance.hasOwnProperty(key)) { + arrayInstance[key] = value; + } + } + instance = arrayInstance; + } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize( + propertyMapper, + propertyInstance, + propertyObjectName + ); + instance[key] = serializedValue; + } + } + } + + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName: string) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize( + additionalPropertiesMapper, + responseBody[responsePropName], + objectName + '["' + responsePropName + '"]' + ); + } + } + } else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if ( + instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key) + ) { + instance[key] = responseBody[key]; + } + } + } + + return instance; +} + +function deserializeDictionaryType( + serializer: Serializer, + mapper: DictionaryMapper, + responseBody: any, + objectName: string +): any { + /*jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error( + `"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}` + ); + } + if (responseBody) { + const tempDictionary: { [key: string]: any } = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; +} + +function deserializeSequenceType( + serializer: Serializer, + mapper: SequenceMapper, + responseBody: any, + objectName: string +): any { + /*jshint validthis: true */ + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error( + `element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}` + ); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`); + } + return tempArray; + } + return responseBody; +} + +function getPolymorphicMapper( + serializer: Serializer, + mapper: CompositeMapper, + object: any, + polymorphicPropertyName: 
"clientName" | "serializedName" +): CompositeMapper { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = + discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} + +function getPolymorphicDiscriminatorRecursively( + serializer: Serializer, + mapper: CompositeMapper +): PolymorphicDiscriminator | undefined { + return ( + mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className) + ); +} + +function getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) { + return ( + typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator + ); +} + +export interface MapperConstraints { + InclusiveMaximum?: number; + ExclusiveMaximum?: number; + InclusiveMinimum?: number; + ExclusiveMinimum?: number; + MaxLength?: number; + MinLength?: number; + Pattern?: RegExp; + MaxItems?: number; + MinItems?: number; + UniqueItems?: true; + MultipleOf?: number; +} + +export type MapperType = + | SimpleMapperType + | CompositeMapperType + | SequenceMapperType + | DictionaryMapperType + | EnumMapperType; + +export interface SimpleMapperType { + name: + | "Base64Url" + | "Boolean" + | "ByteArray" + | "Date" + | "DateTime" + | "DateTimeRfc1123" + | "Object" + | "Stream" + | "String" + | "TimeSpan" + | "UnixTime" + | "Uuid" + | "Number" + | "any"; +} + +export interface CompositeMapperType { + name: "Composite"; + + // Only one of the two below properties should be present. + // Use className to reference another type definition, + // and use modelProperties/additionalProperties when the reference to the other type has been resolved. 
+  className?: string;
+
+  modelProperties?: { [propertyName: string]: Mapper };
+  additionalProperties?: Mapper;
+
+  uberParent?: string;
+  polymorphicDiscriminator?: PolymorphicDiscriminator;
+}
+
+export interface SequenceMapperType {
+  name: "Sequence";
+  element: Mapper;
+}
+
+export interface DictionaryMapperType {
+  name: "Dictionary";
+  value: Mapper;
+}
+
+export interface EnumMapperType {
+  name: "Enum";
+  allowedValues: any[];
+}
+
+export interface BaseMapper {
+  xmlName?: string;
+  xmlIsAttribute?: boolean;
+  xmlElementName?: string;
+  xmlIsWrapped?: boolean;
+  readOnly?: boolean;
+  isConstant?: boolean;
+  required?: boolean;
+  nullable?: boolean;
+  serializedName?: string;
+  type: MapperType;
+  defaultValue?: any;
+  constraints?: MapperConstraints;
+}
+
+export type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;
+
+export interface PolymorphicDiscriminator {
+  serializedName: string;
+  clientName: string;
+  [key: string]: string;
+}
+
+export interface CompositeMapper extends BaseMapper {
+  type: CompositeMapperType;
+}
+
+export interface SequenceMapper extends BaseMapper {
+  type: SequenceMapperType;
+}
+
+export interface DictionaryMapper extends BaseMapper {
+  type: DictionaryMapperType;
+  headerCollectionPrefix?: string;
+}
+
+export interface EnumMapper extends BaseMapper {
+  type: EnumMapperType;
+}
+
+export interface UrlParameterValue {
+  value: string;
+  skipUrlEncoding: boolean;
+}
+
+// TODO: why is this here?
+export function serializeObject(toSerialize: any): any {
+  if (toSerialize == undefined) return undefined;
+  if (toSerialize instanceof Uint8Array) {
+    toSerialize = base64.encodeByteArray(toSerialize);
+    return toSerialize;
+  } else if (toSerialize instanceof Date) {
+    return toSerialize.toISOString();
+  } else if (Array.isArray(toSerialize)) {
+    const array = [];
+    for (let i = 0; i < toSerialize.length; i++) {
+      array.push(serializeObject(toSerialize[i]));
+    }
+    return array;
+  } else if (typeof toSerialize === "object") {
+    const dictionary: { [key: string]: any } = {};
+    for (const property in toSerialize) {
+      dictionary[property] = serializeObject(toSerialize[property]);
+    }
+    return dictionary;
+  }
+  return toSerialize;
+}
+
+/**
+ * Utility function to create a K:V from a list of strings
+ */
+function strEnum<T extends string>(o: Array<T>): { [K in T]: K } {
+  const result: any = {};
+  for (const key of o) {
+    result[key] = key;
+  }
+  return result;
+}
+
+export const MapperType = strEnum([
+  "Base64Url",
+  "Boolean",
+  "ByteArray",
+  "Composite",
+  "Date",
+  "DateTime",
+  "DateTimeRfc1123",
+  "Dictionary",
+  "Enum",
+  "Number",
+  "Object",
+  "Sequence",
+  "String",
+  "Stream",
+  "TimeSpan",
+  "UnixTime",
+]);
diff --git a/node_modules/@azure/ms-rest-js/lib/serviceClient.ts b/node_modules/@azure/ms-rest-js/lib/serviceClient.ts
new file mode 100644
index 0000000..e6e3218
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/lib/serviceClient.ts
@@ -0,0 +1,852 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for license information.
+ +import { TokenCredential, isTokenCredential } from "@azure/core-auth"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +import { DefaultHttpClient } from "./defaultHttpClient"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { OperationArguments } from "./operationArguments"; +import { + getPathStringFromParameter, + getPathStringFromParameterPath, + OperationParameter, + ParameterPath, +} from "./operationParameter"; +import { isStreamOperation, OperationSpec } from "./operationSpec"; +import { + deserializationPolicy, + DeserializationContentTypes, +} from "./policies/deserializationPolicy"; +import { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { + userAgentPolicy, + getDefaultUserAgentHeaderName, + getDefaultUserAgentValue, +} from "./policies/userAgentPolicy"; +import { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptions, + RequestPolicyOptionsLike, +} from "./policies/requestPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from "./serializer"; +import { URLBuilder } from "./url"; +import * as utils from "./util/utils"; +import { stringifyXML } from "./util/xml"; +import { + RequestOptionsBase, + RequestPrepareOptions, + WebResourceLike, + isWebResourceLike, + WebResource, +} from "./webResource"; +import { OperationResponse } from "./operationResponse"; +import { ServiceCallback } from "./util/utils"; +import { agentPolicy } from "./policies/agentPolicy"; +import { proxyPolicy, getDefaultProxySettings } from "./policies/proxyPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { Agent } from "http"; +import { + AzureIdentityCredentialAdapter, + azureResourceManagerEndpoints, +} from "./credentials/azureIdentityTokenCredentialAdapter"; + +/** + * HTTP proxy settings (Node.js only) + */ +export interface ProxySettings { + host: string; + port: number; + username?: string; + password?: string; +} + +/** + * HTTP and HTTPS agents (Node.js only) + */ +export interface AgentSettings { + http: Agent; + https: Agent; +} + +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: + | RequestPolicyFactory[] + | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. 
+ */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-command-name" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only). + */ + agentSettings?: AgentSettings; + /** + * If specified: + * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient. + * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. Otherwise, the scope would default to "https://management.azure.com/.default". + * + * If it is not specified: + * - All OperationSpecs must contain a baseUrl property. + * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be "https://management.azure.com/.default". + */ + baseUri?: string; +} + +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +export class ServiceClient { + /** + * The base URI against which requests will be made when using this ServiceClient instance. + * + * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient. + * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default "https://management.azure.com/.default" + * + * If it is not specified, all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. 
+ */ + protected requestContentType?: string; + + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient: HttpClient; + private readonly _requestPolicyOptions: RequestPolicyOptionsLike; + + private readonly _requestPolicyFactories: RequestPolicyFactory[]; + private readonly _withCredentials: boolean; + + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + constructor( + credentials?: ServiceClientCredentials | TokenCredential, + options?: ServiceClientOptions + ) { + if (!options) { + options = {}; + } + + if (options.baseUri) { + this.baseUri = options.baseUri; + } + + let serviceClientCredentials: ServiceClientCredentials | undefined; + if (isTokenCredential(credentials)) { + let scope: string | undefined = undefined; + if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) { + scope = `${options.baseUri}/.default`; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } else { + serviceClientCredentials = credentials; + } + + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new DefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + + let requestPolicyFactories: RequestPolicyFactory[]; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } else { + requestPolicyFactories = createDefaultRequestPolicyFactories( + serviceClientCredentials, + options + ); + if (options.requestPolicyFactories) { + const newRequestPolicyFactories: + | void + | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + + let httpRequest: WebResourceLike; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } catch (error) { + return Promise.reject(error); + } + + let httpPipeline: RequestPolicy = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create( + httpPipeline, + this._requestPolicyOptions + ); + } + } + return httpPipeline.sendRequest(httpRequest); + } + + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. 
+   * @param {ServiceCallback} callback The callback to call when the response is received.
+   */
+  sendOperationRequest(
+    operationArguments: OperationArguments,
+    operationSpec: OperationSpec,
+    callback?: ServiceCallback<any>
+  ): Promise<RestResponse> {
+    if (typeof operationArguments.options === "function") {
+      callback = operationArguments.options;
+      operationArguments.options = undefined;
+    }
+
+    const httpRequest = new WebResource();
+
+    let result: Promise<RestResponse>;
+    try {
+      const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;
+      if (!baseUri) {
+        throw new Error(
+          "If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."
+        );
+      }
+
+      httpRequest.method = operationSpec.httpMethod;
+      httpRequest.operationSpec = operationSpec;
+
+      const requestUrl: URLBuilder = URLBuilder.parse(baseUri);
+      if (operationSpec.path) {
+        requestUrl.appendPath(operationSpec.path);
+      }
+      if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {
+        for (const urlParameter of operationSpec.urlParameters) {
+          let urlParameterValue: string = getOperationArgumentValueFromParameter(
+            this,
+            operationArguments,
+            urlParameter,
+            operationSpec.serializer
+          );
+          urlParameterValue = operationSpec.serializer.serialize(
+            urlParameter.mapper,
+            urlParameterValue,
+            getPathStringFromParameter(urlParameter)
+          );
+          if (!urlParameter.skipEncoding) {
+            urlParameterValue = encodeURIComponent(urlParameterValue);
+          }
+          requestUrl.replaceAll(
+            `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,
+            urlParameterValue
+          );
+        }
+      }
+      if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {
+        for (const queryParameter of operationSpec.queryParameters) {
+          let queryParameterValue: any = getOperationArgumentValueFromParameter(
+            this,
+            operationArguments,
+            queryParameter,
+            operationSpec.serializer
+          );
+          if (queryParameterValue != undefined) {
+            queryParameterValue = operationSpec.serializer.serialize(
+              queryParameter.mapper,
+              queryParameterValue,
+              getPathStringFromParameter(queryParameter)
+            );
+            if (queryParameter.collectionFormat != undefined) {
+              if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {
+                if (queryParameterValue.length === 0) {
+                  queryParameterValue = "";
+                } else {
+                  for (const index in queryParameterValue) {
+                    const item = queryParameterValue[index];
+                    queryParameterValue[index] = item == undefined ?
"" : item.toString(); + } + } + } else if ( + queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv + ) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if ( + queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null + ) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if ( + queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv + ) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter( + queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), + queryParameterValue + ); + } + } + } + httpRequest.url = requestUrl.toString(); + + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue: any = getOperationArgumentValueFromParameter( + this, + operationArguments, + headerParameter, + operationSpec.serializer + ); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize( + headerParameter.mapper, + headerValue, + getPathStringFromParameter(headerParameter) + ); + const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper) + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } else { + httpRequest.headers.set( + headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), + headerValue + ); + } + } + } + } + + const options: RequestOptionsBase | undefined = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + + httpRequest.withCredentials = this._withCredentials; + + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + + result = this.sendRequest(httpRequest).then((res) => + flattenResponse(res, operationSpec.responses[res.status]) + ); + } catch (error) { + result = Promise.reject(error); + } + + const cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + + return 
result; + } +} + +export function serializeRequestBody( + serviceClient: ServiceClient, + httpRequest: WebResourceLike, + operationArguments: OperationArguments, + operationSpec: OperationSpec +): void { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter( + serviceClient, + operationArguments, + operationSpec.requestBody, + operationSpec.serializer + ); + + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + const requestBodyParameterPathString: string = getPathStringFromParameter( + operationSpec.requestBody + ); + httpRequest.body = operationSpec.serializer.serialize( + bodyMapper, + httpRequest.body, + requestBodyParameterPathString + ); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML( + utils.prepareXMLRootList( + httpRequest.body, + xmlElementName || xmlName || serializedName! + ), + { rootName: xmlName || serializedName } + ); + } else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } catch (error) { + throw new Error( + `Error "${error.message}" occurred in serializing the payload - ${JSON.stringify( + serializedName, + undefined, + " " + )}.` + ); + } + } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue: any = getOperationArgumentValueFromParameter( + serviceClient, + operationArguments, + formDataParameter, + operationSpec.serializer + ); + if (formDataParameterValue != undefined) { + const formDataParameterPropertyName: string = + formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize( + formDataParameter.mapper, + formDataParameterValue, + getPathStringFromParameter(formDataParameter) + ); + } + } + } +} + +function isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory { + return typeof instance.create === "function"; +} + +function getValueOrFunctionResult( + value: undefined | string | ((defaultValue: string) => string), + defaultValueCreator: () => string +): string { + let result: string; + if (typeof value === "string") { + result = value; + } else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} + +function createDefaultRequestPolicyFactories( + credentials: ServiceClientCredentials | RequestPolicyFactory | undefined, + options: ServiceClientOptions +): RequestPolicyFactory[] { + const factories: RequestPolicyFactory[] = []; + + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } else { + factories.push(signingPolicy(credentials)); + } + } + + const userAgentHeaderName: string = getValueOrFunctionResult( + options.userAgentHeaderName, + getDefaultUserAgentHeaderName + ); + const 
userAgentHeaderValue: string = getValueOrFunctionResult( + options.userAgent, + getDefaultUserAgentValue + ); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + + const redirectOptions = { + ...DefaultRedirectOptions, + ...options.redirectOptions, + }; + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + + factories.push(deserializationPolicy(options.deserializationContentTypes)); + + const proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + + return factories; +} + +export type PropertyParent = { [propertyName: string]: any }; + +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent { + if (parent && propertyPath) { + const propertyPathLength: number = propertyPath.length; + for (let i = 0; i < propertyPathLength - 1; ++i) { + const propertyName: string = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} + +function getOperationArgumentValueFromParameter( + serviceClient: ServiceClient, + operationArguments: OperationArguments, + parameter: OperationParameter, + serializer: Serializer +): any { + return getOperationArgumentValueFromParameterPath( + serviceClient, + operationArguments, + parameter.parameterPath, + parameter.mapper, + serializer + ); +} + +export function getOperationArgumentValueFromParameterPath( + serviceClient: ServiceClient, + operationArguments: OperationArguments, + parameterPath: ParameterPath, + parameterMapper: Mapper, + serializer: Serializer +): any { + let value: any; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } else { + let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath( + operationArguments, + parameterPath + ); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + + // Serialize just for validation purposes. 
+ const parameterPathString: string = getPathStringFromParameterPath( + parameterPath, + parameterMapper + ); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } else { + if (parameterMapper.required) { + value = {}; + } + + for (const propertyName in parameterPath) { + const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![ + propertyName + ]; + const propertyPath: ParameterPath = parameterPath[propertyName]; + const propertyValue: any = getOperationArgumentValueFromParameterPath( + serviceClient, + operationArguments, + propertyPath, + propertyMapper, + serializer + ); + // Serialize just for validation purposes. + const propertyPathString: string = getPathStringFromParameterPath( + propertyPath, + propertyMapper + ); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} + +interface PropertySearchResult { + propertyValue?: any; + propertyFound: boolean; +} + +function getPropertyFromParameterPath( + parent: { [parameterName: string]: any }, + parameterPath: string[] +): PropertySearchResult { + const result: PropertySearchResult = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart: string = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} + +export function flattenResponse( + _response: HttpOperationResponse, + responseSpec: OperationResponse | undefined +): RestResponse { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + + const addOperationResponse = (obj: {}) => + Object.defineProperty(obj, "_response", { + value: _response, + }); + + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse({ + ...parsedHeaders, + blobBody: _response.blobBody, + readableStreamBody: _response.readableStreamBody, + }); + } + + const modelProperties = + (typeName === "Composite" && (bodyMapper as CompositeMapper).type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some( + (k) => modelProperties[k].serializedName === "" + ); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : []; + const arrayResponse = [...parsedBody] as RestResponse & any[]; + + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse({ + ...parsedHeaders, + ..._response.parsedBody, + }); + } + } + + if ( + bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody) + ) { + // primitive body types and HEAD booleans + return addOperationResponse({ + ...parsedHeaders, + body: _response.parsedBody, + }); + } + + return addOperationResponse({ + ...parsedHeaders, + ..._response.parsedBody, + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/url.ts b/node_modules/@azure/ms-rest-js/lib/url.ts new file mode 100644 index 0000000..74d5502 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/url.ts @@ -0,0 +1,659 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { replaceAll } from "./util/utils"; + +type URLQueryParseState = "ParameterName" | "ParameterValue" | "Invalid"; + +/** + * A class that handles the query portion of a URLBuilder. + */ +export class URLQuery { + private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {}; + + /** + * Get whether or not there any query parameters in this URLQuery. + */ + public any(): boolean { + return Object.keys(this._rawQuery).length > 0; + } + + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + public set(parameterName: string, parameterValue: any): void { + if (parameterName) { + if (parameterValue != undefined) { + const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } else { + delete this._rawQuery[parameterName]; + } + } + } + + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + public get(parameterName: string): string | string[] | undefined { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + public toString(): string { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + + /** + * Parse a URLQuery from the provided text. 
+ */ + public static parse(text: string): URLQuery { + const result = new URLQuery(); + + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + + let currentState: URLQueryParseState = "ParameterName"; + + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter: string = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + + case "&": + parameterName = ""; + parameterValue = ""; + break; + + default: + parameterName += currentCharacter; + break; + } + break; + + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + + default: + parameterValue += currentCharacter; + break; + } + break; + + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + + return result; + } +} + +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export class URLBuilder { + private _scheme: string | undefined; + private _host: string | undefined; + private _port: string | undefined; + private _path: string | undefined; + private _query: URLQuery | undefined; + + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + public setScheme(scheme: string | undefined): void { + if (!scheme) { + this._scheme = undefined; + } else { + this.set(scheme, "SCHEME"); + } + } + + /** + * Get the scheme that has been set in this URL. + */ + public getScheme(): string | undefined { + return this._scheme; + } + + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + public setHost(host: string | undefined): void { + if (!host) { + this._host = undefined; + } else { + this.set(host, "SCHEME_OR_HOST"); + } + } + + /** + * Get the host that has been set in this URL. + */ + public getHost(): string | undefined { + return this._host; + } + + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + public setPort(port: number | string | undefined): void { + if (port == undefined || port === "") { + this._port = undefined; + } else { + this.set(port.toString(), "PORT"); + } + } + + /** + * Get the port that has been set in this URL. + */ + public getPort(): string | undefined { + return this._port; + } + + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + public setPath(path: string | undefined): void { + if (!path) { + this._path = undefined; + } else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? 
path : path.substr(schemeStart + 1), "SCHEME"); + } else { + this.set(path, "PATH"); + } + } + } + + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + public appendPath(path: string | undefined): void { + if (path) { + let currentPath: string | undefined = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + + if (path.startsWith("/")) { + path = path.substring(1); + } + + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + + /** + * Get the path that has been set in this URL. + */ + public getPath(): string | undefined { + return this._path; + } + + /** + * Set the query in this URL. + */ + public setQuery(query: string | undefined): void { + if (!query) { + this._query = undefined; + } else { + this._query = URLQuery.parse(query); + } + } + + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + public setQueryParameter(queryParameterName: string, queryParameterValue: any): void { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + public getQueryParameterValue(queryParameterName: string): string | string[] | undefined { + return this._query ? this._query.get(queryParameterName) : undefined; + } + + /** + * Get the query in this URL. + */ + public getQuery(): string | undefined { + return this._query ? this._query.toString() : undefined; + } + + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set(text: string, startState: URLTokenizerState): void { + const tokenizer = new URLTokenizer(text, startState); + + while (tokenizer.next()) { + const token: URLToken | undefined = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + + case "HOST": + this._host = token.text || undefined; + break; + + case "PORT": + this._port = token.text || undefined; + break; + + case "PATH": + const tokenPath: string | undefined = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + + public toString(): string { + let result = ""; + + if (this._scheme) { + result += `${this._scheme}://`; + } + + if (this._host) { + result += this._host; + } + + if (this._port) { + result += `:${this._port}`; + } + + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + + return result; + } + + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. 
+ */ + public replaceAll(searchValue: string, replaceValue: string): void { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + + public static parse(text: string): URLBuilder { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} + +type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; + +type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; + +export class URLToken { + public constructor(public readonly text: string, public readonly type: URLTokenType) {} + + public static scheme(text: string): URLToken { + return new URLToken(text, "SCHEME"); + } + + public static host(text: string): URLToken { + return new URLToken(text, "HOST"); + } + + public static port(text: string): URLToken { + return new URLToken(text, "PORT"); + } + + public static path(text: string): URLToken { + return new URLToken(text, "PATH"); + } + + public static query(text: string): URLToken { + return new URLToken(text, "QUERY"); + } +} + +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character: string): boolean { + const characterCode: number = character.charCodeAt(0); + return ( + (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */ + ); +} + +/** + * A class that tokenizes URL strings. + */ +export class URLTokenizer { + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + + public constructor(readonly _text: string, state?: URLTokenizerState) { + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + public current(): URLToken | undefined { + return this._currentToken; + } + + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + public next(): boolean { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + + case "HOST": + nextHost(this); + break; + + case "PORT": + nextPort(this); + break; + + case "PATH": + nextPath(this); + break; + + case "QUERY": + nextQuery(this); + break; + + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} + +/** + * Read the remaining characters from this Tokenizer's character stream. 
+ */ +function readRemaining(tokenizer: URLTokenizer): string { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} + +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer: URLTokenizer): boolean { + return tokenizer._currentIndex < tokenizer._textLength; +} + +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer: URLTokenizer): string { + return tokenizer._text[tokenizer._currentIndex]; +} + +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer: URLTokenizer, step?: number): void { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} + +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string { + let endIndex: number = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} + +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string { + let result = ""; + + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter: string = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + + return result; +} + +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer: URLTokenizer): string { + return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character)); +} + +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
+ */ +function readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string { + return readWhile( + tokenizer, + (character: string) => terminatingCharacters.indexOf(character) === -1 + ); +} + +function nextScheme(tokenizer: URLTokenizer): void { + const scheme: string = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else { + tokenizer._currentState = "HOST"; + } +} + +function nextSchemeOrHost(tokenizer: URLTokenizer): void { + const schemeOrHost: string = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } + } +} + +function nextHost(tokenizer: URLTokenizer): void { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + + const host: string = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextPort(tokenizer: URLTokenizer): void { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + + const port: string = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextPath(tokenizer: URLTokenizer): void { + const path: string = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextQuery(tokenizer: URLTokenizer): void { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + + const query: string = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts b/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts new file mode 100644 index 0000000..e6b5974 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * Encodes a string in base64 format. 
+ * @param value the string to encode + */ +export function encodeString(value: string): string { + return btoa(value); +} + +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value: Uint8Array): string { + let str = ""; + for (let i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} + +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value: string): Uint8Array { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/base64.ts b/node_modules/@azure/ms-rest-js/lib/util/base64.ts new file mode 100644 index 0000000..ea0c2e0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/base64.ts @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export function encodeString(value: string): string { + return Buffer.from(value).toString("base64"); +} + +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value: Uint8Array): string { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer); + return bufferValue.toString("base64"); +} + +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value: string): Uint8Array { + return Buffer.from(value, "base64"); +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/constants.ts b/node_modules/@azure/ms-rest-js/lib/util/constants.ts new file mode 100644 index 0000000..2cceec2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/constants.ts @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export const Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.7.0", + + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + + StatusCodes: { + TooManyRequests: 429, + }, + }, + + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. 
+ * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + + AUTHORIZATION_SCHEME: "Bearer", + + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; diff --git a/node_modules/@azure/ms-rest-js/lib/util/utils.ts b/node_modules/@azure/ms-rest-js/lib/util/utils.ts new file mode 100644 index 0000000..0675100 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/utils.ts @@ -0,0 +1,288 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { v4 as uuidv4 } from "uuid"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +import { Constants } from "./constants"; + +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export const isNode = + typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; + +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck: { protocol: string }): boolean { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} + +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export function encodeUri(uri: string): string { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} + +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export function stripResponse(response: HttpOperationResponse): any { + const strippedResponse: any = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} + +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export function stripRequest(request: WebResourceLike): WebResourceLike { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} + +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export function isValidUuid(uuid: string): boolean { + const validUuidRegex = new RegExp( + "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", + "ig" + ); + return validUuidRegex.test(uuid); +} + +/** + * Provides an array of values of an object. 
For example
+ * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"].
+ *
+ * @param {object} obj An object whose properties need to be enumerated so that its values can be provided as an array
+ *
+ * @return {any[]} An array of values of the given object.
+ */
+export function objectValues(obj: { [key: string]: any }): any[] {
+  const result: any[] = [];
+  if (obj && obj instanceof Object) {
+    for (const key in obj) {
+      if (obj.hasOwnProperty(key)) {
+        result.push((obj)[key]);
+      }
+    }
+  } else {
+    throw new Error(
+      `The provided object ${JSON.stringify(
+        obj,
+        undefined,
+        2
+      )} is not a valid object that can be ` + `enumerated to provide its values as an array.`
+    );
+  }
+  return result;
+}
+
+/**
+ * Generated UUID
+ *
+ * @return {string} RFC4122 v4 UUID.
+ */
+export function generateUuid(): string {
+  return uuidv4();
+}
+
+/**
+ * Executes an array of promises sequentially. Inspiration for this method is here:
+ * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises!
+ *
+ * @param {Array} promiseFactories An array of promise factories (a function that returns a promise)
+ *
+ * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.
+ * If not provided then the promise chain starts with undefined.
+ *
+ * @return A chain of resolved or rejected promises
+ */
+export function executePromisesSequentially(promiseFactories: Array<any>, kickstart: any) {
+  let result = Promise.resolve(kickstart);
+  promiseFactories.forEach((promiseFactory) => {
+    result = result.then(promiseFactory);
+  });
+  return result;
+}
+
+/**
+ * Merges source object into the target object
+ * @param {object} source The object that needs to be merged
+ *
+ * @param {object} target The object to be merged into
+ *
+ * @returns {object} Returns the merged target object.
+ */
+export function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {
+  Object.keys(source).forEach((key) => {
+    target[key] = source[key];
+  });
+  return target;
+}
+
+/**
+ * A wrapper for setTimeout that resolves a promise after t milliseconds.
+ * @param {number} t The number of milliseconds to be delayed.
+ * @param {T} value The value to be resolved with after a timeout of t milliseconds.
+ * @returns {Promise<T>} Resolved promise
+ */
+export function delay<T>(t: number, value?: T): Promise<T> {
+  return new Promise((resolve) => setTimeout(() => resolve(value), t));
+}
+
+/**
+ * Service callback that is returned for REST requests initiated by the service client.
+ */
+export interface ServiceCallback<TResult> {
+  /**
+   * A method that will be invoked as a callback to a service function.
+   * @param {Error | RestError | null} err The error that occurred, if any, while executing the request; otherwise null.
+   * @param {TResult} [result] The deserialized response body if an error did not occur.
+   * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.
+   * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.
+   */
+  (
+    err: Error | RestError | null,
+    result?: TResult,
+    request?: WebResourceLike,
+    response?: HttpOperationResponse
+  ): void;
+}
+
+/**
+ * Converts a Promise to a callback.
+ * @param {Promise} promise The Promise to be converted to a callback
+ * @returns {Function} A function that takes the callback (cb: Function): void
+ * @deprecated generated code should instead depend on responseToBody
+ */
+export function promiseToCallback(promise: Promise<any>): Function {
+  if (typeof promise.then !== "function") {
+    throw new Error("The provided input is not a Promise.");
+  }
+  return (cb: Function): void => {
+    promise.then(
+      (data: any) => {
+        cb(undefined, data);
+      },
+      (err: Error) => {
+        cb(err);
+      }
+    );
+  };
+}
+
+/**
+ * Converts a Promise to a service callback.
+ * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback
+ * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void
+ */
+export function promiseToServiceCallback<T>(promise: Promise<HttpOperationResponse>): Function {
+  if (typeof promise.then !== "function") {
+    throw new Error("The provided input is not a Promise.");
+  }
+  return (cb: ServiceCallback<T>): void => {
+    promise.then(
+      (data: HttpOperationResponse) => {
+        process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);
+      },
+      (err: Error) => {
+        process.nextTick(cb, err);
+      }
+    );
+  };
+}
+
+export function prepareXMLRootList(obj: any, elementName: string) {
+  if (!Array.isArray(obj)) {
+    obj = [obj];
+  }
+  return { [elementName]: obj };
+}
+
+/**
+ * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor
+ * @param {object} targetCtor The target object on which the properties need to be applied.
+ * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.
+ */
+export function applyMixins(targetCtor: any, sourceCtors: any[]): void {
+  sourceCtors.forEach((sourceCtors) => {
+    Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {
+      targetCtor.prototype[name] = sourceCtors.prototype[name];
+    });
+  });
+}
+
+const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;
+
+/**
+ * Indicates whether the given string is in ISO 8601 format.
+ * @param {string} value The value to be validated for ISO 8601 duration format.
+ * @return {boolean} `true` if valid, `false` otherwise.
+ */
+export function isDuration(value: string): boolean {
+  return validateISODuration.test(value);
+}
+
+/**
+ * Replace all of the instances of searchValue in value with the provided replaceValue.
+ * @param {string | undefined} value The value to search and replace in.
+ * @param {string} searchValue The value to search for in the value argument.
+ * @param {string} replaceValue The value to replace searchValue with in the value argument.
+ * @returns {string | undefined} The value where each instance of searchValue was replaced with replaceValue.
+ */
+export function replaceAll(
+  value: string | undefined,
+  searchValue: string,
+  replaceValue: string
+): string | undefined {
+  return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || "");
+}
+
+/**
+ * Determines whether the given entity is a basic/primitive type
+ * (string, number, boolean, null, undefined).
+ * @param value Any entity
+ * @return boolean - true if it is a primitive type, false otherwise.
+ */ +export function isPrimitiveType(value: any): boolean { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts b/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts new file mode 100644 index 0000000..8b11845 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts @@ -0,0 +1,169 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +const parser = new DOMParser(); + +// Policy to make our code Trusted Types compliant. +// https://github.com/w3c/webappsec-trusted-types +// We are calling DOMParser.parseFromString() to parse XML payload from Azure services. +// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing +// according to the spec. There are no HTML/XSS security concerns on the usage of +// parseFromString() here. +let ttPolicy: Pick | undefined; +try { + if (typeof self.trustedTypes !== "undefined") { + ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", { + createHTML: (s: any) => s, + }); + } +} catch (e) { + console.warn('Could not create trusted types policy "@azure/ms-rest-js#xml.browser"'); +} + +export function parseXML(str: string): Promise { + try { + const dom = parser.parseFromString((ttPolicy?.createHTML(str) ?? str) as string, "application/xml"); + throwIfError(dom); + + const obj = domToObject(dom.childNodes[0]); + return Promise.resolve(obj); + } catch (err) { + return Promise.reject(err); + } +} + +let errorNS = ""; +try { + const invalidXML = (ttPolicy?.createHTML("INVALID") ?? "INVALID") as string; + errorNS = + parser.parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI! ?? ""; +} catch (ignored) { + // Most browsers will return a document containing , but IE will throw. +} + +function throwIfError(dom: Document) { + if (errorNS) { + const parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror"); + if (parserErrors.length) { + throw new Error(parserErrors.item(0)!.innerHTML); + } + } +} + +function isElement(node: Node): node is Element { + return !!(node as Element).attributes; +} + +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node: Node): Element | undefined { + return isElement(node) && node.hasAttributes() ? 
node : undefined; +} + +function domToObject(node: Node): any { + let result: any = {}; + + const childNodeCount: number = node.childNodes.length; + + const firstChildNode: Node = node.childNodes[0]; + const onlyChildTextValue: string | undefined = + (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + + const elementWithAttributes: Element | undefined = asElementWithAttributes(node); + if (elementWithAttributes) { + result["$"] = {}; + + for (let i = 0; i < elementWithAttributes.attributes.length; i++) { + const attr = elementWithAttributes.attributes[i]; + result["$"][attr.nodeName] = attr.nodeValue; + } + + if (onlyChildTextValue) { + result["_"] = onlyChildTextValue; + } + } else if (childNodeCount === 0) { + result = ""; + } else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + + if (!onlyChildTextValue) { + for (let i = 0; i < childNodeCount; i++) { + const child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + const childObject: any = domToObject(child); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + + return result; +} + +// tslint:disable-next-line:no-null-keyword +const doc = document.implementation.createDocument(null, null, null); +const serializer = new XMLSerializer(); + +export function stringifyXML(obj: any, opts?: { rootName?: string }) { + const rootName = (opts && opts.rootName) || "root"; + const dom = buildNode(obj, rootName)[0]; + return ( + '' + serializer.serializeToString(dom) + ); +} + +function buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] { + const result = []; + for (const key of Object.keys(attrs)) { + const attr = doc.createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} + +function buildNode(obj: any, elementName: string): Node[] { + if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { + const elem = doc.createElement(elementName); + elem.textContent = obj.toString(); + return [elem]; + } else if (Array.isArray(obj)) { + const result = []; + for (const arrayElem of obj) { + for (const child of buildNode(arrayElem, elementName)) { + result.push(child); + } + } + return result; + } else if (typeof obj === "object") { + const elem = doc.createElement(elementName); + for (const key of Object.keys(obj)) { + if (key === "$") { + for (const attr of buildAttributes(obj[key])) { + elem.attributes.setNamedItem(attr); + } + } else { + for (const child of buildNode(obj[key], key)) { + elem.appendChild(child); + } + } + } + return [elem]; + } else { + throw new Error(`Illegal value passed to buildObject: ${obj}`); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/xml.ts b/node_modules/@azure/ms-rest-js/lib/util/xml.ts new file mode 100644 index 0000000..b11ff5c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/xml.ts @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
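Editor's note (not part of the upstream diff): the xml.browser.ts file above and the xml.ts file that follows expose the same parseXML/stringifyXML surface, backed by DOMParser/XMLSerializer in the browser and xml2js in Node. A minimal round-trip sketch follows; the relative import path and the sample object are illustrative assumptions, and it relies on the option defaults shown in the diff (explicitRoot: false drops the root element on parse).

```ts
// Hypothetical usage sketch; "./xml" stands in for this internal module, not a public API.
import { parseXML, stringifyXML } from "./xml";

async function roundTrip(): Promise<void> {
  // stringifyXML serializes the object under the requested root element.
  const xml = stringifyXML({ Name: "example", Size: 42 }, { rootName: "Blob" });
  // parseXML strips the root element (explicitRoot: false) and returns the inner object;
  // xml2js yields string values, so Size comes back as the string "42".
  const body = await parseXML(xml);
  console.log(body.Name, body.Size); // -> example 42
}

roundTrip().catch(console.error);
```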
+ +import * as xml2js from "xml2js"; + +export function stringifyXML(obj: any, opts?: { rootName?: string }) { + const builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} + +export function parseXML(str: string): Promise { + const xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise((resolve, reject) => { + if (!str) { + reject(new Error("Document is empty")); + } else { + xmlParser.parseString(str, (err: any, res: any) => { + if (err) { + reject(err); + } else { + resolve(res); + } + }); + } + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/webResource.ts b/node_modules/@azure/ms-rest-js/lib/webResource.ts new file mode 100644 index 0000000..10bb55f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/webResource.ts @@ -0,0 +1,668 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from "./httpHeaders"; +import { OperationSpec } from "./operationSpec"; +import { Mapper, Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { AgentSettings, ProxySettings } from "./serviceClient"; + +export type HttpMethods = + | "GET" + | "PUT" + | "POST" + | "DELETE" + | "PATCH" + | "HEAD" + | "OPTIONS" + | "TRACE"; +export type HttpRequestBody = + | Blob + | string + | ArrayBuffer + | ArrayBufferView + | (() => NodeJS.ReadableStream); + +/** + * Fired in response to upload or download progress. + */ +export type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; + +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + readonly aborted: boolean; + dispatchEvent: (event: Event) => boolean; + onabort: ((this: AbortSignalLike, ev: Event) => any) | null; + addEventListener: ( + type: "abort", + listener: (this: AbortSignalLike, ev: Event) => any, + options?: any + ) => void; + removeEventListener: ( + type: "abort", + listener: (this: AbortSignalLike, ev: Event) => any, + options?: any + ) => void; +} + +/** + * An abstraction over a REST call. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. 
+ */ + operationResponseGetter?: ( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse; + formData?: any; + /** + * A query string represented as an object. + */ + query?: { [key: string]: any }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * HTTP(S) agent configuration. + */ + agentSettings?: AgentSettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + + /** + * Used to abort the request later. + */ + abortSignal?: AbortSignalLike; + + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} + +export function isWebResourceLike(object: any): object is WebResourceLike { + if (typeof object !== "object") { + return false; + } + if ( + typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function" + ) { + return true; + } + return false; +} + +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ +export class WebResource { + url: string; + method: HttpMethods; + body?: any; + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. 
+ */ + operationResponseGetter?: ( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse; + formData?: any; + query?: { [key: string]: any }; + operationSpec?: OperationSpec; + withCredentials: boolean; + timeout: number; + proxySettings?: ProxySettings; + keepAlive?: boolean; + agentSettings?: AgentSettings; + redirectLimit?: number; + + abortSignal?: AbortSignalLike; + + /** Callback which fires upon upload progress. */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + constructor( + url?: string, + method?: HttpMethods, + body?: any, + query?: { [key: string]: any }, + headers?: { [key: string]: any } | HttpHeadersLike, + streamResponseBody?: boolean, + withCredentials?: boolean, + abortSignal?: AbortSignalLike, + timeout?: number, + onUploadProgress?: (progress: TransferProgressEvent) => void, + onDownloadProgress?: (progress: TransferProgressEvent) => void, + proxySettings?: ProxySettings, + keepAlive?: boolean, + agentSettings?: AgentSettings, + redirectLimit?: number + ) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource { + if (!options) { + throw new Error("options object is required"); + } + + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + + if (options.url && options.pathTemplate) { + throw new Error( + "options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them." + ); + } + + if ( + (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string") + ) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + + // set the url if it is provided. 
+ if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error( + 'The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods) + ); + } + } + this.method = options.method.toUpperCase() as HttpMethods; + + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = + baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error( + `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.` + ); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = (pathParameters as { [key: string]: any })[pathParamName]; + if ( + pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object") + ) { + throw new Error( + `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in ${pathParameters} - ${JSON.stringify( + pathParameters, + undefined, + 2 + )}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.` + ); + } + + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error( + `options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.` + ); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error( + `options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.` + ); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam: any = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error( + `options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.` + ); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize( + options.serializationMapper, + options.body, + "requestBody" + ); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + + return this; + } + + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + clone(): WebResource { + const result = new WebResource( + this.url, + this.method, + this.body, + this.query, + this.headers && this.headers.clone(), + this.streamResponseBody, + this.withCredentials, + this.abortSignal, + this.timeout, + this.onUploadProgress, + this.onDownloadProgress, + this.proxySettings, + this.keepAlive, + this.agentSettings, + this.redirectLimit + ); + + if (this.formData) { + result.formData = this.formData; + } + + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + + return result; + } +} + +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: { "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } } + * - query-parameter-value in "string" format: { "query-parameter-name": "query-parameter-value"}. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { [key: string]: any | ParameterValue }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}" + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: { "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } } + * - path-parameter-value in "string" format: { "path-parameter-name": "path-parameter-value" }. 
+ */ + pathParameters?: { [key: string]: any | ParameterValue }; + formData?: { [key: string]: any }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { [key: string]: any }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { [x: string]: any }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: object; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + abortSignal?: AbortSignalLike; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + + onUploadProgress?: (progress: TransferProgressEvent) => void; + onDownloadProgress?: (progress: TransferProgressEvent) => void; + streamResponseBody?: boolean; +} + +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + value: any; + skipUrlEncoding: boolean; + [key: string]: any; +} + +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * @property {object} [customHeaders] User defined custom request headers that + * will be applied before the request is sent. + */ + customHeaders?: { [key: string]: string }; + + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + [key: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts new file mode 100644 index 0000000..698d701 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts @@ -0,0 +1,178 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpClient } from "./httpClient"; +import { HttpHeaders } from "./httpHeaders"; +import { WebResourceLike, TransferProgressEvent } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { RestError } from "./restError"; + +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +export class XhrHttpClient implements HttpClient { + public sendRequest(request: WebResourceLike): Promise { + const xhr = new XMLHttpRequest(); + + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + + const abortSignal = request.abortSignal; + if (abortSignal) { + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + + if (request.formData) { + const formData = request.formData; + const requestForm = new FormData(); + const appendFormValue = (key: string, value: any) => { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm.append(key, value.value, value.options); + } else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } else { + appendFormValue(formKey, formValue); + } + } + + request.body = requestForm; + request.formData = undefined; + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const header of request.headers.headersArray()) { + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? 
null : request.body); + + if (request.streamResponseBody) { + return new Promise((resolve, reject) => { + xhr.addEventListener("readystatechange", () => { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => + resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }) + ); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} + +function addProgressListener( + xhr: XMLHttpRequestEventTarget, + listener?: (progress: TransferProgressEvent) => void +) { + if (listener) { + xhr.addEventListener("progress", (rawEvent) => + listener({ + loadedBytes: rawEvent.loaded, + }) + ); + } +} + +// exported locally for testing +export function parseHeaders(xhr: XMLHttpRequest) { + const responseHeaders = new HttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} + +function rejectOnTerminalEvent( + request: WebResourceLike, + xhr: XMLHttpRequest, + reject: (err: any) => void +) { + xhr.addEventListener("error", () => + reject( + new RestError( + `Failed to send request to ${request.url}`, + RestError.REQUEST_SEND_ERROR, + undefined, + request + ) + ) + ); + xhr.addEventListener("abort", () => + reject( + new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request) + ) + ); + xhr.addEventListener("timeout", () => + reject( + new RestError( + `timeout of ${xhr.timeout}ms exceeded`, + RestError.REQUEST_SEND_ERROR, + undefined, + request + ) + ) + ); +} diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/ms-rest-js/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000..3d4c823 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ + diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/LICENSE.txt b/node_modules/@azure/ms-rest-js/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000..bfe6430 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/README.md b/node_modules/@azure/ms-rest-js/node_modules/tslib/README.md new file mode 100644 index 0000000..a5b2692 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/README.md @@ -0,0 +1,142 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 2.3.3 or later +npm install tslib + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 2.3.3 or later +yarn add tslib + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 2.3.3 or later +bower install tslib + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 2.3.3 or later +jspm install tslib + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. 
For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@1.[version].0/tslib.d.ts"] + } + } +} +``` + + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/modules/index.js b/node_modules/@azure/ms-rest-js/node_modules/tslib/modules/index.js new file mode 100644 index 0000000..d241d04 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/modules/index.js @@ -0,0 +1,51 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, +}; diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/modules/package.json b/node_modules/@azure/ms-rest-js/node_modules/tslib/modules/package.json new file mode 100644 index 0000000..aafa0e4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/package.json b/node_modules/@azure/ms-rest-js/node_modules/tslib/package.json new file mode 100644 index 0000000..f8c2a53 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/package.json @@ -0,0 +1,37 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "1.14.1", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": 
"tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./": "./" + } +} diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/test/validateModuleExportsMatchCommonJS/index.js b/node_modules/@azure/ms-rest-js/node_modules/tslib/test/validateModuleExportsMatchCommonJS/index.js new file mode 100644 index 0000000..0c1b613 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/test/validateModuleExportsMatchCommonJS/index.js @@ -0,0 +1,23 @@ +// When on node 14, it validates that all of the commonjs exports +// are correctly re-exported for es modules importers. + +const nodeMajor = Number(process.version.split(".")[0].slice(1)) +if (nodeMajor < 14) { + console.log("Skipping because node does not support module exports.") + process.exit(0) +} + +// ES Modules import via the ./modules folder +import * as esTSLib from "../../modules/index.js" + +// Force a commonjs resolve +import { createRequire } from "module"; +const commonJSTSLib = createRequire(import.meta.url)("../../tslib.js"); + +for (const key in commonJSTSLib) { + if (commonJSTSLib.hasOwnProperty(key)) { + if(!esTSLib[key]) throw new Error(`ESModules is missing ${key} - it needs to be re-exported in ./modules/index.js`) + } +} + +console.log("All exports in commonjs are available for es module consumers.") diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/test/validateModuleExportsMatchCommonJS/package.json b/node_modules/@azure/ms-rest-js/node_modules/tslib/test/validateModuleExportsMatchCommonJS/package.json new file mode 100644 index 0000000..166e509 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/test/validateModuleExportsMatchCommonJS/package.json @@ -0,0 +1,6 @@ +{ + "type": "module", + "scripts": { + "test": "node index.js" + } +} diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.d.ts b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000..0756b28 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.d.ts @@ -0,0 +1,37 @@ +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */
+export declare function __extends(d: Function, b: Function): void;
+export declare function __assign(t: any, ...sources: any[]): any;
+export declare function __rest(t: any, propertyNames: (string | symbol)[]): any;
+export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any;
+export declare function __param(paramIndex: number, decorator: Function): Function;
+export declare function __metadata(metadataKey: any, metadataValue: any): Function;
+export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any;
+export declare function __generator(thisArg: any, body: Function): any;
+export declare function __exportStar(m: any, exports: any): void;
+export declare function __values(o: any): any;
+export declare function __read(o: any, n?: number): any[];
+export declare function __spread(...args: any[][]): any[];
+export declare function __spreadArrays(...args: any[][]): any[];
+export declare function __await(v: any): any;
+export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any;
+export declare function __asyncDelegator(o: any): any;
+export declare function __asyncValues(o: any): any;
+export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray;
+export declare function __importStar<T>(mod: T): T;
+export declare function __importDefault<T>(mod: T): T | { default: T };
+export declare function __classPrivateFieldGet<T extends object, V>(receiver: T, privateMap: { has(o: T): boolean, get(o: T): V | undefined }): V;
+export declare function __classPrivateFieldSet<T extends object, V>(receiver: T, privateMap: { has(o: T): boolean, set(o: T, value: V): any }, value: V): V;
+export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void;
\ No newline at end of file
diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.es6.html b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.es6.html
new file mode 100644
index 0000000..b122e41
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.es6.html
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.es6.js b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.es6.js
new file mode 100644
index 0000000..0e0d8d0
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.es6.js
@@ -0,0 +1,218 @@
+/*! *****************************************************************************
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export function __createBinding(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +} + +export function __exportStar(m, exports) { + for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p]; +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +}; + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result.default = mod; + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, privateMap) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to get private field on non-instance"); + } + return privateMap.get(receiver); +} + +export function __classPrivateFieldSet(receiver, privateMap, value) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to set private field on non-instance"); + } + privateMap.set(receiver, value); + return value; +} diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.html b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.html new file mode 100644 index 0000000..44c9ba5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.js b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.js new file mode 100644 index 0000000..e5b7c9b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/tslib/tslib.js @@ -0,0 +1,284 @@ +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + + __extends = function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __createBinding = function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }; + + __exportStar = function (m, exports) { + for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p]; + }; + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, privateMap) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to get private field on non-instance"); + } + return privateMap.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, privateMap, value) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to set private field on non-instance"); + } + privateMap.set(receiver, value); + return value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); diff --git a/node_modules/@azure/ms-rest-js/package.json b/node_modules/@azure/ms-rest-js/package.json new file mode 100644 index 0000000..b08feed --- /dev/null +++ b/node_modules/@azure/ms-rest-js/package.json @@ -0,0 +1,182 @@ +{ + "name": "@azure/ms-rest-js", + "author": { + "name": "Microsoft Corporation", + "email": "azsdkteam@microsoft.com", + "url": "https://github.com/Azure/ms-rest-js" + }, + "version": "2.7.0", + "description": "Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "keywords": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "main": "./dist/msRest.node.js", + "module": "./es/lib/msRest.js", + "types": "./es/lib/msRest.d.ts", + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "es/lib/**/*.js", + "es/lib/**/*.js.map", + "es/lib/**/*.d.ts", + "es/lib/**/*.d.ts.map", + "lib/**/!(dom.d).ts", + "dom-shim.d.ts", + "LICENSE", + "README.md", + "ThirdPartyNotices.txt" + ], + "browser": { + "./es/lib/policies/msRestUserAgentPolicy.js": "./es/lib/policies/msRestUserAgentPolicy.browser.js", + "./es/lib/policies/agentPolicy.js": "./es/lib/policies/agentPolicy.browser.js", + "./es/lib/policies/proxyPolicy.js": "./es/lib/policies/proxyPolicy.browser.js", + "./es/lib/util/base64.js": "./es/lib/util/base64.browser.js", + "./es/lib/util/xml.js": "./es/lib/util/xml.browser.js", + "./es/lib/defaultHttpClient.js": "./es/lib/defaultHttpClient.browser.js" + }, + "license": "MIT", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.5.0" + }, + "devDependencies": { + "@azure/logger-js": "^1.1.0", + "@microsoft/api-extractor": "^7.18.11", + "@ts-common/azure-js-dev-tools": "^19.4.0", + "@types/bluebird": "3.5.36", + "@types/chai": "^4.1.7", + "@types/express": 
"4.17.0", + "@types/express-serve-static-core": "4.17.0", + "@types/fetch-mock": "^7.3.1", + "@types/form-data": "^2.2.1", + "@types/glob": "^7.1.1", + "@types/karma": "^3.0.3", + "@types/mocha": "^5.2.7", + "@types/node": "^12.0.12", + "@types/node-fetch": "^2.3.7", + "@types/semver": "^6.0.1", + "@types/sinon": "^7.0.13", + "@types/trusted-types": "^2.0.0", + "@types/tunnel": "0.0.1", + "@types/uuid": "^8.3.2", + "@types/webpack": "^4.4.34", + "@types/webpack-dev-middleware": "^2.0.3", + "@types/xml2js": "^0.4.4", + "abortcontroller-polyfill": "^1.3.0", + "chai": "4.3.4", + "cross-env": "^7.0.3", + "express": "^4.17.1", + "fetch-mock": "^7.3.3", + "glob": "^7.1.4", + "karma": "^4.1.0", + "karma-chai": "^0.1.0", + "karma-chrome-launcher": "^2.2.0", + "karma-firefox-launcher": "^1.1.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0", + "karma-sourcemap-loader": "^0.3.7", + "karma-typescript-es6-transform": "^4.1.1", + "karma-webpack": "^4.0.2", + "mocha": "^6.1.4", + "mocha-chrome": "^2.0.0", + "mocha-junit-reporter": "^1.23.0", + "mocha-multi-reporters": "^1.1.7", + "npm-run-all": "^4.1.5", + "nyc": "^14.1.1", + "prettier": "2.2.1", + "rollup": "^1.16.6", + "rollup-plugin-commonjs": "^10.0.1", + "rollup-plugin-json": "^4.0.0", + "rollup-plugin-multi-entry": "^2.1.0", + "rollup-plugin-node-resolve": "^5.2.0", + "rollup-plugin-resolve": "0.0.1-predev.1", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-visualizer": "^2.4.4", + "semver": "^6.2.0", + "shx": "^0.3.2", + "sinon": "^7.3.2", + "terser": "^4.0.2", + "ts-loader": "^6.0.4", + "ts-node": "^8.3.0", + "tslint": "^5.18.0", + "tslint-eslint-rules": "^5.4.0", + "typescript": "^3.5.2", + "webpack": "^4.35.2", + "webpack-cli": "^3.3.5", + "webpack-dev-middleware": "^3.7.0", + "xhr-mock": "^2.4.1", + "yarn": "^1.16.0" + }, + "homepage": "https://github.com/Azure/ms-rest-js", + "repository": { + "type": "git", + "url": "git@github.com:Azure/ms-rest-js.git" + }, + "bugs": { + "url": "http://github.com/Azure/ms-rest-js/issues" + }, + "scripts": { + "build": "run-p build:scripts build:lib", + "build:scripts": "tsc -p ./.scripts/", + "build:lib": "run-s build:tsc build:rollup build:minify-browser extract-api", + "build:tsc": "tsc -p tsconfig.es.json", + "build:rollup": "rollup -c rollup.config.ts", + "build:minify-browser": "terser -c -m --comments --source-map \"content='./dist/msRest.browser.js.map'\" -o ./dist/msRest.browser.min.js ./dist/msRest.browser.js", + "build:test-browser": "webpack --config webpack.testconfig.ts", + "extract-api": "api-extractor run --local", + "format": "prettier --write \"./**/*.ts\" *.json", + "test": "run-p test:tslint test:unit test:karma", + "test:tslint": "tslint -p .", + "test:unit": "cross-env TS_NODE_FILES=true nyc mocha", + "test:karma": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --browsers ChromeNoSecurity --single-run ", + "test:karma:debug": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --log-level debug --browsers ChromeDebugging --debug --auto-watch", + "test:karma:debugff": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --log-level debug --browsers FirefoxDebugging --debug --auto-watch", + "dep:autorest.typescript": "npx ts-node .scripts/testDependentProjects.ts autorest.typescript 'gulp build' 'gulp regenerate' 'npm run local'", + "dep:ms-rest-azure-js": "npx ts-node .scripts/testDependentProjects.ts ms-rest-azure-js", + "publish-preview": "mocha 
--no-colors && shx rm -rf dist/test && node ./.scripts/publish", + "local": "ts-node ./.scripts/local.ts", + "latest": "ts-node ./.scripts/latest.ts", + "prepack": "npm i && npm run build", + "check:packagejsonversion": "ts-node ./.scripts/checkPackageJsonVersion.ts", + "check:foronlycalls": "ts-node ./.scripts/checkForOnlyCalls.ts", + "check:everything": "ts-node ./.scripts/checkEverything.ts" + }, + "sideEffects": false, + "nyc": { + "extension": [ + ".ts" + ], + "exclude": [ + "coverage/**/*", + "**/*.d.ts", + "**/*.js" + ], + "reporter": [ + "text", + "html", + "cobertura" + ], + "all": true + } +} diff --git a/node_modules/@azure/storage-blob/LICENSE b/node_modules/@azure/storage-blob/LICENSE new file mode 100644 index 0000000..ea8fb15 --- /dev/null +++ b/node_modules/@azure/storage-blob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/storage-blob/README.md b/node_modules/@azure/storage-blob/README.md new file mode 100644 index 0000000..8aa33ad --- /dev/null +++ b/node_modules/@azure/storage-blob/README.md @@ -0,0 +1,533 @@ +# Azure Storage Blob client library for JavaScript + +Azure Storage Blob is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data. Unstructured data is data that does not adhere to a particular data model or definition, such as text or binary data. + +This project provides a client library in JavaScript that makes it easy to consume Microsoft Azure Storage Blob service. 
+ +Use the client libraries in this package to: + +- Get/Set Blob Service Properties +- Create/List/Delete Containers +- Create/Read/List/Update/Delete Block Blobs +- Create/Read/List/Update/Delete Page Blobs +- Create/Read/List/Update/Delete Append Blobs + +Key links + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob) +- [Package (npm)](https://www.npmjs.com/package/@azure/storage-blob/) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/@azure/storage-blob) +- [Product documentation](https://docs.microsoft.com/azure/storage/blobs/storage-blobs-overview) +- [Samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples) +- [Azure Storage Blob REST APIs](https://docs.microsoft.com/rest/api/storageservices/blob-service-rest-api) + +## Getting started + +### Currently supported environments + +- [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule) +- Latest versions of Safari, Chrome, Edge, and Firefox. + +See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +### Prerequisites + +- An [Azure subscription](https://azure.microsoft.com/free/) +- A [Storage Account](https://docs.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-portal) + +### Install the package + +The preferred way to install the Azure Storage Blob client library for JavaScript is to use the npm package manager. Type the following into a terminal window: + +```bash +npm install @azure/storage-blob +``` + +### Authenticate the client + +Azure Storage supports several ways to authenticate. In order to interact with the Azure Blob Storage service you'll need to create an instance of a Storage client - `BlobServiceClient`, `ContainerClient`, or `BlobClient` for example. See [samples for creating the `BlobServiceClient`](#create-the-blob-service-client) to learn more about authentication. + +- [Azure Active Directory](#with-defaultazurecredential-from-azureidentity-package) +- [Shared Key](#with-storagesharedkeycredential) +- [Shared access signatures](#with-sas-token) + +#### Azure Active Directory + +The Azure Blob Storage service supports the use of Azure Active Directory to authenticate requests to its APIs. The [`@azure/identity`](https://www.npmjs.com/package/@azure/identity) package provides a variety of credential types that your application can use to do this. Please see the [README for `@azure/identity`](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/identity/identity/README.md) for more details and samples to get you started. + +### Compatibility + +This library is compatible with Node.js and browsers, and validated against LTS Node.js versions (>=8.16.0) and latest versions of Chrome, Firefox and Edge. + +#### Web Workers + +This library requires certain DOM objects to be globally available when used in the browser, which web workers do not make available by default. You will need to polyfill these to make this library work in web workers. 
+ +For more information please refer to our [documentation for using Azure SDK for JS in Web Workers](https://aka.ms/azsdk/js/web-workers) + +This library depends on following DOM APIs which need external polyfills loaded when used in web workers: + +- [`document`](https://developer.mozilla.org/docs/Web/API/Document) +- [`DOMParser`](https://developer.mozilla.org/docs/Web/API/DOMParser) +- [`Node`](https://developer.mozilla.org/docs/Web/API/Node) +- [`XMLSerializer`](https://developer.mozilla.org/docs/Web/API/XMLSerializer) + +#### Differences between Node.js and browsers + +There are differences between Node.js and browsers runtime. When getting started with this library, pay attention to APIs or classes marked with _"ONLY AVAILABLE IN NODE.JS RUNTIME"_ or _"ONLY AVAILABLE IN BROWSERS"_. + +- If a blob holds compressed data in `gzip` or `deflate` format and its content encoding is set accordingly, downloading behavior is different between Node.js and browsers. In Node.js storage clients will download the blob in its compressed format, while in browsers the data will be downloaded in de-compressed format. + +##### Features, interfaces, classes or functions only available in Node.js + +- Shared Key Authorization based on account name and account key + - `StorageSharedKeyCredential` +- Shared Access Signature(SAS) generation + - `generateAccountSASQueryParameters()` + - `generateBlobSASQueryParameters()` +- Parallel uploading and downloading. Note that `BlockBlobClient.uploadData()` is available in both Node.js and browsers. + - `BlockBlobClient.uploadFile()` + - `BlockBlobClient.uploadStream()` + - `BlobClient.downloadToBuffer()` + - `BlobClient.downloadToFile()` + +##### Features, interfaces, classes or functions only available in browsers + +- Parallel uploading and downloading + - `BlockBlobClient.uploadBrowserData()` + +### JavaScript Bundle + +To use this client library in the browser, first you need to use a bundler. For details on how to do this, please refer to our [bundling documentation](https://aka.ms/AzureSDKBundling). + +### CORS + +You need to set up [Cross-Origin Resource Sharing (CORS)](https://docs.microsoft.com/rest/api/storageservices/cross-origin-resource-sharing--cors--support-for-the-azure-storage-services) rules for your storage account if you need to develop for browsers. Go to Azure portal and Azure Storage Explorer, find your storage account, create new CORS rules for blob/queue/file/table service(s). + +For example, you can create following CORS settings for debugging. But please customize the settings carefully according to your requirements in production environment. + +- Allowed origins: \* +- Allowed verbs: DELETE,GET,HEAD,MERGE,POST,OPTIONS,PUT +- Allowed headers: \* +- Exposed headers: \* +- Maximum age (seconds): 86400 + +## Key concepts + +Blob storage is designed for: + +- Serving images or documents directly to a browser. +- Storing files for distributed access. +- Streaming video and audio. +- Writing to log files. +- Storing data for backup and restore, disaster recovery, and archiving. +- Storing data for analysis by an on-premises or Azure-hosted service. 
+ +Blob storage offers three types of resources: + +- The _storage account_ used via `BlobServiceClient` +- A _container_ in the storage account used via `ContainerClient` +- A _blob_ in a container used via `BlobClient` + +## Examples + +- [Import the package](#import-the-package) +- [Create the blob service client](#create-the-blob-service-client) +- [Create a new container](#create-a-new-container) +- [List the containers](#list-the-containers) +- [Create a blob by uploading data](#create-a-blob-by-uploading-data) +- [List blobs inside a container](#list-blobs-inside-a-container) +- [Download a blob and convert it to a string (Node.js)](#download-a-blob-and-convert-it-to-a-string-nodejs) +- [Download a blob and convert it to a string (Browsers)](#download-a-blob-and-convert-it-to-a-string-browsers) + +### Import the package + +To use the clients, import the package into your file: + +```javascript +const AzureStorageBlob = require("@azure/storage-blob"); +``` + +Alternatively, selectively import only the types you need: + +```javascript +const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob"); +``` + +### Create the blob service client + +The `BlobServiceClient` requires an URL to the blob service and an access credential. It also optionally accepts some settings in the `options` parameter. + +#### with `DefaultAzureCredential` from `@azure/identity` package + +**Recommended way to instantiate a `BlobServiceClient`** + +Setup : Reference - Authorize access to blobs and queues with Azure Active Directory from a client application - https://docs.microsoft.com/azure/storage/common/storage-auth-aad-app + +- Register a new AAD application and give permissions to access Azure Storage on behalf of the signed-in user + + - Register a new application in the Azure Active Directory(in the azure-portal) - https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app + - In the `API permissions` section, select `Add a permission` and choose `Microsoft APIs`. + - Pick `Azure Storage` and select the checkbox next to `user_impersonation` and then click `Add permissions`. This would allow the application to access Azure Storage on behalf of the signed-in user. + +- Grant access to Azure Blob data with RBAC in the Azure Portal + + - RBAC roles for blobs and queues - https://docs.microsoft.com/azure/storage/common/storage-auth-aad-rbac-portal. + - In the azure portal, go to your storage-account and assign **Storage Blob Data Contributor** role to the registered AAD application from `Access control (IAM)` tab (in the left-side-navbar of your storage account in the azure-portal). + +- Environment setup for the sample + - From the overview page of your AAD Application, note down the `CLIENT ID` and `TENANT ID`. In the "Certificates & Secrets" tab, create a secret and note that down. + - Make sure you have AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET as environment variables to successfully execute the sample(Can leverage process.env). 
+ +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +// Enter your storage account name +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); +``` + +See the [Azure AD Auth sample](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/javascript/azureAdAuth.js) for a complete example using this method. + +[Note - Above steps are only for Node.js] + +#### using connection string + +Alternatively, you can instantiate a `BlobServiceClient` using the `fromConnectionString()` static method with the full connection string as the argument. (The connection string can be obtained from the azure portal.) [ONLY AVAILABLE IN NODE.JS RUNTIME] + +```javascript +const { BlobServiceClient } = require("@azure/storage-blob"); + +const connStr = ""; + +const blobServiceClient = BlobServiceClient.fromConnectionString(connStr); +``` + +#### with `StorageSharedKeyCredential` + +Alternatively, you instantiate a `BlobServiceClient` with a `StorageSharedKeyCredential` by passing account-name and account-key as arguments. (The account-name and account-key can be obtained from the azure portal.) +[ONLY AVAILABLE IN NODE.JS RUNTIME] + +```javascript +const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob"); + +// Enter your storage account name and shared key +const account = ""; +const accountKey = ""; + +// Use StorageSharedKeyCredential with storage account and account key +// StorageSharedKeyCredential is only available in Node.js runtime, not in browsers +const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey); +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + sharedKeyCredential +); +``` + +#### with SAS Token + +Also, You can instantiate a `BlobServiceClient` with a shared access signatures (SAS). You can get the SAS token from the Azure Portal or generate one using `generateAccountSASQueryParameters()`. + +```javascript +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const sas = ""; + +const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net${sas}`); +``` + +### Create a new container + +Use `BlobServiceClient.getContainerClient()` to get a container client instance then create a new container resource. 
+ +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + // Create a container + const containerName = `newcontainer${new Date().getTime()}`; + const containerClient = blobServiceClient.getContainerClient(containerName); + const createContainerResponse = await containerClient.create(); + console.log(`Created container ${containerName} successfully`, createContainerResponse.requestId); +} + +main(); +``` + +### List the containers + +Use the `BlobServiceClient.listContainers()` function to iterate over the containers, +using the new `for-await-of` syntax: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + let containers = blobServiceClient.listContainers(); + for await (const container of containers) { + console.log(`Container ${i++}: ${container.name}`); + } +} + +main(); +``` + +Alternatively, without using `for-await-of`: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + let iter = blobServiceClient.listContainers(); + let containerItem = await iter.next(); + while (!containerItem.done) { + console.log(`Container ${i++}: ${containerItem.value.name}`); + containerItem = await iter.next(); + } +} + +main(); +``` + +In addition, pagination is supported for listing via `byPage()`: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + if (response.containerItems) { + for (const container of response.containerItems) { + console.log(`Container ${i++}: ${container.name}`); + } + } + } +} + +main(); +``` + +For a complete sample on iterating containers, please see [samples/v12/typescript/src/listContainers.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/listContainers.ts).
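+
+`byPage()` also lets you resume listing from a saved position by passing a continuation token back into it. The following is a brief sketch of that pattern (assuming, as in the SDK's paging samples, that each page response exposes a `continuationToken` property):
+
+```javascript
+const { DefaultAzureCredential } = require("@azure/identity");
+const { BlobServiceClient } = require("@azure/storage-blob");
+
+const account = "";
+const defaultAzureCredential = new DefaultAzureCredential();
+
+const blobServiceClient = new BlobServiceClient(
+  `https://${account}.blob.core.windows.net`,
+  defaultAzureCredential
+);
+
+async function main() {
+  // Read one page of up to 2 containers and remember where it stopped.
+  let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });
+  let response = (await iterator.next()).value;
+  for (const container of response.containerItems || []) {
+    console.log(`Container: ${container.name}`);
+  }
+
+  // Resume listing from the saved continuation token.
+  const marker = response.continuationToken;
+  iterator = blobServiceClient.listContainers().byPage({ continuationToken: marker, maxPageSize: 10 });
+  response = (await iterator.next()).value;
+  for (const container of response.containerItems || []) {
+    console.log(`Container: ${container.name}`);
+  }
+}
+
+main();
+```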
+ +### Create a blob by uploading data + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + + const content = "Hello world!"; + const blobName = "newblob" + new Date().getTime(); + const blockBlobClient = containerClient.getBlockBlobClient(blobName); + const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + console.log(`Uploaded block blob ${blobName} successfully`, uploadBlobResponse.requestId); +} + +main(); +``` + +### List blobs inside a container + +Listing blobs is similar to listing containers. + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + + let i = 1; + let blobs = containerClient.listBlobsFlat(); + for await (const blob of blobs) { + console.log(`Blob ${i++}: ${blob.name}`); + } +} + +main(); +``` + +For a complete sample on iterating blobs, please see [samples/v12/typescript/src/listBlobsFlat.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/listBlobsFlat.ts). + +### Download a blob and convert it to a string (Node.js) + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; +const blobName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + const blobClient = containerClient.getBlobClient(blobName); + + // Get blob content from position 0 to the end + // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody + const downloadBlockBlobResponse = await blobClient.download(); + const downloaded = ( + await streamToBuffer(downloadBlockBlobResponse.readableStreamBody) + ).toString(); + console.log("Downloaded blob content:", downloaded); + + // [Node.js only] A helper method used to read a Node.js readable stream into a Buffer + async function streamToBuffer(readableStream) { + return new Promise((resolve, reject) => { + const chunks = []; + readableStream.on("data", (data) => { + chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + }); + readableStream.on("end", () => { + resolve(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); + }); + } +} + +main(); +``` + +### Download a blob and convert it to a string (Browsers) + +Please refer to the [JavaScript Bundle](#javascript-bundle) section for more information on using this library in the browser.
+ +```javascript +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const sas = ""; +const containerName = ""; +const blobName = ""; + +const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net${sas}`); + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + const blobClient = containerClient.getBlobClient(blobName); + + // Get blob content from position 0 to the end + // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody + const downloadBlockBlobResponse = await blobClient.download(); + const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + console.log("Downloaded blob content", downloaded); + + // [Browsers only] A helper method used to convert a browser Blob into string. + async function blobToString(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsText(blob); + }); + } +} + +main(); +``` + +A complete example of simple scenarios is at [samples/v12/typescript/src/sharedKeyAuth.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/sharedKeyAuth.ts). + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +const { setLogLevel } = require("@azure/logger"); + +setLogLevel("info"); +``` + +## Next steps + +More code samples: + +- [Blob Storage Samples (JavaScript)](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples/v12/javascript) +- [Blob Storage Samples (TypeScript)](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples/v12/typescript) +- [Blob Storage Test Cases](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/test/) + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +Also refer to [Storage specific guide](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/CONTRIBUTING.md) for additional information on setting up the test environment for storage libraries. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fstorage%2Fstorage-blob%2FREADME.png) diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js new file mode 100644 index 0000000..bd2fea4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=BatchResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map new file mode 100644 index 0000000..43638fc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchResponse.js","sourceRoot":"","sources":["../../../src/BatchResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { HttpHeaders } from \"@azure/core-http\";\n\n/**\n * The response data associated with a single request within a batch operation.\n */\nexport interface BatchSubResponse {\n /**\n * The status code of the sub operation.\n */\n status: number;\n\n /**\n * The status message of the sub operation.\n */\n statusMessage: string;\n\n /**\n * The error code of the sub operation, if the sub operation failed.\n */\n errorCode?: string;\n\n /**\n * The HTTP response headers.\n */\n headers: HttpHeaders;\n\n /**\n * The body as text.\n */\n bodyAsText?: string;\n\n /**\n * The batch sub request corresponding to the sub response.\n */\n _request: BatchSubRequest;\n}\n\n/**\n * The multipart/mixed response which contains the response for each subrequest.\n */\nexport interface ParsedBatchResponse {\n /**\n * The parsed sub responses.\n */\n subResponses: BatchSubResponse[];\n\n /**\n * The succeeded executed sub responses' count;\n */\n subResponsesSucceededCount: number;\n\n /**\n * The failed executed sub responses' count;\n */\n subResponsesFailedCount: number;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js new file mode 100644 index 0000000..1d083ad --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "@azure/core-http"; +import { HTTP_VERSION_1_1, HTTP_LINE_ENDING, HeaderConstants, HTTPURLConnection, } from "./utils/constants"; +import { getBodyAsText } from "./BatchUtils"; +import { logger } from "./log"; +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +export class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. 
+ throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. + if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. 
+ if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. + if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; + } +} +//# sourceMappingURL=BatchResponseParser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map new file mode 100644 index 0000000..818e7e9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BatchResponseParser.js","sourceRoot":"","sources":["../../../src/BatchResponseParser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAG/C,OAAO,EACL,gBAAgB,EAChB,gBAAgB,EAChB,eAAe,EACf,iBAAiB,GAClB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAG7C,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAE/B,MAAM,qBAAqB,GAAG,IAAI,CAAC;AACnC,MAAM,eAAe,GAAG,GAAG,CAAC;AAC5B,MAAM,SAAS,GAAG,CAAC,CAAC,CAAC;AAErB;;GAEG;AACH,MAAM,OAAO,mBAAmB;IAO9B,YACE,aAA8C,EAC9C,WAAyC;QAEzC,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE;YAChD,+FAA+F;YAC/F,MAAM,IAAI,UAAU,CAAC,mEAAmE,CAAC,CAAC;SAC3F;QAED,IAAI,CAAC,WAAW,IAAI,WAAW,CAAC,IAAI,KAAK,CAAC,EAAE;YAC1C,wCAAwC;YACxC,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;SAClF;QAED,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,aAAa,CAAC,WAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3E,IAAI,CAAC,iBAAiB,GAAG,KAAK,IAAI,CAAC,qBAAqB,GAAG,gBAAgB,EAAE,CAAC;QAC9E,IAAI,CAAC,mBAAmB,GAAG,KAAK,IAAI,CAAC,qBAAqB,IAAI,CAAC;IACjE,CAAC;IAED,yHAAyH;IAClH,KAAK,CAAC,kBAAkB;QAC7B,uGAAuG;QACvG,0BAA0B;QAC1B,IAAI,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,KAAK,iBAAiB,CAAC,aAAa,EAAE;YAC3E,MAAM,IAAI,KAAK,CACb,qDAAqD,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,IAAI,CAC7F,CAAC;SACH;QAED,MAAM,kBAAkB,GAAG,MAAM,aAAa,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnE,MAAM,YAAY,GAAG,kBAAkB;aACpC,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC,iCAAiC;aACpE,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;aAC7B,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,mDAAmD;QAChE,MAAM,gBAAgB,GAAG,YAAY,CAAC,MAAM,CAAC;QAE7C,uDAAuD;QACvD,4EAA4E;QAC5E,+FAA+F;QAC/F,uGAAuG;QACvG,IAAI,gBAAgB,KAAK,IAAI,CAAC,WAAW,CAAC,IAAI,IAAI,gBAAgB,KAAK,CAAC,EAAE;YACxE,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;SAC7F;QAED,MAAM,wBAAwB,GAA4B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC;QACtF,IAAI,0BAA0B,GAAW,CAAC,CAAC;QAC3C,IAAI,uBAAuB,GAAW,CAAC,CAAC;QAExC,0BAA0B;QAC1B,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,gBAAgB,EAAE,KAAK,EAAE,EAAE;YACrD,MAAM,WAAW,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;YACxC,MAAM,uBAAuB,GAAG,EAAsB,CAAC;YACvD,uBAAuB,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAEpD,MAAM,aAAa,GAAG,WAAW,CAAC,KAAK,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC;YAC/D,IAAI,uBAAuB,GAAG,KAAK,CAAC;YACpC,IAAI,qBAAqB,GAAG,KAAK,CAAC;YAClC,IAAI,aAAa,GAAG,KAAK,CAAC;YAC1B,IAAI,SAAS,GAAG,SAAS,CAAC;YAE1B,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAI,CAAC,uBAAuB,EAAE;oBAC5B,yCAAyC;oBACzC,IAAI,YAAY,CAAC,UAAU,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;wBACvD,SAAS,GAAG,QAAQ,CAAC,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;qBACpE;oBAED,oFAAoF;oBACpF,iCAAiC;oBACjC,IAAI,YAAY,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;wBAC7C,uBAAuB,GAAG,IAAI,CAAC;wBAE/B,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;wBACnD,uBAAuB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;wBACrD,uBAAuB,CAAC,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;qBAC/E;oBAED,SAAS,CAAC,iHAAiH;iBAC5H;gBAED,IAAI,YAAY,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;oBAC9B,4GAA4G;oBAC5G,IAAI,CAAC,qBAAqB,EAAE;wBAC1B,qBAAqB,GAAG,IAAI,CAAC;qBAC9B;oBAED,SAAS,CAAC,kBAAkB;iBAC7B;gBAED,2EAA2E;gBAC3E,IAAI,CAAC,qBAAqB,EAAE;oBAC1B,IAAI,YAAY,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;wBACtD,2DAA2D;wBAC3D,MAAM,IAAI,KAAK,CACb,uCAAuC,YAAY,oCAAoC,qBAAqB,IAAI,CACjH,CAAC;qBACH;oBAED,iCAAiC;oBACjC,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC;oBACzD,uBAAuB,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,eAAe,CAAC,eAAe,EAAE;wBACjD,uBAAuB,CAAC,SAAS,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;wBAC9C,aAAa,GAAG,IAAI,CAAC;qBACtB;iBACF;qBAAM;oBACL,iCAAiC;oBAC
jC,IAAI,CAAC,uBAAuB,CAAC,UAAU,EAAE;wBACvC,uBAAuB,CAAC,UAAU,GAAG,EAAE,CAAC;qBACzC;oBAED,uBAAuB,CAAC,UAAU,IAAI,YAAY,CAAC;iBACpD;aACF,CAAC,gBAAgB;YAElB,kHAAkH;YAClH,uHAAuH;YACvH,oHAAoH;YACpH,uHAAuH;YACvH,IACE,SAAS,KAAK,SAAS;gBACvB,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC;gBAC3B,SAAS,IAAI,CAAC;gBACd,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI;gBACjC,wBAAwB,CAAC,SAAS,CAAC,KAAK,SAAS,EACjD;gBACA,uBAAuB,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC;gBACpE,wBAAwB,CAAC,SAAS,CAAC,GAAG,uBAAuB,CAAC;aAC/D;iBAAM;gBACL,MAAM,CAAC,KAAK,CACV,gBAAgB,KAAK,uEAAuE,SAAS,EAAE,CACxG,CAAC;aACH;YAED,IAAI,aAAa,EAAE;gBACjB,uBAAuB,EAAE,CAAC;aAC3B;iBAAM;gBACL,0BAA0B,EAAE,CAAC;aAC9B;SACF;QAED,OAAO;YACL,YAAY,EAAE,wBAAwB;YACtC,0BAA0B,EAAE,0BAA0B;YACtD,uBAAuB,EAAE,uBAAuB;SACjD,CAAC;IACJ,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"@azure/core-http\";\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport {\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n HeaderConstants,\n HTTPURLConnection,\n} from \"./utils/constants\";\nimport { getBodyAsText } from \"./BatchUtils\";\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { BatchSubResponse, ParsedBatchResponse } from \"./BatchResponse\";\nimport { logger } from \"./log\";\n\nconst HTTP_HEADER_DELIMITER = \": \";\nconst SPACE_DELIMITER = \" \";\nconst NOT_FOUND = -1;\n\n/**\n * Util class for parsing batch response.\n */\nexport class BatchResponseParser {\n private readonly batchResponse: ServiceSubmitBatchResponseModel;\n private readonly responseBatchBoundary: string;\n private readonly perResponsePrefix: string;\n private readonly batchResponseEnding: string;\n private readonly subRequests: Map;\n\n constructor(\n batchResponse: ServiceSubmitBatchResponseModel,\n subRequests: Map\n ) {\n if (!batchResponse || !batchResponse.contentType) {\n // In special case(reported), server may return invalid content-type which could not be parsed.\n throw new RangeError(\"batchResponse is malformed or doesn't contain valid content-type.\");\n }\n\n if (!subRequests || subRequests.size === 0) {\n // This should be prevent during coding.\n throw new RangeError(\"Invalid state: subRequests is not provided or size is 0.\");\n }\n\n this.batchResponse = batchResponse;\n this.subRequests = subRequests;\n this.responseBatchBoundary = this.batchResponse.contentType!.split(\"=\")[1];\n this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`;\n this.batchResponseEnding = `--${this.responseBatchBoundary}--`;\n }\n\n // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response\n public async parseBatchResponse(): Promise {\n // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse\n // sub request's response.\n if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) {\n throw new Error(\n `Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`\n );\n }\n\n const responseBodyAsText = await getBodyAsText(this.batchResponse);\n\n const subResponses = responseBodyAsText\n .split(this.batchResponseEnding)[0] // string after ending is useless\n .split(this.perResponsePrefix)\n .slice(1); // string before first response boundary is useless\n const subResponseCount = subResponses.length;\n\n // Defensive coding in case of potential error parsing.\n // Note: subResponseCount == 1 is special case where sub 
request is invalid.\n // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.\n // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.\n if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) {\n throw new Error(\"Invalid state: sub responses' count is not equal to sub requests' count.\");\n }\n\n const deserializedSubResponses: Array = new Array(subResponseCount);\n let subResponsesSucceededCount: number = 0;\n let subResponsesFailedCount: number = 0;\n\n // Parse sub subResponses.\n for (let index = 0; index < subResponseCount; index++) {\n const subResponse = subResponses[index];\n const deserializedSubResponse = {} as BatchSubResponse;\n deserializedSubResponse.headers = new HttpHeaders();\n\n const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);\n let subRespHeaderStartFound = false;\n let subRespHeaderEndFound = false;\n let subRespFailed = false;\n let contentId = NOT_FOUND;\n\n for (const responseLine of responseLines) {\n if (!subRespHeaderStartFound) {\n // Convention line to indicate content ID\n if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {\n contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);\n }\n\n // Http version line with status code indicates the start of sub request's response.\n // Example: HTTP/1.1 202 Accepted\n if (responseLine.startsWith(HTTP_VERSION_1_1)) {\n subRespHeaderStartFound = true;\n\n const tokens = responseLine.split(SPACE_DELIMITER);\n deserializedSubResponse.status = parseInt(tokens[1]);\n deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);\n }\n\n continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: *\n }\n\n if (responseLine.trim() === \"\") {\n // Sub response's header start line already found, and the first empty line indicates header end line found.\n if (!subRespHeaderEndFound) {\n subRespHeaderEndFound = true;\n }\n\n continue; // Skip empty line\n }\n\n // Note: when code reach here, it indicates subRespHeaderStartFound == true\n if (!subRespHeaderEndFound) {\n if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {\n // Defensive coding to prevent from missing valuable lines.\n throw new Error(\n `Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`\n );\n }\n\n // Parse headers of sub response.\n const tokens = responseLine.split(HTTP_HEADER_DELIMITER);\n deserializedSubResponse.headers.set(tokens[0], tokens[1]);\n if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {\n deserializedSubResponse.errorCode = tokens[1];\n subRespFailed = true;\n }\n } else {\n // Assemble body of sub response.\n if (!deserializedSubResponse.bodyAsText) {\n deserializedSubResponse.bodyAsText = \"\";\n }\n\n deserializedSubResponse.bodyAsText += responseLine;\n }\n } // Inner for end\n\n // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.\n // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it\n // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. 
If not, we simply don't return that\n // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose.\n if (\n contentId !== NOT_FOUND &&\n Number.isInteger(contentId) &&\n contentId >= 0 &&\n contentId < this.subRequests.size &&\n deserializedSubResponses[contentId] === undefined\n ) {\n deserializedSubResponse._request = this.subRequests.get(contentId)!;\n deserializedSubResponses[contentId] = deserializedSubResponse;\n } else {\n logger.error(\n `subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`\n );\n }\n\n if (subRespFailed) {\n subResponsesFailedCount++;\n } else {\n subResponsesSucceededCount++;\n }\n }\n\n return {\n subResponses: deserializedSubResponses,\n subResponsesSucceededCount: subResponsesSucceededCount,\n subResponsesFailedCount: subResponsesFailedCount,\n };\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js new file mode 100644 index 0000000..1a3926a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { blobToString } from "./utils/utils.browser"; +export async function getBodyAsText(batchResponse) { + const blob = (await batchResponse.blobBody); + return blobToString(blob); +} +export function utf8ByteLength(str) { + return new Blob([str]).size; +} +//# sourceMappingURL=BatchUtils.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map new file mode 100644 index 0000000..3d84ba6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchUtils.browser.js","sourceRoot":"","sources":["../../../src/BatchUtils.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,MAAM,IAAI,GAAG,CAAC,MAAM,aAAa,CAAC,QAAQ,CAAS,CAAC;IACpD,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AAC9B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { blobToString } from \"./utils/utils.browser\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n const blob = (await batchResponse.blobBody) as Blob;\n return blobToString(blob);\n}\n\nexport function utf8ByteLength(str: string): number {\n return new Blob([str]).size;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js new file mode 100644 index 0000000..35be3b1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { streamToBuffer2 } from "./utils/utils.node"; +import { BATCH_MAX_PAYLOAD_IN_BYTES } from "./utils/constants"; +export async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +export function utf8ByteLength(str) { + return Buffer.byteLength(str); +} +//# sourceMappingURL=BatchUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map new file mode 100644 index 0000000..959a5a1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchUtils.js","sourceRoot":"","sources":["../../../src/BatchUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAE/D,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAEtD,MAAM,cAAc,GAAG,MAAM,eAAe,CAC1C,aAAa,CAAC,kBAA2C,EACzD,MAAM,CACP,CAAC;IAEF,6CAA6C;IAC7C,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;IAEzC,OAAO,MAAM,CAAC,QAAQ,EAAE,CAAC;AAC3B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { streamToBuffer2 } from \"./utils/utils.node\";\nimport { BATCH_MAX_PAYLOAD_IN_BYTES } from \"./utils/constants\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n\n const responseLength = await streamToBuffer2(\n batchResponse.readableStreamBody as NodeJS.ReadableStream,\n buffer\n );\n\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n\n return buffer.toString();\n}\n\nexport function utf8ByteLength(str: string): number {\n return Buffer.byteLength(str);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js new file mode 100644 index 0000000..6de406b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js @@ -0,0 +1,282 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, deserializationPolicy, generateUuid, HttpHeaders, WebResource, isTokenCredential, bearerTokenAuthenticationPolicy, isNode, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { BlobClient } from "./Clients"; +import { Mutex } from "./utils/Mutex"; +import { Pipeline } from "./Pipeline"; +import { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from "./utils/utils.common"; +import { HeaderConstants, BATCH_MAX_REQUEST, HTTP_VERSION_1_1, HTTP_LINE_ENDING, StorageOAuthScopes, } from "./utils/constants"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { createSpan } from "./utils/tracing"; +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); + try { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); + try { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = generateUuid(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
+ */ + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + const factories = new Array(policyFactoryLength); + factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer + factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers + if (!isAnonymousCreds) { + factories[2] = isTokenCredential(credential) + ? attachCredential(bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire + return new Pipeline(factories, {}); + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +class BatchRequestAssemblePolicy extends BaseRequestPolicy { + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new WebResource(), + status: 200, + headers: new HttpHeaders(), + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; // Intercept request from going to wire + } +} +class BatchRequestAssemblePolicyFactory { + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } +} +class BatchHeaderFilterPolicy extends BaseRequestPolicy { + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return this._nextPolicy.sendRequest(request); + } +} +class BatchHeaderFilterPolicyFactory { + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=BlobBatch.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map new file mode 100644 index 0000000..a6351fa --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobBatch.js","sourceRoot":"","sources":["../../../src/BlobBatch.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,qBAAqB,EACrB,YAAY,EACZ,WAAW,EAKX,WAAW,EAEX,iBAAiB,EACjB,+BAA+B,EAC/B,MAAM,GACP,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,UAAU,EAAyC,MAAM,WAAW,CAAC;AAE9E,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAChG,OAAO,EACL,eAAe,EACf,iBAAiB,EACjB,gBAAgB,EAChB,gBAAgB,EAChB,kBAAkB,GACnB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAmB7C;;;GAGG;AACH,MAAM,OAAO,SAAS;IAKpB;QAHiB,UAAK,GAAW,OAAO,CAAC;QAIvC,IAAI,CAAC,YAAY,GAAG,IAAI,iBAAiB,EAAE,CAAC;IAC9C,CAAC;IAED;;;;OAIG;IACI,uBAAuB;QAC5B,OAAO,IAAI,CAAC,YAAY,CAAC,uBAAuB,EAAE,CAAC;IACrD,CAAC;IAED;;OAEG;IACI,kBAAkB;QACvB,OAAO,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,CAAC;IAChD,CAAC;IAED;;OAEG;IACI,cAAc;QACnB,OAAO,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,CAAC;IAC5C,CAAC;IAEO,KAAK,CAAC,qBAAqB,CACjC,UAA2B,EAC3B,sBAA2C;QAE3C,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAE7B,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,gBAAgB,CAAC,UAAU,CAAC,CAAC;YAC/C,MAAM,sBAAsB,EAAE,CAAC;YAC/B,IAAI,CAAC,YAAY,CAAC,iBAAiB,CAAC,UAAU,CAAC,CAAC;SACjD;gBAAS;YACR,MAAM,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;SAChC;IACH,CAAC;IAEO,YAAY,CAAC,SAAqC;QACxD,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;SAC5B;QACD,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS,EAAE;YAChC,MAAM,IAAI,UAAU,CAClB,yFAAyF,IAAI,CAAC,SAAS,cAAc,CACtH,CAAC;SACH;IACH,CAAC;IAqCM,KAAK,CAAC,UAAU,CACrB,eAAoC,EACpC,mBAKa,EACb,OAA2B;QAE3B,IAAI,GAAW,CAAC;QAChB,IAAI,UAA8E,CAAC;QAEnF,IACE,OAAO,eAAe,KAAK,QAAQ;YACnC,CAAC,CAAC,MAAM,IAAI,mBAAmB,YAAY,0BAA0B,CAAC;gBACpE,mBAAmB,YAAY,mBAAmB;gBAClD,iBAAiB,CAAC,mBAAmB,CAAC,CAAC,EACzC;YACA,iBAAiB;YACjB,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,mBAAmB,CAAC;SAClC;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;YAChD,kBAAkB;YAClB,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;YAC1B,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,OAAO,GAAG,mBAAwC,CAAC;SACpD;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;SACH;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;YAC5B,MAAM,IAAI,CAAC,qBAAqB,CAC9B;gBACE,GAAG,EAAE,GAAG;gBACR,UAAU,EAAE,UAAU;aACvB,EACD,KAAK,IAAI,EAAE;gBACT,MAAM,IAAI,UAA
U,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAC5E,cAAc,CACf,CAAC;YACJ,CAAC,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAgDM,KAAK,CAAC,iBAAiB,CAC5B,eAAoC,EACpC,gBAIc,EACd,aAA+C,EAC/C,OAA4B;QAE5B,IAAI,GAAW,CAAC;QAChB,IAAI,UAA8E,CAAC;QACnF,IAAI,IAAgB,CAAC;QAErB,IACE,OAAO,eAAe,KAAK,QAAQ;YACnC,CAAC,CAAC,MAAM,IAAI,gBAAgB,YAAY,0BAA0B,CAAC;gBACjE,gBAAgB,YAAY,mBAAmB;gBAC/C,iBAAiB,CAAC,gBAAgB,CAAC,CAAC,EACtC;YACA,iBAAiB;YACjB,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,gBAGM,CAAC;YACpB,IAAI,GAAG,aAA2B,CAAC;SACpC;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;YAChD,kBAAkB;YAClB,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;YAC1B,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,IAAI,GAAG,gBAA8B,CAAC;YACtC,OAAO,GAAG,aAAmC,CAAC;SAC/C;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;SACH;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAE1F,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,eAAe,CAAC,CAAC;YACnC,MAAM,IAAI,CAAC,qBAAqB,CAC9B;gBACE,GAAG,EAAE,GAAG;gBACR,UAAU,EAAE,UAAU;aACvB,EACD,KAAK,IAAI,EAAE;gBACT,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,aAAa,CACnF,IAAI,EACJ,cAAc,CACf,CAAC;YACJ,CAAC,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,iBAAiB;IASrB;QACE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;QAEf,MAAM,QAAQ,GAAG,YAAY,EAAE,CAAC;QAEhC,kBAAkB;QAClB,IAAI,CAAC,QAAQ,GAAG,SAAS,QAAQ,EAAE,CAAC;QACpC,oBAAoB;QACpB,iCAAiC;QACjC,oCAAoC;QACpC,IAAI,CAAC,gBAAgB,GAAG,KAAK,IAAI,CAAC,QAAQ,GAAG,gBAAgB,GAAG,eAAe,CAAC,YAAY,qBAAqB,gBAAgB,GAAG,eAAe,CAAC,yBAAyB,UAAU,CAAC;QACxL,4CAA4C;QAC5C,IAAI,CAAC,oBAAoB,GAAG,6BAA6B,IAAI,CAAC,QAAQ,EAAE,CAAC;QACzE,sBAAsB;QACtB,IAAI,CAAC,kBAAkB,GAAG,KAAK,IAAI,CAAC,QAAQ,IAAI,CAAC;QAEjD,IAAI,CAAC,WAAW,GAAG,IAAI,GAAG,EAAE,CAAC;IAC/B,CAAC;IAED;;;;;;OAMG;IACI,cAAc,CACnB,UAA8E;QAE9E,MAAM,gBAAgB,GAAG,UAAU,YAAY,mBAAmB,CAAC;QACnE,MAAM,mBAAmB,GAAG,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,mHAAmH;QAC/K,MAAM,SAAS,GAA2B,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAEzE,SAAS,CAAC,CAAC,CAAC,GAAG,qBAAqB,EAAE,CAAC,CAAC,8DAA8D;QACtG,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,8BAA8B,EAAE,CAAC,CAAC,gEAAgE;QACrH,IAAI,CAAC,gBAAgB,EAAE;YACrB,SAAS,CAAC,CAAC,CAAC,GAAG,iBAAiB,CAAC,UAAU,CAAC;gBAC1C,CAAC,CAAC,gBAAgB,CACd,+BAA+B,CAAC,UAAU,EAAE,kBAAkB,CAAC,EAC/D,UAAU,CACX;gBACH,CAAC,CAAC,UAAU,CAAC;SAChB;QACD,SAAS,CAAC,mBAAmB,GAAG,CAAC,CAAC,GAAG,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC,CAAC,yFAAyF;QAE3K,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;IACrC,CAAC;IAEM,sBAAsB,CAAC,OAAoB;QAChD,gCAAgC;QAChC,IAAI,CAAC,IAAI,IAAI;YACX,IAAI,CAAC,gBAAgB;YACrB,GAAG,eAAe,CAAC,UAAU,KAAK,IAAI,CAAC,cAAc,EAAE;YACvD,EAAE;YACF,GAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,kBAAkB,CAChD,OAAO,CAAC,GAAG,CACZ,IAAI,gBAAgB,GAAG,gBAAgB,EAAE,EAAE,qCAAqC;SAClF,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAEzB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,CAAC,IAAI,IAAI,GAAG,MAAM,CAAC,IAAI,KAAK,MAAM,CAAC,KAAK,GAAG,gBAAgB,EAAE,CAAC;SACnE;QAED,IAAI,CAAC,IAAI,IAAI,gBAAgB,CAAC,CAAC,0DAA0D;QACzF,wDAAwD;QACxD,8BAA8B;IAChC,CAAC;IAEM,gBAAgB,CAAC,UAA2B;QACjD,IAAI,IAAI,CAAC,cAAc,IAAI,iBAAiB,EAAE;YAC5C,MAAM,IAAI,UAAU,CAAC,iBAAiB,iBAAiB,iCAAiC,CAAC,CAAC;SAC3F;QAED,8CAA8C;QAC9C,MAAM,IAAI,GAAG,UAAU,CAA
C,UAAU,CAAC,GAAG,CAAC,CAAC;QACxC,IAAI,CAAC,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACxB,MAAM,IAAI,UAAU,CAAC,iCAAiC,UAAU,CAAC,GAAG,GAAG,CAAC,CAAC;SAC1E;IACH,CAAC;IAEM,iBAAiB,CAAC,UAA2B;QAClD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;QACtD,IAAI,CAAC,cAAc,EAAE,CAAC;IACxB,CAAC;IAED,wFAAwF;IACjF,kBAAkB;QACvB,OAAO,GAAG,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,kBAAkB,GAAG,gBAAgB,EAAE,CAAC;IACrE,CAAC;IAEM,uBAAuB;QAC5B,OAAO,IAAI,CAAC,oBAAoB,CAAC;IACnC,CAAC;IAEM,cAAc;QACnB,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;CACF;AAED,MAAM,0BAA2B,SAAQ,iBAAiB;IAQxD,YACE,YAA+B,EAC/B,UAAyB,EACzB,OAA6B;QAE7B,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAXZ,kBAAa,GAA0B;YACtD,OAAO,EAAE,IAAI,WAAW,EAAE;YAC1B,MAAM,EAAE,GAAG;YACX,OAAO,EAAE,IAAI,WAAW,EAAE;SAC3B,CAAC;QASA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,MAAM,IAAI,CAAC,YAAY,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC;QAExD,OAAO,IAAI,CAAC,aAAa,CAAC,CAAC,uCAAuC;IACpE,CAAC;CACF;AAED,MAAM,iCAAiC;IAGrC,YAAY,YAA+B;QACzC,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,0BAA0B,CAAC,IAAI,CAAC,YAAY,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;IAChF,CAAC;CACF;AAED,MAAM,uBAAwB,SAAQ,iBAAiB;IACrD,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,aAAa,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,eAAe,CAAC,YAAY,CAAC,EAAE;gBACrD,aAAa,GAAG,MAAM,CAAC,IAAI,CAAC;aAC7B;SACF;QAED,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,2DAA2D;SACnG;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF;AAED,MAAM,8BAA8B;IAC3B,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,uBAAuB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n generateUuid,\n HttpHeaders,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n WebResource,\n TokenCredential,\n isTokenCredential,\n bearerTokenAuthenticationPolicy,\n isNode,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobClient, BlobDeleteOptions, BlobSetTierOptions } from \"./Clients\";\nimport { AccessTier } from \"./generatedModels\";\nimport { Mutex } from \"./utils/Mutex\";\nimport { Pipeline } from \"./Pipeline\";\nimport { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from \"./utils/utils.common\";\nimport {\n HeaderConstants,\n BATCH_MAX_REQUEST,\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n StorageOAuthScopes,\n} from \"./utils/constants\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { createSpan } from \"./utils/tracing\";\n\n/**\n * A request associated with a batch operation.\n */\nexport interface BatchSubRequest {\n /**\n * The URL of the resource to request operation.\n */\n url: string;\n\n /**\n * The credential used for sub request.\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service.\n * You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n}\n\n/**\n * A BlobBatch represents an aggregated set of operations on blobs.\n * Currently, only `delete` and `setAccessTier` are supported.\n */\nexport class BlobBatch {\n private batchRequest: InnerBatchRequest;\n private readonly batch: string = \"batch\";\n private batchType: \"delete\" | \"setAccessTier\" | undefined;\n\n constructor() {\n this.batchRequest = new InnerBatchRequest();\n }\n\n /**\n * Get the value of Content-Type for a batch request.\n * The value must be multipart/mixed with a batch boundary.\n * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252\n */\n public getMultiPartContentType(): string {\n return this.batchRequest.getMultipartContentType();\n }\n\n /**\n * Get assembled HTTP request body for sub requests.\n */\n public getHttpRequestBody(): string {\n return this.batchRequest.getHttpRequestBody();\n }\n\n /**\n * Get sub requests that are added into the batch request.\n */\n public getSubRequests(): Map {\n return this.batchRequest.getSubRequests();\n }\n\n private async addSubRequestInternal(\n subRequest: BatchSubRequest,\n assembleSubRequestFunc: () => Promise\n ): Promise {\n await Mutex.lock(this.batch);\n\n try {\n this.batchRequest.preAddSubRequest(subRequest);\n await assembleSubRequestFunc();\n this.batchRequest.postAddSubRequest(subRequest);\n } finally {\n await Mutex.unlock(this.batch);\n }\n }\n\n private setBatchType(batchType: \"delete\" | \"setAccessTier\"): void {\n if (!this.batchType) {\n this.batchType = batchType;\n }\n if (this.batchType !== batchType) {\n throw new RangeError(\n `BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`\n );\n }\n }\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlob(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. 
See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param options -\n */\n public async deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise;\n\n public async deleteBlob(\n urlOrBlobClient: string | BlobClient,\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n options?: BlobDeleteOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||\n credentialOrOptions instanceof AnonymousCredential ||\n isTokenCredential(credentialOrOptions))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrOptions;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n options = credentialOrOptions as BlobDeleteOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchDeleteRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"delete\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n blobClient: BlobClient,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobAccessTier(\n urlOrBlobClient: string | BlobClient,\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n options?: BlobSetTierOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n let tier: AccessTier;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||\n credentialOrTier instanceof AnonymousCredential ||\n isTokenCredential(credentialOrTier))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrTier as\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential;\n tier = tierOrOptions as AccessTier;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n tier = credentialOrTier as AccessTier;\n options = tierOrOptions as BlobSetTierOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchSetTierRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"setAccessTier\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(\n tier,\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Inner batch request class which is responsible for assembling and serializing sub requests.\n * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.\n */\nclass InnerBatchRequest {\n private operationCount: number;\n private body: string;\n private subRequests: Map;\n private readonly boundary: string;\n private readonly subRequestPrefix: string;\n private readonly multipartContentType: string;\n private readonly batchRequestEnding: string;\n\n constructor() {\n this.operationCount = 0;\n this.body = \"\";\n\n const tempGuid = generateUuid();\n\n // batch_{batchid}\n this.boundary = `batch_${tempGuid}`;\n // --batch_{batchid}\n // Content-Type: application/http\n // Content-Transfer-Encoding: binary\n this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`;\n // multipart/mixed; boundary=batch_{batchid}\n this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`;\n // --batch_{batchid}--\n this.batchRequestEnding = `--${this.boundary}--`;\n\n this.subRequests = new Map();\n }\n\n /**\n * Create pipeline to assemble sub requests. The idea here is to use existing\n * credential and serialization/deserialization components, with additional policies to\n * filter unnecessary headers, assemble sub requests into request's body\n * and intercept request from going to wire.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public createPipeline(\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential\n ): Pipeline {\n const isAnonymousCreds = credential instanceof AnonymousCredential;\n const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]\n const factories: RequestPolicyFactory[] = new Array(policyFactoryLength);\n\n factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer\n factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers\n if (!isAnonymousCreds) {\n factories[2] = isTokenCredential(credential)\n ? 
attachCredential(\n bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes),\n credential\n )\n : credential;\n }\n factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire\n\n return new Pipeline(factories, {});\n }\n\n public appendSubRequestToBody(request: WebResource) {\n // Start to assemble sub request\n this.body += [\n this.subRequestPrefix, // sub request constant prefix\n `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID\n \"\", // empty line after sub request's content ID\n `${request.method.toString()} ${getURLPathAndQuery(\n request.url\n )} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method\n ].join(HTTP_LINE_ENDING);\n\n for (const header of request.headers.headersArray()) {\n this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`;\n }\n\n this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line\n // No body to assemble for current batch request support\n // End to assemble sub request\n }\n\n public preAddSubRequest(subRequest: BatchSubRequest) {\n if (this.operationCount >= BATCH_MAX_REQUEST) {\n throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`);\n }\n\n // Fast fail if url for sub request is invalid\n const path = getURLPath(subRequest.url);\n if (!path || path === \"\") {\n throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`);\n }\n }\n\n public postAddSubRequest(subRequest: BatchSubRequest) {\n this.subRequests.set(this.operationCount, subRequest);\n this.operationCount++;\n }\n\n // Return the http request body with assembling the ending line to the sub request body.\n public getHttpRequestBody(): string {\n return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`;\n }\n\n public getMultipartContentType(): string {\n return this.multipartContentType;\n }\n\n public getSubRequests(): Map {\n return this.subRequests;\n }\n}\n\nclass BatchRequestAssemblePolicy extends BaseRequestPolicy {\n private batchRequest: InnerBatchRequest;\n private readonly dummyResponse: HttpOperationResponse = {\n request: new WebResource(),\n status: 200,\n headers: new HttpHeaders(),\n };\n\n constructor(\n batchRequest: InnerBatchRequest,\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ) {\n super(nextPolicy, options);\n\n this.batchRequest = batchRequest;\n }\n\n public async sendRequest(request: WebResource): Promise {\n await this.batchRequest.appendSubRequestToBody(request);\n\n return this.dummyResponse; // Intercept request from going to wire\n }\n}\n\nclass BatchRequestAssemblePolicyFactory implements RequestPolicyFactory {\n private batchRequest: InnerBatchRequest;\n\n constructor(batchRequest: InnerBatchRequest) {\n this.batchRequest = batchRequest;\n }\n\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): BatchRequestAssemblePolicy {\n return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);\n }\n}\n\nclass BatchHeaderFilterPolicy extends BaseRequestPolicy {\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(request: WebResource): Promise {\n let xMsHeaderName = \"\";\n\n for (const header of request.headers.headersArray()) {\n if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {\n xMsHeaderName = header.name;\n }\n }\n\n if (xMsHeaderName !== \"\") {\n request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.\n }\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n\nclass BatchHeaderFilterPolicyFactory implements RequestPolicyFactory {\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): BatchHeaderFilterPolicy {\n return new BatchHeaderFilterPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js new file mode 100644 index 0000000..f15f53d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BatchResponseParser } from "./BatchResponseParser"; +import { utf8ByteLength } from "./BatchUtils"; +import { BlobBatch } from "./BlobBatch"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { Service, Container } from "./generated/src/operations"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageClientContext } from "./generated/src/storageClientContext"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { getURLPath } from "./utils/utils.common"; +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. + this.serviceOrContainerContext = new Container(storageClientContext); + } + else { + this.serviceOrContainerContext = new Service(storageClientContext); + } + } + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); + } + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); + try { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). 
+ const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +//# sourceMappingURL=BlobBatchClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map new file mode 100644 index 0000000..5d47ea9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobBatchClient.js","sourceRoot":"","sources":["../../../src/BlobBatchClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AASlC,OAAO,EAAE,mBAAmB,EAAE,MAAM,uBAAuB,CAAC;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAEjF,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAEhE,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AAExE,OAAO,EAAE,oBAAoB,EAAE,MAAM,sCAAsC,CAAC;AAC5E,OAAO,EAAwC,WAAW,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAC/F,OAAO,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AAiClD;;;;GAIG;AACH,MAAM,OAAO,eAAe;IA8B1B,YACE,GAAW,EACX,oBAIgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;SACjC;aAAM,IAAI,CAAC,oBAAoB,EAAE;YAChC,yBAAyB;YACzB,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM;YACL,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;SACvD;QAED,MAAM,oBAAoB,GAAG,IAAI,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,sBAAsB,EAAE,CAAC,CAAC;QAE9F,MAAM,IAAI,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QAC7B,IAAI,IAAI,IAAI,IAAI,KAAK,GAAG,EAAE;YACxB,oBAAoB;YACpB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAC;SACtE;aAAM;YACL,IAAI,CAAC,yBAAyB,GAAG,IAAI,OAAO,CAAC,oBAAoB,CAAC,CAAC;SACpE;IACH,CAAC;IAED;;;OAGG;IACI,WAAW;QAChB,OAAO,IAAI,SAAS,EAAE,CAAC;IACzB,CAAC;IAsCM,KAAK,CAAC,WAAW,CACtB,iBAA0C,EAC1C,mBAKa;IACb,mFAAmF;IACnF,gEAAgE;IAChE,OAA2B;QAE3B,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;QAC9B,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;YAC/C,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAsC,EAAE,OAAO,CAAC,CAAC;aAC1F;iBAAM;gBACL,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAwC,CAAC,CAAC;aACnF;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAkDM,KAAK,CAAC,kBAAkB,CAC7B,iBAA0C,EAC1C,gBAIc,EACd,aAA+C;IAC/C,mFAAmF;IACnF,gEAAgE;IAChE,OAA4B;QAE5B,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;QAC9B,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;YAC/C,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAAmC,EACnC,aAA2B,EAC3B,OAAO,CACR,CAAC;aACH;iBAAM;gBACL,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAA8B,EAC9B,aAAmC,CACpC,CAAC;aACH;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAkCG;IACI,KAAK,CAAC,WAAW,CACtB,YAAuB,EACvB,UAA8C,EAAE;QAEhD,IAAI,CAAC,YA
AY,IAAI,YAAY,CAAC,cAAc,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE;YAC7D,MAAM,IAAI,UAAU,CAAC,wDAAwD,CAAC,CAAC;SAChF;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,MAAM,gBAAgB,GAAG,YAAY,CAAC,kBAAkB,EAAE,CAAC;YAE3D,2FAA2F;YAC3F,MAAM,gBAAgB,GACpB,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC9C,cAAc,CAAC,gBAAgB,CAAC,EAChC,YAAY,CAAC,uBAAuB,EAAE,EACtC,gBAAgB,kCAEX,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YAEJ,gHAAgH;YAChH,MAAM,mBAAmB,GAAG,IAAI,mBAAmB,CACjD,gBAAgB,EAChB,YAAY,CAAC,cAAc,EAAE,CAC9B,CAAC;YACF,MAAM,eAAe,GAAG,MAAM,mBAAmB,CAAC,kBAAkB,EAAE,CAAC;YAEvE,MAAM,GAAG,GAAiC;gBACxC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,WAAW,EAAE,gBAAgB,CAAC,WAAW;gBACzC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,eAAe,EAAE,gBAAgB,CAAC,eAAe;gBACjD,OAAO,EAAE,gBAAgB,CAAC,OAAO;gBACjC,YAAY,EAAE,eAAe,CAAC,YAAY;gBAC1C,0BAA0B,EAAE,eAAe,CAAC,0BAA0B;gBACtE,uBAAuB,EAAE,eAAe,CAAC,uBAAuB;aACjE,CAAC;YAEF,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AccessTier,\n ServiceSubmitBatchHeaders,\n ServiceSubmitBatchOptionalParamsModel,\n ServiceSubmitBatchResponseModel,\n} from \"./generatedModels\";\nimport { ParsedBatchResponse } from \"./BatchResponse\";\nimport { BatchResponseParser } from \"./BatchResponseParser\";\nimport { utf8ByteLength } from \"./BatchUtils\";\nimport { BlobBatch } from \"./BlobBatch\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { HttpResponse, TokenCredential } from \"@azure/core-http\";\nimport { Service, Container } from \"./generated/src/operations\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobDeleteOptions, BlobClient, BlobSetTierOptions } from \"./Clients\";\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike, StoragePipelineOptions, newPipeline, isPipelineLike } from \"./Pipeline\";\nimport { getURLPath } from \"./utils/utils.common\";\n\n/**\n * Options to configure the Service - Submit Batch Optional Params.\n */\nexport interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel {}\n\n/**\n * Contains response data for blob batch operations.\n */\nexport declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse &\n ServiceSubmitBatchHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n };\n\n/**\n * Contains response data for the {@link deleteBlobs} operation.\n */\nexport declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * Contains response data for the {@link setBlobsAccessTier} operation.\n */\nexport declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n */\nexport class BlobBatchClient {\n private serviceOrContainerContext: Service | Container;\n\n /**\n * Creates an 
instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (!credentialOrPipeline) {\n // no credential provided\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n\n const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());\n\n const path = getURLPath(url);\n if (path && path !== \"/\") {\n // Container scoped.\n this.serviceOrContainerContext = new Container(storageClientContext);\n } else {\n this.serviceOrContainerContext = new Service(storageClientContext);\n }\n }\n\n /**\n * Creates a {@link BlobBatch}.\n * A BlobBatch represents an aggregated set of operations on blobs.\n */\n public createBatch(): BlobBatch {\n return new BlobBatch();\n }\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operations will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resources to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlobs(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation(subrequest) will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs to delete.\n * @param options -\n */\n public async deleteBlobs(\n blobClients: BlobClient[],\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n public async deleteBlobs(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as TokenCredential, options);\n } else {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as BlobDeleteOptions);\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs which should have a new tier set.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n blobClients: BlobClient[],\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobsAccessTier(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as TokenCredential,\n tierOrOptions as AccessTier,\n options\n );\n } else {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as AccessTier,\n tierOrOptions as BlobSetTierOptions\n );\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Submit batch request which consists of multiple subrequests.\n *\n * Get `blobBatchClient` and other details before running the snippets.\n * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`\n *\n * Example usage:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.deleteBlob(urlInString0, credential0);\n * await batchRequest.deleteBlob(urlInString1, credential1, {\n * deleteSnapshots: \"include\"\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * Example using a lease:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.setBlobAccessTier(blockBlobClient0, \"Cool\");\n * await batchRequest.setBlobAccessTier(blockBlobClient1, \"Cool\", {\n * conditions: { leaseId: leaseId }\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @param batchRequest - A set of Delete or SetTier operations.\n * @param options -\n */\n public async submitBatch(\n batchRequest: BlobBatch,\n options: BlobBatchSubmitBatchOptionalParams = {}\n ): Promise {\n if (!batchRequest 
|| batchRequest.getSubRequests().size === 0) {\n throw new RangeError(\"Batch request should contain one or more sub requests.\");\n }\n\n const { span, updatedOptions } = createSpan(\"BlobBatchClient-submitBatch\", options);\n try {\n const batchRequestBody = batchRequest.getHttpRequestBody();\n\n // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now.\n const rawBatchResponse: ServiceSubmitBatchResponseModel =\n await this.serviceOrContainerContext.submitBatch(\n utf8ByteLength(batchRequestBody),\n batchRequest.getMultiPartContentType(),\n batchRequestBody,\n {\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202).\n const batchResponseParser = new BatchResponseParser(\n rawBatchResponse,\n batchRequest.getSubRequests()\n );\n const responseSummary = await batchResponseParser.parseBatchResponse();\n\n const res: BlobBatchSubmitBatchResponse = {\n _response: rawBatchResponse._response,\n contentType: rawBatchResponse.contentType,\n errorCode: rawBatchResponse.errorCode,\n requestId: rawBatchResponse.requestId,\n clientRequestId: rawBatchResponse.clientRequestId,\n version: rawBatchResponse.version,\n subResponses: responseSummary.subResponses,\n subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,\n subResponsesFailedCount: responseSummary.subResponsesFailedCount,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js new file mode 100644 index 0000000..567242b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// This file is used as a shim of "BlobDownloadResponse" for some browser bundlers +// when trying to bundle "BlobDownloadResponse" +// "BlobDownloadResponse" class is only available in Node.js runtime +export const BlobDownloadResponse = 1; +//# sourceMappingURL=BlobDownloadResponse.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map new file mode 100644 index 0000000..f1a82af --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobDownloadResponse.browser.js","sourceRoot":"","sources":["../../../src/BlobDownloadResponse.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,kFAAkF;AAClF,+CAA+C;AAC/C,oEAAoE;AACpE,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This file is used as a shim of \"BlobDownloadResponse\" for some browser bundlers\n// when trying to bundle \"BlobDownloadResponse\"\n// \"BlobDownloadResponse\" class is only available in Node.js runtime\nexport const BlobDownloadResponse = 1;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js new file mode 100644 index 0000000..2c8c0e1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js @@ -0,0 +1,463 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode } from "@azure/core-http"; +import { RetriableReadableStream, } from "./utils/RetriableReadableStream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) + * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +export class BlobDownloadResponse { + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. 
+ * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * Returns the date and time the blob was created. + * + * @readonly + */ + get createdOn() { + return this.originalResponse.createdOn; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. 
+ * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. + * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. 
+ */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobDownloadResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map new file mode 100644 index 0000000..8ce9512 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobDownloadResponse.js","sourceRoot":"","sources":["../../../src/BlobDownloadResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAgB,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAYxD,OAAO,EAEL,uBAAuB,GAExB,MAAM,iCAAiC,CAAC;AAEzC;;;;;;;;;GASG;AACH,MAAM,OAAO,oBAAoB;IAse/B;;;;;;;;OAQG;IACH,YACE,gBAA4C,EAC5C,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,UAA0C,EAAE;QAE5C,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,uBAAuB,CACnD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,MAAM,EACN,MAAM,EACN,KAAK,EACL,OAAO,CACR,CAAC;IACJ,CAAC;IA7fD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;QAChC,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,g
BAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,IAAW,gBAAgB;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,gBAAgB,CAAC;IAChD,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,IAAW,oCAAoC;QAC7C,OAAO,IAAI,CAAC,gBAAgB,CAAC,oCAAoC,CAAC;IACpE,CAAC;IAED;;;;OAIG;IACH,IAAW,iCAAiC;QAC1C,OAAO,IAAI,CAAC,gBAAgB,CAAC,iCAAiC,CAAC;IACjE,CAAC;IAED;;;;OAIG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,IAAW,2BAA2B;QACpC,OAAO,IAAI,CAAC,gBAAgB,CAAC,2BAA2B,CAAC;IAC3D,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB;QAC/B,OAAO,IAAI,CAAC,gBAAgB,CAAC,sBAAsB,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,kBAAkB;QAC3B,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,CAAC;IACtD,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CA8BF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { HttpResponse, isNode } from \"@azure/core-http\";\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\n\nimport {\n BlobDownloadHeaders,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n} from \"./generatedModels\";\nimport { BlobDownloadResponseParsed, Metadata, ObjectReplicationPolicy } from \"./models\";\nimport {\n ReadableStreamGetter,\n RetriableReadableStream,\n RetriableReadableStreamOptions,\n} from \"./utils/RetriableReadableStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will\n * automatically retry when internal read stream unexpected ends. 
(This kind of unexpected ends cannot\n * trigger retries defined in pipeline retry policy.)\n *\n * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js\n * Readable stream.\n */\nexport class BlobDownloadResponse implements BlobDownloadResponseParsed {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. 
This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return this.originalResponse.copyCompletedOn;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. 
This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The number of tags associated with the blob\n *\n * @readonly\n */\n public get tagCount(): number | undefined {\n return this.originalResponse.tagCount;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * Returns the UTC date and time generated by the service that indicates the time at which the blob was\n * last read or written to.\n *\n * @readonly\n */\n public get lastAccessed(): Date | undefined {\n return this.originalResponse.lastAccessed;\n }\n\n /**\n * Returns the date and time the blob was created.\n *\n * @readonly\n */\n public get createdOn(): Date | undefined {\n return this.originalResponse.createdOn;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the Blob service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * Indicates the versionId of the downloaded blob version.\n *\n * @readonly\n */\n public get versionId(): string | undefined {\n return this.originalResponse.versionId;\n }\n\n /**\n * Indicates whether version of this blob is a current version.\n *\n * @readonly\n */\n public get isCurrentVersion(): boolean | undefined {\n 
return this.originalResponse.isCurrentVersion;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * Object Replication Policy Id of the destination blob.\n *\n * @readonly\n */\n public get objectReplicationDestinationPolicyId(): string | undefined {\n return this.originalResponse.objectReplicationDestinationPolicyId;\n }\n\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n *\n * @readonly\n */\n public get objectReplicationSourceProperties(): ObjectReplicationPolicy[] | undefined {\n return this.originalResponse.objectReplicationSourceProperties;\n }\n\n /**\n * If this blob has been sealed.\n *\n * @readonly\n */\n public get isSealed(): boolean | undefined {\n return this.originalResponse.isSealed;\n }\n\n /**\n * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire.\n *\n * @readonly\n */\n public get immutabilityPolicyExpiresOn(): Date | undefined {\n return this.originalResponse.immutabilityPolicyExpiresOn;\n }\n\n /**\n * Indicates immutability policy mode.\n *\n * @readonly\n */\n public get immutabilityPolicyMode(): BlobImmutabilityPolicyMode | undefined {\n return this.originalResponse.immutabilityPolicyMode;\n }\n\n /**\n * Indicates if a legal hold is present on the blob.\n *\n * @readonly\n */\n public get legalHold(): boolean | undefined {\n return this.originalResponse.legalHold;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get contentAsBlob(): Promise | undefined {\n return this.originalResponse.blobBody;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will automatically retry when internal read stream unexpected ends.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobDownloadResponseParsed;\n private blobDownloadStream?: RetriableReadableStream;\n\n /**\n * Creates an instance of BlobDownloadResponse.\n *\n * @param originalResponse -\n * @param getter -\n * @param offset -\n * @param count -\n * @param options -\n */\n public constructor(\n originalResponse: BlobDownloadResponseParsed,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new RetriableReadableStream(\n this.originalResponse.readableStreamBody!,\n getter,\n offset,\n count,\n options\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js new file mode 100644 index 0000000..796c15b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { generateUuid } from "@azure/core-http"; +import { StorageClientContext } from "./generated/src/index"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { Blob as StorageBlob, Container } from "./generated/src/operations"; +import { ETagNone } from "./utils/constants"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export class BlobLeaseClient { + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = new Container(clientContext); + } + else { + this._isContainer = false; + this._containerOrBlobOperation = new StorageBlob(clientContext); + } + if (!leaseId) { + leaseId = generateUuid(); + } + this._leaseId = leaseId; + } + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + this._leaseId = proposedLeaseId; + return response; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. 
+ */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. 
+ */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return await this._containerOrBlobOperation.breakLease(operationOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +//# sourceMappingURL=BlobLeaseClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map new file mode 100644 index 0000000..6ab89f4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BlobLeaseClient.js","sourceRoot":"","sources":["../../../src/BlobLeaseClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,YAAY,EAAgB,MAAM,kBAAkB,CAAC;AAC9D,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAG7D,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,IAAI,IAAI,WAAW,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAG5E,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAqFjF;;GAEG;AACH,MAAM,OAAO,eAAe;IAwB1B;;;;OAIG;IACH,YAAY,MAAoC,EAAE,OAAgB;QAChE,MAAM,aAAa,GAAG,IAAI,oBAAoB,CAC5C,MAAM,CAAC,GAAG,EACT,MAAc,CAAC,QAAQ,CAAC,sBAAsB,EAAE,CAClD,CAAC;QACF,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC;QAEvB,IAAK,MAAqB,CAAC,IAAI,KAAK,SAAS,EAAE;YAC7C,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;YACzB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,aAAa,CAAC,CAAC;SAC/D;aAAM;YACL,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;YAC1B,IAAI,CAAC,yBAAyB,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,CAAC;SACjE;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,YAAY,EAAE,CAAC;SAC1B;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;IAC1B,CAAC;IA1CD;;;;OAIG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,QAAQ,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IA4BD;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,YAAY,CACvB,QAAgB,EAChB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EACR,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EAAE,IAAI,CAAC,QAAQ,IAC3B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,WAAW,CACtB,eAAuB,EACvB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC/D,IAAI,CAAC,QAAQ,EACb,eAAe,kBAEb,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YACF,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC;YAChC,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,YAAY,CAAC,UAAiC,EAAE;;QAC3D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CA
C1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,kBACpE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,UAAU,CAAC,UAAiC,EAAE;;QACzD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,kBAClE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,WAAmB,EACnB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,MAAM,gBAAgB,mBACpB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EACX,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,CACtD,CAAC;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;SAC1E;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { generateUuid, HttpResponse } from \"@azure/core-http\";\nimport { StorageClientContext } from \"./generated/src/index\";\nimport { ContainerBreakLeaseOptionalParams } from \"./generatedModels\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Blob as StorageBlob, Container } from \"./generated/src/operations\";\nimport { ModifiedAccessConditions } from \"./models\";\nimport { CommonOptions } from \"./StorageClient\";\nimport { ETagNone } from \"./utils/constants\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobClient } from \"./Clients\";\nimport { ContainerClient } from \"./ContainerClient\";\n\n/**\n * The details for a specific lease.\n */\nexport interface Lease {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally. If the request version is 2011-08-18 or\n * newer, the ETag value will be in quotes.\n */\n etag?: string;\n /**\n * Returns the date and time the container was\n * last modified. 
Any operation that modifies the blob, including an update\n * of the blob's metadata or properties, changes the last-modified time of\n * the blob.\n */\n lastModified?: Date;\n /**\n * Uniquely identifies a container's lease\n */\n leaseId?: string;\n /**\n * Approximate time remaining in the lease\n * period, in seconds.\n */\n leaseTime?: number;\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n */\n requestId?: string;\n /**\n * Indicates the version of the Blob service used\n * to execute the request. This header is returned for requests made against\n * version 2009-09-19 and above.\n */\n version?: string;\n /**\n * UTC date/time value generated by the service that\n * indicates the time at which the response was initiated\n */\n date?: Date;\n /**\n * Error code if any associated with the response that returned\n * the Lease information.\n */\n errorCode?: string;\n}\n\n/**\n * Contains the response data for operations that create, modify, or delete a lease.\n *\n * See {@link BlobLeaseClient}.\n */\nexport type LeaseOperationResponse = Lease & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: Lease;\n };\n};\n\n/**\n * Configures lease operations.\n */\nexport interface LeaseOperationOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.\n */\nexport class BlobLeaseClient {\n private _leaseId: string;\n private _url: string;\n private _containerOrBlobOperation: Container | StorageBlob;\n private _isContainer: boolean;\n\n /**\n * Gets the lease Id.\n *\n * @readonly\n */\n public get leaseId(): string {\n return this._leaseId;\n }\n\n /**\n * Gets the url.\n *\n * @readonly\n */\n public get url(): string {\n return this._url;\n }\n\n /**\n * Creates an instance of BlobLeaseClient.\n * @param client - The client to make the lease operation requests.\n * @param leaseId - Initial proposed lease id.\n */\n constructor(client: ContainerClient | BlobClient, leaseId?: string) {\n const clientContext = new StorageClientContext(\n client.url,\n (client as any).pipeline.toServiceClientOptions()\n );\n this._url = client.url;\n\n if ((client as BlobClient).name === undefined) {\n this._isContainer = true;\n this._containerOrBlobOperation = new Container(clientContext);\n } else {\n this._isContainer = false;\n this._containerOrBlobOperation = new StorageBlob(clientContext);\n }\n\n if (!leaseId) {\n leaseId = generateUuid();\n }\n this._leaseId = leaseId;\n }\n\n /**\n * Establishes and manages a lock on a container for delete operations, or on a blob\n * for write and delete operations.\n * The lock duration can be 15 to 60 seconds, or can be infinite.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param duration - Must be between 15 to 60 seconds, or infinite (-1)\n * @param options - option to configure lease management operations.\n * @returns Response data for acquire lease operation.\n */\n public 
async acquireLease(\n duration: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-acquireLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.acquireLease({\n abortSignal: options.abortSignal,\n duration,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n proposedLeaseId: this._leaseId,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To change the ID of the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param proposedLeaseId - the proposed new lease Id.\n * @param options - option to configure lease management operations.\n * @returns Response data for change lease operation.\n */\n public async changeLease(\n proposedLeaseId: string,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-changeLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const response = await this._containerOrBlobOperation.changeLease(\n this._leaseId,\n proposedLeaseId,\n {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n this._leaseId = proposedLeaseId;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To free the lease if it is no longer needed so that another client may\n * immediately acquire a lease against the container or the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - option to configure lease management operations.\n * @returns Response data for release lease operation.\n */\n public async releaseLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-releaseLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.releaseLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To renew the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - Optional option to configure lease management operations.\n * @returns Response data for renew lease operation.\n */\n public async renewLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-renewLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.renewLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To end the lease but ensure that another client cannot acquire a new lease\n * until the current lease period has expired.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param breakPeriod - Break period\n * @param options - Optional options to configure lease management operations.\n * @returns Response data for break lease operation.\n */\n public async breakLease(\n breakPeriod: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-breakLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const operationOptions: ContainerBreakLeaseOptionalParams = {\n abortSignal: options.abortSignal,\n breakPeriod,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n };\n return await this._containerOrBlobOperation.breakLease(operationOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js new file mode 100644 index 0000000..f78c5b5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js @@ -0,0 +1,362 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN BROWSER RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in browser runtime it will + * parse avor data returned by blob query. + */ +export class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, _options = {}) { + this.originalResponse = originalResponse; + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. 
+ * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. 
+ * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
+ * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + throw Error(`Quick query in browser is not supported yet.`); + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * @readonly + */ + get readableStreamBody() { + return undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobQueryResponse.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map new file mode 100644 index 0000000..6d4069e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobQueryResponse.browser.js","sourceRoot":"","sources":["../../../src/BlobQueryResponse.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAiBlC;;;;;GAKG;AACH,MAAM,OAAO,iBAAiB;IA+X5B;;;;;OAKG;IACH,YACE,gBAAwC,EACxC,WAAwC,EAAE;QAE1C,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;IAC3C,CAAC;IAzYD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;QAChC,
OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,MAAM,KAAK,CAAC,8CAA8C,CAAC,CAAC;IAC9D,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CAgBF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN BROWSER RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in browser runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. 
Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. 
Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. 
This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n throw Error(`Quick query in browser is not supported yet.`);\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n _options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js new file mode 100644 index 0000000..91dac64 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js @@ -0,0 +1,367 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode } from "@azure/core-http"; +import { BlobQuickQueryStream } from "./utils/BlobQuickQueryStream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +export class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. 
Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
+ * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobQueryResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map new file mode 100644 index 0000000..44f63e6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobQueryResponse.js","sourceRoot":"","sources":["../../../src/BlobQueryResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAgB,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAaxD,OAAO,EAAE,oBAAoB,EAA+B,MAAM,8BAA8B,CAAC;AAEjG;;;;;GAKG;AACH,MAAM,OAAO,iBAAiB;IAkY5B;;;;;OAKG;IACH,YACE,gBAAwC,EACxC,UAAuC,EAAE;QAEzC,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,oBAAoB,CAChD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,OAAO,CACR,CAAC;IACJ,CAAC;IAhZD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAKG;IACH
,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;QAChC,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,kBAAkB;QAC3B,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,CAAC;IACtD,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CAqBF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse, isNode } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStream, BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. 
This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. 
Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n return undefined;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will parse avor data returned by blob query.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n private blobDownloadStream?: BlobQuickQueryStream;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new BlobQuickQueryStream(\n this.originalResponse.readableStreamBody!,\n options\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js new file mode 100644 index 0000000..5095435 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js @@ -0,0 +1,786 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { isTokenCredential, isNode, getDefaultProxySettings, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { Container, Service } from "./generated/src/operations"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { ContainerClient, } from "./ContainerClient"; +import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, toTags, } from "./utils/utils.common"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import "@azure/core-paging"; +import { truncatedISO8061Date } from "./utils/utils.common"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { BlobBatchClient } from "./BlobBatchClient"; +import { StorageClient } from "./StorageClient"; +import { AccountSASPermissions } from "./sas/AccountSASPermissions"; +import { generateAccountSASQueryParameters } from "./sas/AccountSASSignatureValues"; +import { AccountSASServices } from "./sas/AccountSASServices"; +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export class BlobServiceClient extends StorageClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = new Service(this.storageClientContext); + } + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. 
+ */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); + try { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, + deletedContainerVersion }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); + try { + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); + try { + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. 
+ */ + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); + try { + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); + try { + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); + try { + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); + try { + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? 
[options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); + try { + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. 
+ * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield __await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield __await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker, options = {}) { + return __asyncGenerator(this, arguments, function* listSegments_1() { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield __await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield __await(yield __await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems(options = {}) { + return __asyncGenerator(this, arguments, function* listItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. 
+ * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); + try { + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. 
The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } +} +//# sourceMappingURL=BlobServiceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map new file mode 100644 index 0000000..4645341 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobServiceClient.js","sourceRoot":"","sources":["../../../src/BlobServiceClient.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAEL,iBAAiB,EACjB,MAAM,EAEN,uBAAuB,GACxB,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAsBrD,OAAO,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,4BAA4B,CAAC;AAChE,OAAO,EAAE,WAAW,EAAwC,cAAc,EAAE,MAAM,YAAY,CAAC;AAC/F,OAAO,EACL,eAAe,GAGhB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,4BAA4B,EAC5B,MAAM,GACP,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,oBAAoB,CAAC;AAE5B,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAC5D,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,qBAAqB,EAAE,MAAM,6BAA6B,CAAC;AAGpE,OAAO,EAAE,iCAAiC,EAAE,MAAM,iCAAiC,CAAC;AACpF,OAAO,EAAE,kBAAkB,EAAE,MAAM,0BAA0B,CAAC;AA4S9D;;;GAGG;AACH,MAAM,OAAO,iBAAkB,SAAQ,aAAa;IAuGlD,YACE,GAAW,EACX,oBAIgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;SACjC;aAAM,IACL,CAAC,MAAM,IAAI,oBAAoB,YAAY,0BAA0B,CAAC;YACtE,oBAAoB,YAAY,mBAAmB;YACnD,iBAAiB,CAAC,oBAAoB,CAAC,EACvC;YACA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;SACvD;aAAM;YACL,8DAA8D;YAC9D,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAC/D,CAAC;IA3HD;;;;;;;;;;;OAWG;IACI,MAAM,CAAC,oBAAoB,CAChC,gBAAwB;IACxB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,MAAM,cAAc,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QACtE,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;YAC/C,IAAI,MAAM,EAAE;gBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;gBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;oBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;iBACzE;gBAED,MAAM,QAAQ,GAAG,WAAW,CAAC,
mBAAmB,EAAE,OAAO,CAAC,CAAC;gBAC3D,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;SACF;aAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;YAClD,MAAM,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;YACjE,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,GAAG,GAAG,GAAG,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;SAC9F;aAAM;YACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;SACH;IACH,CAAC;IAiFD;;;;;;;;;;;OAWG;IACI,kBAAkB,CAAC,aAAqB;QAC7C,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EAC5D,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,eAAe,CAC1B,aAAqB,EACrB,UAAkC,EAAE;QAKpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,MAAM,uBAAuB,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC7E,OAAO;gBACL,eAAe;gBACf,uBAAuB;aACxB,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,eAAe,CAC1B,aAAqB,EACrB,UAAwC,EAAE;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,OAAO,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;SACrD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,iBAAiB,CAC5B,oBAA4B,EAC5B,uBAA+B,EAC/B,UAA2C,EAAE;QAK7C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAC7C,OAAO,CAAC,wBAAwB,IAAI,oBAAoB,CACzD,CAAC;YACF,qCAAqC;YACrC,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,yBAAyB,GAAG,MAAM,gBAAgB,CAAC,OAAO,iBAC9D,oBAAoB;gBACpB,uBAAuB,IACpB,cAAc,EACjB,CAAC;YACH,OAAO,EAAE,eAAe,EAAE,yBAAyB,EAAE,CAAC;SACvD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACH,gEAAgE;IAChE,8HAA8H;IACtH,KAAK,CAAC,eAAe,CAC3B,mBAA2B,EAC3B,wBAAgC,EAChC,UAAyC,EAAE;;QAK3C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,wBAAwB,CAAC,CAAC;YAC1E,qCAAqC;YACrC,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,uBAAuB,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,mBAAmB,kCAC5E,cAAc,KACjB,aAAa,EAAE,MAAA,OAAO,CAAC,eAAe,0CAAE,OAAO,IAC/C,CAAC;YACH,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,CAAC;SACrD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,aAAa,CACxB,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,UAAiC,EACjC,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCA
AiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,CAAC,UAAU,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,cAAc,CACzB,UAAwC,EAAE;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,cAAc,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACK,KAAK,CAAC,qBAAqB,CACjC,MAAe,EACf,UAA+C,EAAE;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAEhG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,qBAAqB,6CACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,IACH,OAAO,KACV,OAAO,EAAE,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,KAC/E,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;OAiBG;IACK,KAAK,CAAC,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QAEF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,WAAW,iBACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,MAAA,IAAI,CAAC,IAAI,0CAAE,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;qBAC1C;oBACD,uCAAY,IAAI,KAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,IAAG;gBACxD,CAAC,CAAC,GACH,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACY,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE;;YAElD,IAAI,QAAQ,CAAC;YACb,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,QAAQ,GAAG,cAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;oBACtC,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,oBAAM,QAAQ,CAAA,CAAC;iBAChB,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;;OAQG;IACY,oBAAoB,CACjC,sBAA8B,EAC9B,UAAgD,EAAE;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,OAAO,
CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;iBACtB;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA8EG;IACI,eAAe,CACpB,sBAA8B,EAC9B,UAAwC,EAAE;QAE1C,8CAA8C;QAC9C,MAAM,kBAAkB,qBACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,kBACpF,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACY,YAAY,CACzB,MAAe,EACf,UAA+C,EAAE;;YAEjD,IAAI,6BAA6B,CAAC;YAClC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,6BAA6B,GAAG,cAAM,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBAClF,6BAA6B,CAAC,cAAc;wBAC1C,6BAA6B,CAAC,cAAc,IAAI,EAAE,CAAC;oBACrD,MAAM,GAAG,6BAA6B,CAAC,iBAAiB,CAAC;oBACzD,oBAAM,cAAM,6BAA6B,CAAA,CAAA,CAAC;iBAC3C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;OAIG;IACY,SAAS,CACtB,UAA+C,EAAE;;;YAEjD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,IAAA;oBAAnD,MAAM,OAAO,WAAA,CAAA;oBACtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,OAAO,CAAC,cAAc,CAAA,CAAA,CAAA,CAAC;iBAC/B;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAyEG;IACI,cAAc,CACnB,UAAwC,EAAE;QAE1C,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,OAAO,GAAgC,EAAE,CAAC;QAChD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;SACxB;QAED,mDAAmD;QACnD,MAAM,kBAAkB,mCACnB,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAC3C,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC;QAChD,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,kBACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,oBAAoB,CAC/B,QAAc,EACd,SAAe,EACf,UAA8C,EAAE;QAEhD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAC/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,oBAAoB,CAC7D;gBACE,QAAQ,EAAE,oBAAoB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBAC/C,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE,KAAK,CAAC;aAClD,kBAEC,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YAEF,MAAM,iBAAiB,GAAG;gBACxB,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC;gBACjD,eAAe,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC;gBACnD,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,KAAK,EAAE,QAAQ,CAAC,KAAK;aACtB,CAAC;YAEF,MAAM,GAAG,mBACP,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe,EACzC,OAAO,EAAE,QAAQ,CAAC,OAAO,EACzB,IAAI,EAAE,QAAQ,CAAC,IAAI,EACnB,SAAS,EAAE,QAAQ,CAAC,SAAS,IAC1B,iBAAiB,CACrB,CAAC;YAEF,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAA
E,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,qBAAqB,CAC1B,SAAgB,EAChB,cAAqC,qBAAqB,CAAC,KAAK,CAAC,GAAG,CAAC,EACrE,gBAAwB,KAAK,EAC7B,UAA+C,EAAE;QAEjD,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;YAC5D,MAAM,UAAU,CACd,+FAA+F,CAChG,CAAC;SACH;QAED,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;YACvB,SAAS,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC;SACnD;QAED,MAAM,GAAG,GAAG,iCAAiC,iBAEzC,WAAW;YACX,SAAS;YACT,aAAa,EACb,QAAQ,EAAE,kBAAkB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,IAC/C,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;QAEb,OAAO,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IACzC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport {\n TokenCredential,\n isTokenCredential,\n isNode,\n HttpResponse,\n getDefaultProxySettings,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n ServiceGetUserDelegationKeyHeaders,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ServiceGetPropertiesResponse,\n BlobServiceProperties,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentResponse,\n ContainerItem,\n UserDelegationKeyModel,\n ContainerUndeleteResponse,\n FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n ContainerRenameResponse,\n LeaseAccessConditions,\n FilterBlobSegment,\n FilterBlobItem,\n} from \"./generatedModels\";\nimport { Container, Service } from \"./generated/src/operations\";\nimport { newPipeline, StoragePipelineOptions, PipelineLike, isPipelineLike } from \"./Pipeline\";\nimport {\n ContainerClient,\n ContainerCreateOptions,\n ContainerDeleteMethodOptions,\n} from \"./ContainerClient\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n toTags,\n} from \"./utils/utils.common\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport \"@azure/core-paging\";\nimport { PageSettings, PagedAsyncIterableIterator } from \"@azure/core-paging\";\nimport { truncatedISO8061Date } from \"./utils/utils.common\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { AccountSASPermissions } from \"./sas/AccountSASPermissions\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateAccountSASQueryParameters } from \"./sas/AccountSASSignatureValues\";\nimport { AccountSASServices } from \"./sas/AccountSASServices\";\nimport { ListContainersIncludeType } from \"./generated/src\";\n\n/**\n * Options to configure the {@link BlobServiceClient.getProperties} operation.\n */\nexport interface ServiceGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.setProperties} operation.\n */\nexport interface ServiceSetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal 
the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getAccountInfo} operation.\n */\nexport interface ServiceGetAccountInfoOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getStatistics} operation.\n */\nexport interface ServiceGetStatisticsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the Service - Get User Delegation Key.\n */\nexport interface ServiceGetUserDelegationKeyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainerSegment} operation.\n */\ninterface ServiceListContainersSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify that the container's metadata be returned as part of the response\n * body. 
Possible values include: 'metadata'\n */\n include?: ListContainersIncludeType | ListContainersIncludeType[];\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainers} operation.\n */\nexport interface ServiceListContainersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies whether the container's metadata\n * should be returned as part of the response body.\n */\n includeMetadata?: boolean;\n\n /**\n * Specifies whether soft deleted containers should be included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether system containers should be included in the response.\n */\n includeSystem?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTagsSegment} operation.\n */\ninterface ServiceFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ServiceFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ServiceFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A user delegation key.\n */\nexport interface UserDelegationKey {\n /**\n * The Azure Active Directory object ID in GUID format.\n */\n signedObjectId: string;\n /**\n * The Azure Active Directory tenant ID in GUID format.\n */\n signedTenantId: string;\n /**\n * The date-time the key is active.\n */\n signedStartsOn: Date;\n /**\n * The date-time the key expires.\n */\n signedExpiresOn: Date;\n /**\n * Abbreviation of the Azure Storage service that accepts the key.\n */\n signedService: string;\n /**\n * The service version that created the key.\n */\n signedVersion: string;\n /**\n * The key as a base64 string.\n */\n value: string;\n}\n\n/**\n * 
Contains response data for the {@link getUserDelegationKey} operation.\n */\nexport declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey &\n ServiceGetUserDelegationKeyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: UserDelegationKeyModel;\n };\n };\n\n/**\n * Options to configure {@link BlobServiceClient.undeleteContainer} operation.\n */\nexport interface ServiceUndeleteContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies the new name of the restored container.\n * Will use its original name if this is not specified.\n * @deprecated Restore container to a different name is not supported by service anymore.\n */\n destinationContainerName?: string;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.renameContainer} operation.\n */\nexport interface ServiceRenameContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Condition to meet for the source container.\n */\n sourceCondition?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation.\n */\nexport interface ServiceGenerateAccountSasUrlOptions {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you\n * to manipulate blob containers.\n */\nexport class BlobServiceClient extends StorageClient {\n /**\n * serviceContext provided by protocol layer.\n */\n private serviceContext: Service;\n\n /**\n *\n * Creates an instance of BlobServiceClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n public static fromConnectionString(\n connectionString: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ): BlobServiceClient {\n options = options || {};\n const extractedCreds = extractConnectionStringParts(connectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n const pipeline = newPipeline(sharedKeyCredential, options);\n return new BlobServiceClient(extractedCreds.url, pipeline);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n const pipeline = newPipeline(new AnonymousCredential(), options);\n return new BlobServiceClient(extractedCreds.url + \"?\" + extractedCreds.accountSas, pipeline);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n }\n\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n *\n * Example using DefaultAzureCredential from `@azure/identity`:\n *\n * ```js\n * const account = \"\";\n *\n * const defaultAzureCredential = new DefaultAzureCredential();\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * defaultAzureCredential\n * );\n * ```\n *\n * Example using an account name/key:\n *\n * ```js\n * const account = \"\"\n * const sharedKeyCredential = new StorageSharedKeyCredential(account, \"\");\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * sharedKeyCredential\n * );\n * ```\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (\n (isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||\n credentialOrPipeline instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipeline)\n ) {\n pipeline = newPipeline(credentialOrPipeline, options);\n } else {\n // The second parameter is undefined. Use anonymous credential\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n super(url, pipeline);\n this.serviceContext = new Service(this.storageClientContext);\n }\n\n /**\n * Creates a {@link ContainerClient} object\n *\n * @param containerName - A container name\n * @returns A new ContainerClient object for the given container name.\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * ```\n */\n public getContainerClient(containerName: string): ContainerClient {\n return new ContainerClient(\n appendToURLPath(this.url, encodeURIComponent(containerName)),\n this.pipeline\n );\n }\n\n /**\n * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param containerName - Name of the container to create.\n * @param options - Options to configure Container Create operation.\n * @returns Container creation response and the corresponding container client.\n */\n public async createContainer(\n containerName: string,\n options: ContainerCreateOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerCreateResponse: ContainerCreateResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-createContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n const containerCreateResponse = await containerClient.create(updatedOptions);\n return {\n containerClient,\n containerCreateResponse,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Deletes a Blob container.\n *\n * @param containerName - Name of the container to delete.\n * @param options - Options to configure Container Delete operation.\n * @returns Container deletion response.\n */\n public async deleteContainer(\n containerName: string,\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-deleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n return await containerClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restore a previously deleted Blob container.\n * This API is only functional if Container Soft Delete is enabled for the storage 
account associated with the container.\n *\n * @param deletedContainerName - Name of the previously deleted container.\n * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.\n * @param options - Options to configure Container Restore operation.\n * @returns Container deletion response.\n */\n public async undeleteContainer(\n deletedContainerName: string,\n deletedContainerVersion: string,\n options: ServiceUndeleteContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerUndeleteResponse: ContainerUndeleteResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-undeleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(\n options.destinationContainerName || deletedContainerName\n );\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerUndeleteResponse = await containerContext.restore({\n deletedContainerName,\n deletedContainerVersion,\n ...updatedOptions,\n });\n return { containerClient, containerUndeleteResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Rename an existing Blob Container.\n *\n * @param sourceContainerName - The name of the source container.\n * @param destinationContainerName - The new name of the container.\n * @param options - Options to configure Container Rename operation.\n */\n /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */\n // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.\n private async renameContainer(\n sourceContainerName: string,\n destinationContainerName: string,\n options: ServiceRenameContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerRenameResponse: ContainerRenameResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-renameContainer\", options);\n try {\n const containerClient = this.getContainerClient(destinationContainerName);\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerRenameResponse = await containerContext.rename(sourceContainerName, {\n ...updatedOptions,\n sourceLeaseId: options.sourceCondition?.leaseId,\n });\n return { containerClient, containerRenameResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the properties of a storage account’s Blob service, including properties\n * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * @param options - Options to the Service Get Properties operation.\n * @returns Response data for the Service Get Properties operation.\n */\n public async getProperties(\n options: ServiceGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getProperties\", options);\n try {\n return await this.serviceContext.getProperties({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n 
});\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets properties for a storage account’s Blob service endpoint, including properties\n * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties\n *\n * @param properties -\n * @param options - Options to the Service Set Properties operation.\n * @returns Response data for the Service Set Properties operation.\n */\n public async setProperties(\n properties: BlobServiceProperties,\n options: ServiceSetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-setProperties\", options);\n try {\n return await this.serviceContext.setProperties(properties, {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only\n * available on the secondary location endpoint when read-access geo-redundant\n * replication is enabled for the storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats\n *\n * @param options - Options to the Service Get Statistics operation.\n * @returns Response data for the Service Get Statistics operation.\n */\n public async getStatistics(\n options: ServiceGetStatisticsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getStatistics\", options);\n try {\n return await this.serviceContext.getStatistics({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Get Account Information operation returns the sku name and account kind\n * for the specified account.\n * The Get Account Information operation is available on service versions beginning\n * with version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information\n *\n * @param options - Options to the Service Get Account Info operation.\n * @returns Response data for the Service Get Account Info operation.\n */\n public async getAccountInfo(\n options: ServiceGetAccountInfoOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getAccountInfo\", options);\n try {\n return await this.serviceContext.getAccountInfo({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns a list of the containers under the specified account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. 
The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to the Service List Container Segment operation.\n * @returns Response data for the Service List Container Segment operation.\n */\n private async listContainersSegment(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-listContainersSegment\", options);\n\n try {\n return await this.serviceContext.listContainersSegment({\n abortSignal: options.abortSignal,\n marker,\n ...options,\n include: typeof options.include === \"string\" ? [options.include] : options.include,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags\n * match a given search expression. Filter blobs searches across all containers within a\n * storage account but can be scoped within the expression to a single container.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"BlobServiceClient-findBlobsByTagsSegment\",\n options\n );\n\n try {\n const response = await this.serviceContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ServiceFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter 
enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ServiceFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list containers operation.\n */\n private async *listSegments(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let listContainersSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listContainersSegmentResponse = await this.listContainersSegment(marker, options);\n listContainersSegmentResponse.containerItems =\n listContainersSegmentResponse.containerItems || [];\n marker = listContainersSegmentResponse.continuationToken;\n yield await listContainersSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for Container Items\n *\n * @param options - Options to list containers operation.\n */\n private async *listItems(\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.listSegments(marker, options)) {\n yield* segment.containerItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the containers\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the containers in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const container of blobServiceClient.listContainers()) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.listContainers();\n * let containerItem = await iter.next();\n * while (!containerItem.done) {\n * console.log(`Container ${i++}: ${containerItem.value.name}`);\n * containerItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .listContainers()\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * ```\n *\n * @param options - Options to list containers.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listContainers(\n options: ServiceListContainersOptions = {}\n ): PagedAsyncIterableIterator {\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const include: ListContainersIncludeType[] = [];\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSystem) {\n 
include.push(\"system\");\n }\n\n // AsyncIterableIterator to iterate over containers\n const listSegmentOptions: ServiceListContainersSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include } : {}),\n };\n\n const iter = this.listItems(listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).\n *\n * Retrieves a user delegation key for the Blob service. This is only a valid operation when using\n * bearer token authentication.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key\n *\n * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time\n * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time\n */\n public async getUserDelegationKey(\n startsOn: Date,\n expiresOn: Date,\n options: ServiceGetUserDelegationKeyOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getUserDelegationKey\", options);\n try {\n const response = await this.serviceContext.getUserDelegationKey(\n {\n startsOn: truncatedISO8061Date(startsOn, false),\n expiresOn: truncatedISO8061Date(expiresOn, false),\n },\n {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n const userDelegationKey = {\n signedObjectId: response.signedObjectId,\n signedTenantId: response.signedTenantId,\n signedStartsOn: new Date(response.signedStartsOn),\n signedExpiresOn: new Date(response.signedExpiresOn),\n signedService: response.signedService,\n signedVersion: response.signedVersion,\n value: response.value,\n };\n\n const res: ServiceGetUserDelegationKeyResponse = {\n _response: response._response,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n version: response.version,\n date: response.date,\n errorCode: response.errorCode,\n ...userDelegationKey,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this service.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n\n /**\n * Only available for BlobServiceClient constructed with a shared key credential.\n *\n * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas\n *\n * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. 
Default to an hour later if not provided.\n * @param permissions - Specifies the list of permissions to be associated with the SAS.\n * @param resourceTypes - Specifies the resource types associated with the shared access signature.\n * @param options - Optional parameters.\n * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateAccountSasUrl(\n expiresOn?: Date,\n permissions: AccountSASPermissions = AccountSASPermissions.parse(\"r\"),\n resourceTypes: string = \"sco\",\n options: ServiceGenerateAccountSasUrlOptions = {}\n ): string {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw RangeError(\n \"Can only generate the account SAS when the client is initialized with a shared key credential\"\n );\n }\n\n if (expiresOn === undefined) {\n const now = new Date();\n expiresOn = new Date(now.getTime() + 3600 * 1000);\n }\n\n const sas = generateAccountSASQueryParameters(\n {\n permissions,\n expiresOn,\n resourceTypes,\n services: AccountSASServices.parse(\"b\").toString(),\n ...options,\n },\n this.credential\n ).toString();\n\n return appendToURLQuery(this.url, sas);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js new file mode 100644 index 0000000..37cafbc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js @@ -0,0 +1,2757 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +import { generateUuid, getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { BlobDownloadResponse } from "./BlobDownloadResponse"; +import { BlobQueryResponse } from "./BlobQueryResponse"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from "./generated/src/operations"; +import { ensureCpkIfSpecified, toAccessTier, } from "./models"; +import { rangeResponseFromModel, } from "./PageBlobRangeResponse"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { BlobBeginCopyFromUrlPoller, } from "./pollers/BlobStartCopyFromUrlPoller"; +import { rangeToString } from "./Range"; +import { StorageClient } from "./StorageClient"; +import { Batch } from "./utils/Batch"; +import { BufferScheduler } from "../../storage-common/src"; +import { BlobDoesNotUseCustomerSpecifiedEncryption, BlobUsesCustomerSpecifiedEncryptionMsg, BLOCK_BLOB_MAX_BLOCKS, BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES, BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES, DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES, DEFAULT_BLOCK_BUFFER_SIZE_BYTES, DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS, ETagAny, URLConstants, } from "./utils/constants"; +import { createSpan, convertTracingToRequestOptionsBase } from "./utils/tracing"; +import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, ExtractPageRangeInfoItems, generateBlockID, getURLParameter, httpAuthorizationToString, isIpEndpointStyle, parseObjectReplicationRecord, setURLParameter, toBlobTags, toBlobTagsString, toQuerySerialization, toTags, } from "./utils/utils.common"; +import { fsCreateReadStream, fsStat, readStreamToLocalFile, streamToBuffer, } from "./utils/utils.node"; +import { 
generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues"; +import { BlobLeaseClient } from "./BlobLeaseClient"; +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export class BlobClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" 
+ + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new StorageBlob(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. 
+ */ + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. 
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + try { + return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + try { + const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. 
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. 
+ */ + async abortCopyFromURL(copyId, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + try { + return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. 
+ */ + async setAccessTier(tier, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. 
+ response.blobDownloadStream = undefined; + return response; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
+ */ + async startCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options) { + const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + try { + return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. 
+ */ + async setImmutabilityPolicy(immutabilityPolicy, options) { + const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + try { + return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options) { + const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + try { + return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. 
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = new AppendBlob(this.storageClientContext); + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new StorageBlob(this.storageClientContext); + } + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
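+ *
+ * Example usage (an illustrative sketch, not exhaustive; assumes an existing
+ * `blockBlobClient`, a `sourceURL` that is public or carries a SAS token, and a
+ * Node.js runtime for `Buffer`):
+ *
+ * ```js
+ * // Stage one block read from the source URL, then commit it as the blob's content.
+ * const blockId = Buffer.from("block-000001").toString("base64");
+ * await blockBlobClient.stageBlockFromURL(blockId, sourceURL);
+ * await blockBlobClient.commitBlockList([blockId]);
+ * ```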
+ */
+ async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) {
+ const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options);
+ try {
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions)));
+ }
+ catch (e) {
+ span.setStatus({
+ code: SpanStatusCode.ERROR,
+ message: e.message,
+ });
+ throw e;
+ }
+ finally {
+ span.end();
+ }
+ }
+ /**
+ * Writes a blob by specifying the list of block IDs that make up the blob.
+ * In order to be written as part of a blob, a block must have been successfully written
+ * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
+ * update a blob by uploading only those blocks that have changed, then committing the new and existing
+ * blocks together. Any blocks not specified in the block list are permanently deleted.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list
+ *
+ * @param blocks - Array of 64-byte values that are base64-encoded
+ * @param options - Options to the Block Blob Commit Block List operation.
+ * @returns Response data for the Block Blob Commit Block List operation.
+ */
+ async commitBlockList(blocks, options = {}) {
+ var _a, _b, _c;
+ options.conditions = options.conditions || {};
+ const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options);
+ try {
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)));
+ }
+ catch (e) {
+ span.setStatus({
+ code: SpanStatusCode.ERROR,
+ message: e.message,
+ });
+ throw e;
+ }
+ finally {
+ span.end();
+ }
+ }
+ /**
+ * Returns the list of blocks that have been uploaded as part of a block blob
+ * using the specified block list filter.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list
+ *
+ * @param listType - Specifies whether to return the list of committed blocks,
+ * the list of uncommitted blocks, or both lists together.
+ * @param options - Options to the Block Blob Get Block List operation.
+ * @returns Response data for the Block Blob Get Block List operation.
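+ *
+ * Example usage (an illustrative sketch; assumes an existing `blockBlobClient` that
+ * already has staged and/or committed blocks):
+ *
+ * ```js
+ * // List committed and uncommitted blocks and print their ids.
+ * const blockList = await blockBlobClient.getBlockList("all");
+ * for (const block of blockList.committedBlocks) {
+ *   console.log(`Committed block: ${block.name}`);
+ * }
+ * for (const block of blockList.uncommittedBlocks) {
+ *   console.log(`Uncommitted block: ${block.name}`);
+ * }
+ * ```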
+ */ + async getBlockList(listType, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); + try { + if (isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. 
+ */ + async uploadBrowserData(browserData, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); + try { + const browserBlob = new Blob([browserData]); + return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); + try { + if (size <= options.maxSingleShotSize) { + return await this.upload(bodyFactory(0, size), size, updatedOptions); + } + const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = generateUuid(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = options.blockSize * i; + const end = i === numBlocks - 1 ? 
size : start + options.blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + try { + const size = (await fsStat(filePath)).size; + return await this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
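+ *
+ * Example usage (an illustrative sketch, Node.js only; assumes an existing
+ * `blockBlobClient` and a local file path):
+ *
+ * ```js
+ * const fs = require("fs");
+ *
+ * // Upload a readable stream in 4 MB buffers with up to 20 concurrent block uploads.
+ * const readStream = fs.createReadStream("local-file.dat");
+ * await blockBlobClient.uploadStream(readStream, 4 * 1024 * 1024, 20, {
+ *   onProgress: (ev) => console.log(`Uploaded ${ev.loadedBytes} bytes`),
+ * });
+ * ```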
+ */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + try { + let blockNum = 0; + const blockIDPrefix = generateUuid(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + async uploadPages(body, offset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
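+ *
+ * Example usage (an illustrative sketch, Node.js only; assumes an existing
+ * `pageBlobClient` created with a size of at least 1024 bytes):
+ *
+ * ```js
+ * // Write one 512-byte page, then clear it again.
+ * // Offsets and counts must be multiples of 512.
+ * await pageBlobClient.uploadPages(Buffer.alloc(512, "a"), 0, 512);
+ * await pageBlobClient.clearPages(0, 512);
+ * ```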
+ */ + async clearPages(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + try { + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + try { + return await this.pageBlobContext + .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + try { + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + return __asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield __await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield __await(yield __await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItems(offset = 0, count, options = {}) { + return __asyncGenerator(this, arguments, function* listPageRangeItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("<your-container-name>").getPageBlobClient("<your-blob-name>");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRanges()) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRanges();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ *   pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {
+ *   for (const pageRange of response) {
+ *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ *   }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRanges(offset = 0, count, options = {}) {
+ options.conditions = options.conditions || {};
+ // AsyncIterableIterator to iterate over page ranges
+ const iter = this.listPageRangeItems(offset, count, options);
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: (settings = {}) => {
+ return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+ },
+ };
+ }
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
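+ *
+ * Example usage (an illustrative sketch, Node.js only; assumes an existing 1024-byte
+ * `pageBlobClient`):
+ *
+ * ```js
+ * // Snapshot the blob, write another page, then diff against the snapshot.
+ * const snapshotResponse = await pageBlobClient.createSnapshot();
+ * await pageBlobClient.uploadPages(Buffer.alloc(512, "b"), 512, 512);
+ * const diff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshotResponse.snapshot);
+ * console.log(diff.pageRange, diff.clearRange);
+ * ```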
+ */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); + try { + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + offset: offset, + count: count, + }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. 
+     * The ContinuationToken value can be used as the value for
+     * the marker parameter in a subsequent call to request the next page of get
+     * items. The marker value is opaque to the client.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     */
+    listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) {
+        return __asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() {
+            let getPageRangeItemSegmentsResponse;
+            if (!!marker || marker === undefined) {
+                do {
+                    getPageRangeItemSegmentsResponse = yield __await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options));
+                    marker = getPageRangeItemSegmentsResponse.continuationToken;
+                    yield yield __await(yield __await(getPageRangeItemSegmentsResponse));
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects
+     *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     */
+    listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) {
+        return __asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() {
+            var e_2, _a;
+            let marker;
+            try {
+                for (var _b = __asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) {
+                    const getPageRangesSegment = _c.value;
+                    yield __await(yield* __asyncDelegator(__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment))));
+                }
+            }
+            catch (e_2_1) { e_2 = { error: e_2_1 }; }
+            finally {
+                try {
+                    if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b));
+                }
+                finally { if (e_2) throw e_2.error; }
+            }
+        });
+    }
+    /**
+     * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
+     * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.
+     *
+     * Example using `for await` syntax:
+     *
+     * ```js
+     * // Get the pageBlobClient before you run these snippets,
+     * // Can be obtained from `blobServiceClient.getContainerClient("<your-container-name>").getPageBlobClient("<your-blob-name>");`
+     * let i = 1;
+     * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let i = 1;
+     * let iter = pageBlobClient.listPageRangesDiff();
+     * let pageRangeItem = await iter.next();
+     * while (!pageRangeItem.done) {
+     *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+     *   pageRangeItem = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+     *   for (const pageRange of response) {
+     *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     *
+     * // Passing next marker as continuationToken
+     *
+     * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints 10 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Ranges operation.
+     * @returns An asyncIterableIterator that supports paging.
+     */
+    listPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+        options.conditions = options.conditions || {};
+        // AsyncIterableIterator to iterate over page ranges
+        const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options));
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+            },
+        };
+    }
+    /**
+     * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
+     * @param offset - Starting byte position of the page blob
+     * @param count - Number of bytes to get ranges diff.
+     * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     * @returns Response data for the Page Blob Get Page Range Diff operation.
+     */
+    async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) {
+        var _a;
+        options.conditions = options.conditions || {};
+        const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options);
+        try {
+            return await this.pageBlobContext
+                .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions)))
+                .then(rangeResponseFromModel);
+        }
+        catch (e) {
+            span.setStatus({
+                code: SpanStatusCode.ERROR,
+                message: e.message,
+            });
+            throw e;
+        }
+        finally {
+            span.end();
+        }
+    }
+    /**
+     * Resizes the page blob to the specified size (which must be a multiple of 512).
+     * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties
+     *
+     * @param size - Target size
+     * @param options - Options to the Page Blob Resize operation.
+     * @returns Response data for the Page Blob Resize operation.
+     */
+    async resize(size, options = {}) {
+        var _a;
+        options.conditions = options.conditions || {};
+        const { span, updatedOptions } = createSpan("PageBlobClient-resize", options);
+        try {
+            return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)));
+        }
+        catch (e) {
+            span.setStatus({
+                code: SpanStatusCode.ERROR,
+                message: e.message,
+            });
+            throw e;
+        }
+        finally {
+            span.end();
+        }
+    }
+    /**
+     * Sets a page blob's sequence number.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+     *
+     * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.
+     * @param sequenceNumber - Required if sequenceNumberAction is max or update
+     * @param options - Options to the Page Blob Update Sequence Number operation.
+     * @returns Response data for the Page Blob Update Sequence Number operation.
+     */
+    async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) {
+        var _a;
+        options.conditions = options.conditions || {};
+        const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options);
+        try {
+            return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
+        }
+        catch (e) {
+            span.setStatus({
+                code: SpanStatusCode.ERROR,
+                message: e.message,
+            });
+            throw e;
+        }
+        finally {
+            span.end();
+        }
+    }
+    /**
+     * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.
+     * The snapshot is copied such that only the differential changes between the previously
+     * copied snapshot are transferred to the destination.
+     * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.
+     * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob
+     * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots
+     *
+     * @param copySource - Specifies the name of the source page blob snapshot. For example,
+     * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+     * @param options - Options to the Page Blob Copy Incremental operation.
+     * @returns Response data for the Page Blob Copy Incremental operation.
+     */
+    async startCopyIncremental(copySource, options = {}) {
+        var _a;
+        const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options);
+        try {
+            return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
+        }
+        catch (e) {
+            span.setStatus({
+                code: SpanStatusCode.ERROR,
+                message: e.message,
+            });
+            throw e;
+        }
+        finally {
+            span.end();
+        }
+    }
+}
+//# sourceMappingURL=Clients.js.map
\ No newline at end of file
diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map
new file mode 100644
index 0000000..a0c782c
--- /dev/null
+++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"Clients.js","sourceRoot":"","sources":["../../../src/Clients.ts"],"names":[],"mappings":";AAGA,OAAO,EACL,YAAY,EACZ,uBAAuB,EAGvB,MAAM,EACN,iBAAiB,EAGjB,UAAU,GACX,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAGrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,UAAU,EAAE,IAAI,IAAI,WAAW,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,4BAA4B,CAAC;AA8ClG,OAAO,EAKL,oBAAoB,EAMpB,YAAY,GASb,MAAM,UAAU,CAAC;AAClB,OAAO,EAGL,sBAAsB,GACvB,MAAM,yBAAyB,CAAC;AACjC,OAAO,EAAE,WAAW,EAAgB,cAAc,EAA0B,MAAM,YAAY,CAAC;AAC/F,OAAO,EACL,0BAA0B,GAG3B,MAAM,sCAAsC,CAAC;AAC9C,OAAO,EAAS,aAAa,EAAE,MAAM,SAAS,CAAC;AAC/C,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC3D,OAAO,EACL,yCAAyC,EACzC,sCAAsC,EACtC,qBAAqB,EACrB,gCAAgC,EAChC,gCAAgC,EAChC,iCAAiC,EACjC,+BAA+B,EAC/B,mCAAmC,EACnC,OAAO,EACP,YAAY,GACb,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,UAAU,EAAE,kCAAkC,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,4BAA4B,EAC5B,yBAAyB,EACzB,eAAe,EACf,eAAe,EACf,yBAAyB,EACzB,iBAAiB,EACjB,4BAA4B,EAC5B,eAAe,EACf,UAAU,EACV,gBAAgB,EAChB,oBAAoB,EACpB,MAAM,GACP,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EACL,kBAAkB,EAClB,MAAM,EACN,qBAAqB,EACrB,cAAc,GACf,MAAM,oBAAoB,CAAC;AAG5B,OAAO,EAAE,8BAA8B,EAAE,MAAM,8BAA8B,CAAC;AAE9E,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AA2sBpD;;;GAGG;AACH,MAAM,OAAO,UAAW,SAAQ,aAAa;IAqF3C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,IAAI,iBAAiB,IAAI,OAAO,iBAAiB,KAAK,QAAQ,EAAE;gBAC9D,OAAO,GAAG,iBAA2C,CAAC;aACvD;YACD,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QAED,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,aAAa,EAAE,IAAI,CAAC,cAAc,EAAE;YAC3D,IAAI,CAAC,+BAA+B,EAAE,CAAC,CAAC;QAC1C,IAAI,CAAC,WAAW,GAAG,I
AAI,WAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAE9D,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAW,CAAC;QACvF,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAW,CAAC;IAC3F,CAAC;IAxKD;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;IAC7B,CAAC;IA8JD;;;;;;OAMG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACI,WAAW,CAAC,SAAiB;QAClC,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,SAAS,EACjC,SAAS,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAC/C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;OAGG;IACI,mBAAmB;QACxB,OAAO,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;OAGG;IACI,iBAAiB;QACtB,OAAO,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACrD,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA0DG;IACI,KAAK,CAAC,QAAQ,CACnB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA+B,EAAE;;QAEjC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEhE,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAE5E,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACzC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,kBAAkB,EAAE,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,+DAA+D;iBAC7H,EACD,KAAK,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAC5E,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB,EAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,UAAU,mCACX,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,GAC5F,CAAC;YACF,sCAAsC;YACtC,IAAI,CAAC,MAAM,EAAE;gBACX,OAAO,UAAU,CAAC;aACnB;YAED,8EAA8E;YAC9E,uEAAuE;YACvE,uEAAuE;YACvE,sGAAsG;YACtG,gDAAgD;YAChD,IAAI,OAAO,CAAC,gBAAgB,KAAK,SAAS,IAAI,OAAO,CAAC,gBAAgB,GAAG,CAAC,EAAE;gBAC1E,uDAAuD;gBACvD,OAAO,CAAC,gBAAgB,GAAG,mCAAmC,CAAC;aAChE;YAED,IAAI,GAAG,CAAC,aAAa,KAAK,SAAS,EAAE;gBACnC,MAAM,IAAI,UAAU,CAAC,oEAAoE,CAAC,CAAC;aAC5F;YAED,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;gBACb,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;aAClF;YAED,OAAO,IAAI,oBAAoB,CAC7B,UAAU,EACV,KAAK,EAAE,KAAa,EAAkC,EAAE;;gBACtD,MAAM,sBAAsB,GAA+B;oBACzD,qBAAqB,EAAE,OAAO,CAAC,UAAU;oBACzC,wBAAwB,EAAE;wBACxB,OAAO,EAAE,OAAO,CAAC,UAAW,CAAC,OAAO,IAAI,GAAG,CAAC,IAAI;wBAChD,eAAe,EAAE,OAAO,CAAC,UAAW,CAAC,eAAe;wBACpD,WAAW,EAAE,OAAO,CAAC,UAAW,CAAC,WAAW;wBAC5C,iBAAiB,EAAE,OAAO,CAAC,UAAW,CAAC,iBAAiB;wBACxD,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa;qBAC1C;oBACD,KAAK,EAAE,aAAa,CAAC;wBACnB,KAAK,EAAE,MAAM,GAAG,GAAG,CAAC,aAAc,GAAG,KAAK;wBAC1C,MAAM,EAAE,KAAK;qBACd,CAAC;oBACF,kBAAkB,EAAE,OAAO,CAAC,kBAAkB;oBAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB;oBAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ;oBAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB;iBACrC,CAAC;gBAEF,qBAAqB;gBACrB,eAAe;gBACf,0CAA0C;gBAC1C,2BAA2B;gBAC3B,mDAAmD;gBACnD,KAAK;gBAEL,OAAO,CACL,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBAC7B,WAAW,EAAE,OAAO,CAAC,WAA
W,IAC7B,sBAAsB,EACzB,CACH,CAAC,kBAAmB,CAAC;YACxB,CAAC,EACD,MAAM,EACN,GAAG,CAAC,aAAc,EAClB;gBACE,gBAAgB,EAAE,OAAO,CAAC,gBAAgB;gBAC1C,UAAU,EAAE,OAAO,CAAC,UAAU;aAC/B,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAA6B,EAAE;QACjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;gBAChD,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,cAAc,EAAE,cAAc,CAAC,cAAc;aAC9C,CAAC,CAAC;YACH,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,kDAAkD;gBAClD,OAAO,KAAK,CAAC;aACd;iBAAM,IACL,CAAC,CAAC,UAAU,KAAK,GAAG;gBACpB,CAAC,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,sCAAsC;oBAC7D,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,yCAAyC,CAAC,EACpE;gBACA,kDAAkD;gBAClD,OAAO,IAAI,CAAC;aACb;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,aAAa,CACxB,UAAoC,EAAE;;QAEtC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;YAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,aAAa,iBAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,uCACK,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,IAC3F;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAA6B,EAAE;;QACjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,MAAM,iBAClC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,cAAc,CACzB,UAA6B,EAAE;;QAE/B,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,cAAc,EAAE;gBAC3C,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,wEAAwE;iBAClF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;I
ACI,KAAK,CAAC,QAAQ,CAAC,UAA+B,EAAE;QACrD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAC5E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACpC,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACI,KAAK,CAAC,cAAc,CACzB,eAAiC,EACjC,UAAqC,EAAE;;QAEvC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,eAAe,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAGxC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,WAAW,CACtB,QAAmB,EACnB,UAAkC,EAAE;;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,iBACvC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,OAAO,CAAC,IAAU,EAAE,UAA8B,EAAE;;QAC/D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,+BACnC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,KACrD,IAAI,EAAE,UAAU,CAAC,IAAI,CAAC,IACtB,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,OAAO,CAAC,UAA8B,EAAE;;QACnD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,IAAI,EAAE,MAAM,CAAC,EAAE,UAAU,EAAE,QAAQ,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE,GACxD,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,kBAAkB,CAAC,cAAuB;QAC/C,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,cAAc,CACzB,UAAqC,EAAE;;QAEvC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QA
ClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAuEG;IACI,KAAK,CAAC,gBAAgB,CAC3B,UAAkB,EAClB,UAAuC,EAAE;QAOzC,MAAM,MAAM,GAAyB;YACnC,gBAAgB,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;YAC7D,aAAa,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;YACvD,gBAAgB,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;SAC9D,CAAC;QACF,MAAM,MAAM,GAAG,IAAI,0BAA0B,CAAC;YAC5C,UAAU,EAAE,MAAM;YAClB,UAAU;YACV,YAAY,EAAE,OAAO,CAAC,YAAY;YAClC,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,uBAAuB,EAAE,OAAO;SACjC,CAAC,CAAC;QAEH,qDAAqD;QACrD,8DAA8D;QAC9D,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QAEpB,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,gBAAgB,CAC3B,MAAc,EACd,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,MAAM,kBACnD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,IACtC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,eAAe,CAC1B,UAAkB,EAClB,UAAsC,EAAE;;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,kBAClD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,cAAc,EAAE,OAAO,CAAC,cAAc,IACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,aAAa,CACxB,IAAkD,EAClD,UAA8B,EAAE;;QAEhC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,YAAY,CAAC,IAAI,CAAE,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO
,CAAC,UAAU,0CAAE,aAAa,KAE3C,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IA8CM,KAAK,CAAC,gBAAgB,CAC3B,MAAwB,EACxB,MAAe,EACf,MAA6C,EAC7C,SAAsC,EAAE;QAExC,IAAI,MAA0B,CAAC;QAC/B,IAAI,MAAM,GAAG,CAAC,CAAC;QACf,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,MAAM,CAAC;QACrB,IAAI,MAAM,YAAY,MAAM,EAAE;YAC5B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;YACrB,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;SACjD;aAAM;YACL,MAAM,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;YACjD,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;YAChD,OAAO,GAAI,MAAsC,IAAI,EAAE,CAAC;SACzD;QACD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;YACF,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;gBACtB,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;aACvB;YACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,EAAE;gBACzB,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC;aACvD;YACD,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;gBAC3B,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;aACvD;YAED,IAAI,MAAM,GAAG,CAAC,EAAE;gBACd,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,IAAI,KAAK,IAAI,KAAK,IAAI,CAAC,EAAE;gBACvB,MAAM,IAAI,UAAU,CAAC,qCAAqC,CAAC,CAAC;aAC7D;YAED,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;gBACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;aACzB;YAED,0CAA0C;YAC1C,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,iCACpC,OAAO,KACV,cAAc,kCACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;gBACH,KAAK,GAAG,QAAQ,CAAC,aAAc,GAAG,MAAM,CAAC;gBACzC,IAAI,KAAK,GAAG,CAAC,EAAE;oBACb,MAAM,IAAI,UAAU,CAClB,UAAU,MAAM,uCAAuC,QAAQ,CAAC,aAAc,EAAE,CACjF,CAAC;iBACH;aACF;YAED,oEAAoE;YACpE,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI;oBACF,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;iBAC9B;gBAAC,OAAO,KAAU,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,0CAA0C,KAAK,qJAAqJ,KAAK,CAAC,OAAO,EAAE,CACpN,CAAC;iBACH;aACF;YAED,IAAI,MAAM,CAAC,MAAM,GAAG,KAAK,EAAE;gBACzB,MAAM,IAAI,UAAU,CAClB,mFAAmF,KAAK,EAAE,CAC3F,CAAC;aACH;YAED,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,GAAG,GAAG,MAAM,EAAE,GAAG,GAAG,MAAM,GAAG,KAAK,EAAE,GAAG,GAAG,GAAG,GAAG,OAAO,CAAC,SAAS,EAAE;gBAC1E,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;oBAC5B,+BAA+B;oBAC/B,IAAI,QAAQ,GAAG,MAAM,GAAG,KAAM,CAAC;oBAC/B,IAAI,GAAG,GAAG,OAAO,CAAC,SAAU,GAAG,QAAQ,EAAE;wBACvC,QAAQ,GAAG,GAAG,GAAG,OAAO,CAAC,SAAU,CAAC;qBACrC;oBACD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,QAAQ,GAAG,GAAG,EAAE;wBACxD,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,gBAAgB,EAAE,OAAO,CAAC,wBAAwB;wBAClD,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;wBAChD,cAAc,kCACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,CACtD;qBACF,CAAC,CAAC;oBACH,MAAM,MAAM,GAAG,QAAQ,CAAC,kBAAmB,CAAC;oBAC5C,MAAM,cAAc,CAAC,MAAM,EAAE,MAAO,EAAE,GAAG,GAAG,MAAM,EAAE,QAAQ,GAAG,MAAM,CAAC,CAAC;oBACvE,qEAAqE;oBACrE,sEAAsE;oBACtE,oDAAoD;oBACpD,gBAAgB,IAAI,QAAQ,GAAG,GAAG,CAAC;oBACnC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;qBACvD;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YACjB,OAAO,MAAM,CAAC;SACf;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACI,KAAK,CAAC,cAAc,CACzB,QAAgB,EAChB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA+B,EAAE;QAEjC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAA
C,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,KAAK,kCAC7C,OAAO,KACV,cAAc,kCACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;YACH,IAAI,QAAQ,CAAC,kBAAkB,EAAE;gBAC/B,MAAM,qBAAqB,CAAC,QAAQ,CAAC,kBAAkB,EAAE,QAAQ,CAAC,CAAC;aACpE;YAED,iEAAiE;YAChE,QAAgB,CAAC,kBAAkB,GAAG,SAAS,CAAC;YACjD,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAEO,+BAA+B;QACrC,IAAI,aAAa,CAAC;QAClB,IAAI,QAAQ,CAAC;QACb,IAAI;YACF,mCAAmC;YACnC,wEAAwE;YACxE,8DAA8D;YAC9D,8EAA8E;YAC9E,oEAAoE;YACpE,oGAAoG;YACpG,6DAA6D;YAE7D,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAE7C,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;gBACjD,gEAAgE;gBAChE,oCAAoC;gBACpC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;gBACtE,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;iBAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;gBACvC,mGAAmG;gBACnG,6HAA6H;gBAC7H,qDAAqD;gBACrD,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;gBAC9E,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;iBAAM;gBACL,iDAAiD;gBACjD,oCAAoC;gBACpC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;gBACtE,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;YAED,+GAA+G;YAC/G,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAClD,QAAQ,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;YAExC,mEAAmE;YACnE,0GAA0G;YAC1G,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;YAExC,IAAI,CAAC,aAAa,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YAED,OAAO,EAAE,QAAQ,EAAE,aAAa,EAAE,CAAC;SACpC;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;SAC5F;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACK,KAAK,CAAC,gBAAgB,CAC5B,UAAkB,EAClB,UAAuC,EAAE;;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,UAAU,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;oBACnE,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,aAAa;iBACrD,EACD,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAC5C,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,QAAQ,EAAE,OAAO,CAAC,QAAQ,IACvB,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,cAAc,CAAC,OAAkC;QACtD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;YAC7B,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;gBAC5D,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;aACH;YAED,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,EAClC,QAAQ,EAAE,IAAI,CAAC,KAAK,EACpB,YAAY,EAAE,IAAI,CAAC,SAAS,EAC5B,SAAS,EAAE,IAAI,CAAC,UAAU,IACvB,OAAO,GAEZ,IAAI,CAAC,U
AAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,wBAAwB,CACnC,OAA6C;QAE7C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,wBAAwB,iBACpD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,qBAAqB,CAChC,kBAA0C,EAC1C,OAA0C;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,qBAAqB,iBACjD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACjC,wBAAwB,EAAE,kBAAkB,CAAC,UAAU,EACvD,sBAAsB,EAAE,kBAAkB,CAAC,UAAU,EACrD,wBAAwB,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,uBAAuB,IACvD,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,YAAY,CACvB,gBAAyB,EACzB,OAAiC;QAEjC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,gBAAgB,kBACzD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AA4ND;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,UAAU;IAsE9C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,qKAAqK;YACrK,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,+DAA+D;YAC/D,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,C
AAC,uEAAuE,CAAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACrE,CAAC;IAED;;;;;;;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,gBAAgB,CACzB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,KAAK,CAAC,MAAM,CAAC,UAAmC,EAAE;;QACvD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC,kBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,iBAAiB,CAC5B,UAA8C,EAAE;;QAEhD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oCAAoC,EAAE,OAAO,CAAC,CAAC;QAC3F,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;QAC5C,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,iCACxB,cAAc,KACjB,UAAU,IACV,CAAC;YACH,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,4EAA4E;iBACtF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,IAAI,CAAC,UAAiC,EAAE;;QACnD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,iBACtC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;OAuBG;IACI,KAAK,CAAC,WAAW,CACtB,IAAqB,EACrB,aAAqB,EACrB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,WAAW,CAAC,aAAa,EAAE,IAAI,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D
,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,KAAK,CAAC,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,KAAa,EACb,UAA+C,EAAE;;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,kBAAkB,CAAC,SAAS,EAAE,CAAC,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC3D,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAmkBD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,UAAU;IA8E7C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,IAAI,iBAAiB,IAAI,OAAO,iBAAiB,KAAK,QAAQ,EAAE;gBAC9D,OAAO,GAAG,iBAA2C,CAAC;aACvD;YACD,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,C
AAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACjE,CAAC;IAED;;;;;;;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,eAAe,CACxB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA6BG;IACI,KAAK,CAAC,KAAK,CAChB,KAAa,EACb,UAAiC,EAAE;;QAEnC,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEhE,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAE9E,IAAI;YACF,IAAI,CAAC,MAAM,EAAE;gBACX,MAAM,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAC;aAC3E;YACD,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,YAAY,EAAE;oBACZ,SAAS,EAAE,KAAK;oBAChB,UAAU,EAAE,KAAK;oBACjB,kBAAkB,EAAE,oBAAoB,CAAC,OAAO,CAAC,sBAAsB,CAAC;oBACxE,mBAAmB,EAAE,oBAAoB,CAAC,OAAO,CAAC,uBAAuB,CAAC;iBAC3E,EACD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,OAAO,IAAI,iBAAiB,CAAC,QAAQ,EAAE;gBACrC,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,OAAO,EAAE,OAAO,CAAC,OAAO;aACzB,CAAC,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACI,KAAK,CAAC,MAAM,CACjB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE;;QAEpC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,kBAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;OAiBG;IAEI,KAAK,CAAC,iBAAiB,CAC5B,SAAiB,EACjB,UAA6C,EAAE;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EAAE,SAAS,gDACzD,OAAO,KACV,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,aAAa,KAE1C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,OAAO;oBAChD,qBAAqB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,eAAe;oBAChE,iBAAiB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,WAAW;oBACxD,uBAAuB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,iBAAiB;oBACpE,YAAY,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE
,aAAa;iBACtD,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,cAAc,EAAE,OAAO,CAAC,cAAc,KACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,OAAe,EACf,IAAqB,EACrB,aAAqB,EACrB,UAAsC,EAAE;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,EAAE,aAAa,EAAE,IAAI,kBACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;OAoBG;IACI,KAAK,CAAC,iBAAiB,CAC5B,OAAe,EACf,SAAiB,EACjB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA6C,EAAE;QAE/C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,OAAO,EAAE,CAAC,EAAE,SAAS,kBACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,WAAW,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAClF,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,IAC5E,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,eAAe,CAC1B,MAAgB,EAChB,UAA2C,EAAE;;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,EAAE,MAAM,EAAE,MAAM,EAAE,kBAEhB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,YAAY,CACvB,QAAuB,EACvB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,QA
AQ,kBAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,IAAI,CAAC,GAAG,CAAC,eAAe,EAAE;gBACxB,GAAG,CAAC,eAAe,GAAG,EAAE,CAAC;aAC1B;YAED,IAAI,CAAC,GAAG,CAAC,iBAAiB,EAAE;gBAC1B,GAAG,CAAC,iBAAiB,GAAG,EAAE,CAAC;aAC5B;YAED,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED,uBAAuB;IAEvB;;;;;;;;;;;;;;OAcG;IACI,KAAK,CAAC,UAAU,CACrB,IAAmD,EACnD,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,MAAM,EAAE;gBACV,IAAI,MAAc,CAAC;gBACnB,IAAI,IAAI,YAAY,MAAM,EAAE;oBAC1B,MAAM,GAAG,IAAI,CAAC;iBACf;qBAAM,IAAI,IAAI,YAAY,WAAW,EAAE;oBACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBAC5B;qBAAM;oBACL,IAAI,GAAG,IAAuB,CAAC;oBAC/B,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;iBACrE;gBAED,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,EAAU,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAC7E,MAAM,CAAC,UAAU,EACjB,cAAc,CACf,CAAC;aACH;iBAAM;gBACL,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;gBACrC,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,EAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;aACH;SACF;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;OAkBG;IACI,KAAK,CAAC,iBAAiB,CAC5B,WAAiD,EACjD,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;YAC5C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAc,EAAE,IAAY,EAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACK,KAAK,CAAC,sBAAsB,CAClC,WAA8D,EAC9D,IAAY,EACZ,UAA0C,EAAE;QAE5C,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;YACtB,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,GAAG,gCAAgC,EAAE;YACjF,MAAM,IAAI,UAAU,CAClB,wCAAwC,gCAAgC,EAAE,CAC3E,CAAC;SACH;QAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;YACjE,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,CAAC;SAC9D;QACD,IACE,OAAO,CAAC,iBAAiB,GAAG,CAAC;YAC7B,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,EAC5D;YACA,MAAM,IAAI,UAAU,CAClB,gDAAgD,gCAAgC,EAAE,CACnF,CAAC;SACH;QAED,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;YAC3B,IAAI,IAAI,GAAG,gCAAgC,GAAG,qBAAqB,EAAE;gBACnE,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,2CAA2C,CAAC,CAAC;aAC1E;YACD,IAAI,IAAI,GAAG,OAAO,CAAC,iBAAiB,EAAE;gBACpC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC,CAAC;gBAC5D,IAAI,OAAO,CAAC,SAAS,GAAG,iCAAiC,EAAE;oBACzD,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;iBACvD;aACF;SACF;QACD,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;YAC5B,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;SAC9B;QACD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;YACF,IAAI,IAAI,IAAI,OAAO,CAAC,iBAAiB,EAAE;gBACrC,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,EAA
E,IAAI,EAAE,cAAc,CAAC,CAAC;aACtE;YAED,MAAM,SAAS,GAAW,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;YACzE,IAAI,SAAS,GAAG,qBAAqB,EAAE;gBACrC,MAAM,IAAI,UAAU,CAClB,6DAA6D;oBAC3D,mCAAmC,qBAAqB,EAAE,CAC7D,CAAC;aACH;YAED,MAAM,SAAS,GAAa,EAAE,CAAC;YAC/B,MAAM,aAAa,GAAG,YAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YAEjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;gBAClC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAkB,EAAE;oBAC1C,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC;oBAClD,MAAM,KAAK,GAAG,OAAO,CAAC,SAAU,GAAG,CAAC,CAAC;oBACrC,MAAM,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,OAAO,CAAC,SAAU,CAAC;oBACpE,MAAM,aAAa,GAAG,GAAG,GAAG,KAAK,CAAC;oBAClC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;oBACxB,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,CAAC,EAAE,aAAa,EAAE;wBAC/E,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;wBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;qBAC9C,CAAC,CAAC;oBACH,0FAA0F;oBAC1F,kEAAkE;oBAClE,gBAAgB,IAAI,aAAa,CAAC;oBAClC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAW,CAAC;4BAClB,WAAW,EAAE,gBAAgB;yBAC9B,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YAEjB,OAAO,IAAI,CAAC,eAAe,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;SACxD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACI,KAAK,CAAC,UAAU,CACrB,QAAgB,EAChB,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,MAAM,IAAI,GAAG,CAAC,MAAM,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;YAC3C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE;gBAChB,OAAO,GAAG,EAAE,CACV,kBAAkB,CAAC,QAAQ,EAAE;oBAC3B,SAAS,EAAE,IAAI;oBACf,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ;oBAC1C,KAAK,EAAE,MAAM;iBACd,CAAC,CAAC;YACP,CAAC,EACD,IAAI,kCAEC,OAAO,KACV,cAAc,kCACT,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAG1D,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACI,KAAK,CAAC,YAAY,CACvB,MAAgB,EAChB,aAAqB,+BAA+B,EACpD,iBAAyB,CAAC,EAC1B,UAAwC,EAAE;QAE1C,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;YAC5B,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;SAC9B;QACD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IAAI;YACF,IAAI,QAAQ,GAAG,CAAC,CAAC;YACjB,MAAM,aAAa,GAAG,YAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,SAAS,GAAa,EAAE,CAAC;YAE/B,MAAM,SAAS,GAAG,IAAI,eAAe,CACnC,MAAM,EACN,UAAU,EACV,cAAc,EACd,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE;gBACrB,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;gBACzD,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gBACxB,QAAQ,EAAE,CAAC;gBAEX,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE;oBAC3C,UAAU,EAAE,OAAO,CAAC,UAAU;oBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;oBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;iBAC9C,CAAC,CAAC;gBAEH,0FAA0F;gBAC1F,gBAAgB,IAAI,MAAM,CAAC;gBAC3B,IAAI,OAAO,CAAC,UAAU,EAAE;oBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;iBACvD;YACH,CAAC;YACD,kFAAkF;YAClF,2EAA2E;YAC3E,iDAAiD;YACjD,qCAAqC;YACrC,IAAI,CAAC,IAAI,CAAC,CAAC,cAAc,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CACpC,CAAC;YACF,MAAM,SAAS,CAAC,EAAE,EAAE,CAAC;YAErB,OAAO,MAAM,IAA
I,CAAC,eAAe,CAAC,SAAS,kCACtC,OAAO,KACV,cAAc,kCACT,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAoaD;;GAEG;AACH,MAAM,OAAO,cAAe,SAAQ,UAAU;IA8D5C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,eAAe,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACjE,CAAC;IAED;;;;;;;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,cAAc,CACvB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,kBAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aA
CnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,iBAAiB,CAC5B,IAAY,EACZ,UAA4C,EAAE;;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;YAC5C,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,kCAC7B,OAAO,KACV,UAAU,EACV,cAAc,EAAE,cAAc,CAAC,cAAc,IAC7C,CAAC;YACH,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,4EAA4E;iBACtF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,WAAW,CACtB,IAAqB,EACrB,MAAc,EACd,KAAa,EACb,UAAsC,EAAE;;QAExC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,KAAK,EAAE,IAAI,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,UAAkB,EAClB,KAAa,EACb,UAA6C,EAAE;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAClD,SAAS,EACT,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC9C,CAAC,EACD,aAAa,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC,kBAE1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,IAC5E,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,UAAU,CACrB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAqC,EAAE;;QAEvC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,C
AAC,CAAC;QAClF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,kBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAwC,EAAE;;QAE1C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,aAAa,iBACZ,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACK,KAAK,CAAC,qBAAqB,CACjC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,UAAgD,EAAE;;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,aAAa,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IACD;;;;;;;;;;;;;OAaG;IACY,yBAAyB,CACtC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,UAAgD,EAAE;;YAElD,IAAI,gCAAgC,CAAC;YACrC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,gCAAgC,GAAG,cAAM,IAAI,CAAC,qBAAqB,CACjE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;oBAC5D,oBAAM,cAAM,gCAAgC,CAAA,CAAA,CAAC;iBAC9C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;OAMG;IACY,kBAAkB,CAC/B,SAAiB,CAAC,EAClB,KAAc,EACd,UAAgD,EAAE;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAAyC,IAAA,KAAA,cAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBALU,MAAM,oBAAoB,WAAA,CAAA;oBAMnC,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;iBACxD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAsEG;IACI,cAAc,CACnB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAyC,EAAE;QAE3C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;QAC7D,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,CAAC,iBAAiB,kBAC7E,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,OAAO,EACV,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,iBAAiB,CAC5B,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA4C,EAAE;;QAE9C,OAAO,CA
AC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,iBAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,YAAY,EAAE,YAAY,EAC1B,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACK,KAAK,CAAC,yBAAyB,CACrC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD;;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAChG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,iBAAiB,iBACjD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACjC,qBAAqB,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,EAC1C,wBAAwB,kCACnB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,KACtB,MAAM,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,0CAAE,aAAa,KAE5C,YAAY,EAAE,iBAAiB,EAC/B,KAAK,EAAE,aAAa,CAAC;oBACnB,MAAM,EAAE,MAAM;oBACd,KAAK,EAAE,KAAK;iBACb,CAAC,EACF,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IACD;;;;;;;;;;;;;;;OAeG;IACY,6BAA6B,CAC1C,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD;;YAElD,IAAI,gCAAgC,CAAC;YACrC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,gCAAgC,GAAG,cAAM,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;oBAC5D,oBAAM,cAAM,gCAAgC,CAAA,CAAA,CAAC;iBAC9C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;OAOG;IACY,sBAAsB,CACnC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,OAAkD;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAAyC,IAAA,KAAA,cAAA,IAAI,CAAC,6BAA6B,CACzE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBANU,MAAM,oBAAoB,WAAA,CAAA;oBAOnC,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;iBACxD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAuEG;IACI,kBAAkB,CACvB,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA6C,EAAE;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAE9C,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,KAAK,EAAE,YAAY,oBAC/D,OAAO,EACV,CAAC;QACH,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,6BAA6B,CACvC,MAAM,EACN,KAAK,EACL,YAAY,EACZ,QAAQ,CAAC,iBAAiB,kBAExB,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,OAAO,EAEb,CAAC;YACJ,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,gCAAgC,CAC3C,MAAc,EACd,KAAa,EACb,eAAuB,EACvB,UAA4C,EAAE;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,iDAAiD,EACjD,OAAO,CACR,CAAC;QAEF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,iBAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EACf,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,
OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,IAAI,kBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,oBAAoB,CAC/B,oBAA8C,EAC9C,cAAuB,EACvB,UAA+C,EAAE;;QAEjD,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,oBAAoB,CAAC,oBAAoB,kBACzE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,kBAAkB,EAAE,cAAc,EAClC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACI,KAAK,CAAC,oBAAoB,CAC/B,UAAkB,EAClB,UAA+C,EAAE;;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,UAAU,kBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n generateUuid,\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n TransferProgressEvent,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PollOperationState } from \"@azure/core-lro\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Readable } from \"stream\";\n\nimport { BlobDownloadResponse } from \"./BlobDownloadResponse\";\nimport { BlobQueryResponse } from \"./BlobQueryResponse\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from \"./generated/src/operations\";\nimport {\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobCreateResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobCreateSnapshotResponse,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobDownloadResponseModel,\n BlobGetPropertiesResponseModel,\n BlobGetTagsHeaders,\n BlobSetHTTPHeadersResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobSetTierResponse,\n BlobStartCopyFromURLResponse,\n BlobTags,\n 
BlobUndeleteResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobStageBlockResponse,\n BlockBlobUploadHeaders,\n BlockBlobUploadResponse,\n BlockListType,\n CpkInfo,\n DeleteSnapshotsOptionType,\n LeaseAccessConditions,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalResponse,\n PageBlobCreateResponse,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobUploadPagesResponse,\n RehydratePriority,\n SequenceNumberActionType,\n BlockBlobPutBlobFromUrlResponse,\n BlobHTTPHeaders,\n PageBlobGetPageRangesResponseModel,\n PageRangeInfo,\n PageBlobGetPageRangesDiffResponseModel,\n BlobCopySourceTags,\n} from \"./generatedModels\";\nimport {\n AppendBlobRequestConditions,\n BlobDownloadResponseParsed,\n BlobRequestConditions,\n BlockBlobTier,\n ensureCpkIfSpecified,\n Metadata,\n ObjectReplicationPolicy,\n PageBlobRequestConditions,\n PremiumPageBlobTier,\n Tags,\n toAccessTier,\n TagConditions,\n MatchConditions,\n ModificationConditions,\n ModifiedAccessConditions,\n BlobQueryArrowField,\n BlobImmutabilityPolicy,\n HttpAuthorization,\n PollerLikeWithCancellation,\n} from \"./models\";\nimport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n rangeResponseFromModel,\n} from \"./PageBlobRangeResponse\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport {\n BlobBeginCopyFromUrlPoller,\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\nimport { Range, rangeToString } from \"./Range\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { Batch } from \"./utils/Batch\";\nimport { BufferScheduler } from \"../../storage-common/src\";\nimport {\n BlobDoesNotUseCustomerSpecifiedEncryption,\n BlobUsesCustomerSpecifiedEncryptionMsg,\n BLOCK_BLOB_MAX_BLOCKS,\n BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES,\n BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES,\n DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES,\n DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS,\n ETagAny,\n URLConstants,\n} from \"./utils/constants\";\nimport { createSpan, convertTracingToRequestOptionsBase } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n ExtractPageRangeInfoItems,\n generateBlockID,\n getURLParameter,\n httpAuthorizationToString,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n setURLParameter,\n toBlobTags,\n toBlobTagsString,\n toQuerySerialization,\n toTags,\n} from \"./utils/utils.common\";\nimport {\n fsCreateReadStream,\n fsStat,\n readStreamToLocalFile,\n streamToBuffer,\n} from \"./utils/utils.node\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobSASPermissions } from \"./sas/BlobSASPermissions\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldResponse,\n} from \"./generatedModels\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions {\n /**\n * The amount of time in milliseconds the poller should wait 
between\n * calls to the service to determine the status of the Blob copy.\n * Defaults to 15 seconds.\n */\n intervalInMs?: number;\n /**\n * Callback to receive the state of the copy progress.\n */\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n /**\n * Serialized poller state that can be used to resume polling from.\n * This may be useful when starting a copy on one process or thread\n * and you wish to continue polling on another process or thread.\n *\n * To get serialized poller state, call `poller.toString()` on an existing\n * poller.\n */\n resumeFrom?: string;\n}\n\n/**\n * Contains response data for the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse {}\n\n/**\n * Options to configure the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve.\n */\n snapshot?: string;\n /**\n * When this is set to true and download range of blob, the service returns the MD5 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentMD5?: boolean;\n /**\n * When this is set to true and download range of blob, the service returns the CRC64 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentCrc64?: boolean;\n /**\n * Conditions to meet when downloading blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Call back to receive events on the progress of download operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. 
ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original body download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional `FileClient.download()` request will be made\n * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached.\n *\n * Default value is 5, please set a larger value when loading large files in poor network.\n */\n maxRetryRequests?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.exists} operation.\n */\nexport interface BlobExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Conditions to meet.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting blob properties.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.delete} operation.\n */\nexport interface BlobDeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies options to delete blobs that have associated snapshots.\n * - `include`: Delete the base blob and all of its snapshots.\n * - `only`: Delete only the blob's snapshots and not the blob itself.\n */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.undelete} operation.\n */\nexport interface BlobUndeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setHTTPHeaders} operation.\n */\nexport interface BlobSetHTTPHeadersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob HTTP headers.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: 
CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setMetadata} operation.\n */\nexport interface BlobSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob metadata.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.setTags} operation.\n */\nexport interface BlobSetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getTags} operation.\n */\nexport interface BlobGetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getTags} operation.\n */\nexport type BlobGetTagsResponse = { tags: Tags } & BlobGetTagsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: BlobGetTagsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: BlobTags;\n };\n };\n\n/**\n * Options to configure Blob - Acquire Lease operation.\n */\nexport interface BlobAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Release Lease operation.\n */\nexport interface BlobReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Renew Lease operation.\n */\nexport interface BlobRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the 
request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Change Lease operation.\n */\nexport interface BlobChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Break Lease operation.\n */\nexport interface BlobBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.createSnapshot} operation.\n */\nexport interface BlobCreateSnapshotOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet when creating blob snapshots.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobStartCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the blob that are being copied.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | PremiumPageBlobTier | string;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n /**\n * Optional. 
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Overrides the sealed state of the destination blob. Default true.\n */\n sealBlob?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobClient.abortCopyFromURL} operation.\n */\nexport interface BlobAbortCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.syncCopyFromURL} operation.\n */\nexport interface BlobSyncCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | PremiumPageBlobTier | string;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Default 'REPLACE'. 
Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Options to configure the {@link BlobClient.setAccessTier} operation.\n */\nexport interface BlobSetTierOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n}\n\n/**\n * Option interface for the {@link BlobClient.downloadToBuffer} operation.\n */\nexport interface BlobDownloadToBufferOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * blockSize is the data every request trying to download.\n * Must be greater than or equal to 0.\n * If set to 0 or undefined, blockSize will automatically calculated according to the blob size.\n */\n blockSize?: number;\n\n /**\n * Optional. ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original block download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional FileClient.download() request will be made\n * from the broken point, until the requested block has been successfully downloaded or\n * maxRetryRequestsPerBlock is reached.\n *\n * Default value is 5, please set a larger value when in poor network.\n */\n maxRetryRequestsPerBlock?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel download.\n */\n concurrency?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Contains response data for the {@link BlobClient.deleteIfExists} operation.\n */\nexport interface BlobDeleteIfExistsResponse extends BlobDeleteResponse {\n /**\n * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}.\n */\nexport interface CommonGenerateSasUrlOptions {\n /**\n * The version of the service this SAS will target. 
If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n}\n\n/**\n * Options to configure {@link BlobClient.generateSasUrl} operation.\n */\nexport interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: BlobSASPermissions;\n}\n\n/**\n * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation.\n */\nexport interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation.\n */\nexport interface BlobSetImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n modifiedAccessCondition?: ModificationConditions;\n}\n\n/**\n * Options for setting legal hold {@link BlobClient.setLegalHold} operation.\n */\nexport interface BlobSetLegalHoldOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,\n * append blob, or page blob.\n */\nexport class BlobClient extends StorageClient {\n /**\n * blobContext provided by protocol layer.\n */\n private blobContext: StorageBlob;\n\n private _name: string;\n private _containerName: string;\n\n private _versionId?: string;\n private _snapshot?: string;\n\n /**\n * The name of the blob.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * The name of the storage container the blob is associated with.\n */\n public get containerName(): string {\n return 
this._containerName;\n }\n\n /**\n *\n * Creates an instance of BlobClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n options = options || {};\n let pipeline: PipelineLike;\n let url: string;\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n if (blobNameOrOptions && typeof blobNameOrOptions !== \"string\") {\n options = blobNameOrOptions as StoragePipelineOptions;\n }\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n\n super(url, pipeline);\n ({ blobName: this._name, containerName: this._containerName } =\n this.getBlobAndContainerNamesFromUrl());\n this.blobContext = new StorageBlob(this.storageClientContext);\n\n this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT) as string;\n this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID) as string;\n }\n\n /**\n * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp\n */\n public withSnapshot(snapshot: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a new BlobClient object pointing to a version of this blob.\n * Provide \"\" will remove the versionId and return a Client to the base blob.\n *\n * @param versionId - The versionId.\n * @returns A new BlobClient object pointing to the version of this blob.\n */\n public withVersion(versionId: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.VERSIONID,\n versionId.length === 0 ? 
undefined : versionId\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a AppendBlobClient object.\n *\n */\n public getAppendBlobClient(): AppendBlobClient {\n return new AppendBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a BlockBlobClient object.\n *\n */\n public getBlockBlobClient(): BlockBlobClient {\n return new BlockBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a PageBlobClient object.\n *\n */\n public getPageBlobClient(): PageBlobClient {\n return new PageBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Reads or downloads a blob from the system, including its metadata and properties.\n * You can also call Get Blob to read a snapshot.\n *\n * * In Node.js, data returns in a Readable stream readableStreamBody\n * * In browsers, data returns in a promise blobBody\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob\n *\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Optional options to Blob Download operation.\n *\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);\n * console.log(\"Downloaded blob content:\", downloaded.toString());\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * Example usage (browser):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);\n * console.log(\n * \"Downloaded blob content\",\n * downloaded\n * );\n *\n * async function blobToString(blob: Blob): Promise {\n * const fileReader = new FileReader();\n * return new Promise((resolve, reject) => {\n * fileReader.onloadend = (ev: any) => {\n * resolve(ev.target!.result);\n * };\n * fileReader.onerror = reject;\n * fileReader.readAsText(blob);\n * });\n * }\n * ```\n */\n public async download(\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlobClient-download\", options);\n\n try {\n const res = await this.blobContext.download({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream\n },\n range: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedRes = {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n // Return browser response immediately\n if (!isNode) {\n return wrappedRes;\n }\n\n // We support retrying when download stream unexpected ends in Node.js runtime\n // Following code shouldn't be bundled into browser build, however some\n // bundlers may try to bundle following code and \"FileReadResponse.ts\".\n // In this case, \"FileDownloadResponse.browser.ts\" will be used as a shim of \"FileDownloadResponse.ts\"\n // The config is in package.json \"browser\" field\n if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {\n // TODO: Default value or make it a required parameter?\n options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;\n }\n\n if (res.contentLength === undefined) {\n throw new RangeError(`File download response doesn't contain valid content length header`);\n }\n\n if (!res.etag) {\n throw new RangeError(`File download response doesn't contain valid etag header`);\n }\n\n return new BlobDownloadResponse(\n wrappedRes,\n async (start: number): Promise => {\n const updatedDownloadOptions: BlobDownloadOptionalParams = {\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ifMatch: options.conditions!.ifMatch || res.etag,\n ifModifiedSince: options.conditions!.ifModifiedSince,\n ifNoneMatch: options.conditions!.ifNoneMatch,\n ifUnmodifiedSince: options.conditions!.ifUnmodifiedSince,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({\n count: offset + res.contentLength! - start,\n offset: start,\n }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n };\n\n // Debug purpose only\n // console.log(\n // `Read from internal stream, range: ${\n // updatedOptions.range\n // }, options: ${JSON.stringify(updatedOptions)}`\n // );\n\n return (\n await this.blobContext.download({\n abortSignal: options.abortSignal,\n ...updatedDownloadOptions,\n })\n ).readableStreamBody!;\n },\n offset,\n res.contentLength!,\n {\n maxRetryRequests: options.maxRetryRequests,\n onProgress: options.onProgress,\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure blob resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing blob might be deleted by other clients or\n * applications. 
Vice versa new blobs might be added by other clients or applications after this\n * function completes.\n *\n * @param options - options to Exists operation.\n */\n public async exists(options: BlobExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-exists\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n await this.getProperties({\n abortSignal: options.abortSignal,\n customerProvidedKey: options.customerProvidedKey,\n conditions: options.conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n // Expected exception when checking blob existence\n return false;\n } else if (\n e.statusCode === 409 &&\n (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg ||\n e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)\n ) {\n // Expected exception when checking blob existence\n return true;\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns all user-defined metadata, standard HTTP properties, and system properties\n * for the blob. It does not return the content of the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Optional options to Get Properties operation.\n */\n public async getProperties(\n options: BlobGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getProperties\", options);\n try {\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const res = await this.blobContext.getProperties({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n return {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async delete(options: BlobDeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-delete\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.blobContext.delete({\n abortSignal: options.abortSignal,\n deleteSnapshots: options.deleteSnapshots,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async deleteIfExists(\n options: BlobDeleteOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteIfExists\", options);\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a blob or snapshot only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restores the contents and metadata of soft deleted blob and any associated\n * soft deleted snapshots. 
Undelete Blob is supported only on version 2017-07-29\n * or later.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob\n *\n * @param options - Optional options to Blob Undelete operation.\n */\n public async undelete(options: BlobUndeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-undelete\", options);\n try {\n return await this.blobContext.undelete({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets system properties on the blob.\n *\n * If no value provided, or no value provided for the specified blob HTTP headers,\n * these blob HTTP headers without a value will be cleared.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param blobHTTPHeaders - If no value provided, or no value provided for\n * the specified blob HTTP headers, these blob HTTP\n * headers without a value will be cleared.\n * A common header to set is `blobContentType`\n * enabling the browser to provide functionality\n * based on file type.\n * @param options - Optional options to Blob Set HTTP Headers operation.\n */\n public async setHTTPHeaders(\n blobHTTPHeaders?: BlobHTTPHeaders,\n options: BlobSetHTTPHeadersOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setHTTPHeaders\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setHttpHeaders({\n abortSignal: options.abortSignal,\n blobHttpHeaders: blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger.\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets user-defined metadata for the specified blob as one or more name-value pairs.\n *\n * If no option provided, or no metadata defined in the parameter, the blob\n * metadata will be removed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Optional options to Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: BlobSetMetadataOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setMetadata\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: 
SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets tags on the underlying blob.\n * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.\n * Valid tag key and value characters include lower and upper case letters, digits (0-9),\n * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').\n *\n * @param tags -\n * @param options -\n */\n public async setTags(tags: Tags, options: BlobSetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setTags\", options);\n try {\n return await this.blobContext.setTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n tags: toBlobTags(tags),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the tags associated with the underlying blob.\n *\n * @param options -\n */\n public async getTags(options: BlobGetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getTags\", options);\n try {\n const response = await this.blobContext.getTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n const wrappedResponse: BlobGetTagsResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n tags: toTags({ blobTagSet: response.blobTagSet }) || {},\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the blob.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the blob.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a read-only snapshot of a blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob\n *\n * @param options - Optional options to the Blob Create Snapshot operation.\n */\n public async createSnapshot(\n options: BlobCreateSnapshotOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-createSnapshot\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.createSnapshot({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Asynchronously copies a blob to a 
destination within the storage account.\n * This method returns a long running operation poller that allows you to wait\n * indefinitely until the copy is completed.\n * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.\n * Note that the onProgress callback will not be invoked if the operation completes in the first\n * request, and attempting to cancel a completed copy will result in an error being thrown.\n *\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * Example using automatic polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using manual polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * while (!poller.isDone()) {\n * await poller.poll();\n * }\n * const result = copyPoller.getResult();\n * ```\n *\n * Example using progress updates:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * onProgress(state) {\n * console.log(`Progress: ${state.copyProgress}`);\n * }\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using a changing polling interval (default 15 seconds):\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * intervalInMs: 1000 // poll blob every 1 second for copy progress\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using copy cancellation:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * // cancel operation after starting it.\n * try {\n * await copyPoller.cancelOperation();\n * // calls to get the result now throw PollerCancelledError\n * await copyPoller.getResult();\n * } catch (err) {\n * if (err.name === 'PollerCancelledError') {\n * console.log('The copy was cancelled.');\n * }\n * }\n * ```\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n public async beginCopyFromURL(\n copySource: string,\n options: BlobBeginCopyFromURLOptions = {}\n ): Promise<\n PollerLikeWithCancellation<\n PollOperationState,\n BlobBeginCopyFromURLResponse\n >\n > {\n const client: CopyPollerBlobClient = {\n abortCopyFromURL: (...args) => this.abortCopyFromURL(...args),\n getProperties: (...args) => this.getProperties(...args),\n startCopyFromURL: (...args) => this.startCopyFromURL(...args),\n };\n const poller = new BlobBeginCopyFromUrlPoller({\n blobClient: client,\n copySource,\n intervalInMs: options.intervalInMs,\n onProgress: options.onProgress,\n resumeFrom: options.resumeFrom,\n startCopyFromURLOptions: options,\n });\n\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n await poller.poll();\n\n return poller;\n }\n\n /**\n * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero\n * length and full metadata. 
Version 2012-02-12 and newer.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob\n *\n * @param copyId - Id of the Copy From URL operation.\n * @param options - Optional options to the Blob Abort Copy From URL operation.\n */\n public async abortCopyFromURL(\n copyId: string,\n options: BlobAbortCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-abortCopyFromURL\", options);\n try {\n return await this.blobContext.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not\n * return a response until the copy is complete.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url\n *\n * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication\n * @param options -\n */\n public async syncCopyFromURL(\n copySource: string,\n options: BlobSyncCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-syncCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.copyFromURL(copySource, {\n abortSignal: options.abortSignal,\n metadata: options.metadata,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n sourceContentMD5: options.sourceContentMD5,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n encryptionScope: options.encryptionScope,\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier\n *\n * @param tier - The tier to be set on the blob. 
Valid values are Hot, Cool, or Archive.\n * @param options - Optional options to the Blob Set Tier operation.\n */\n public async setAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string,\n options: BlobSetTierOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setAccessTier\", options);\n try {\n return await this.blobContext.setTier(toAccessTier(tier)!, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n rehydratePriority: options.rehydratePriority,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level function\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param buffer - Buffer to be fill, must have length larger than count\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n buffer: Buffer,\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n public async downloadToBuffer(\n param1?: Buffer | number,\n param2?: number,\n param3?: BlobDownloadToBufferOptions | number,\n param4: BlobDownloadToBufferOptions = {}\n ): Promise {\n let buffer: Buffer | undefined;\n let offset = 0;\n let count = 0;\n let options = param4;\n if (param1 instanceof Buffer) {\n buffer = param1;\n offset = param2 || 0;\n count = typeof param3 === \"number\" ? param3 : 0;\n } else {\n offset = typeof param1 === \"number\" ? param1 : 0;\n count = typeof param2 === \"number\" ? 
param2 : 0;\n options = (param3 as BlobDownloadToBufferOptions) || {};\n }\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToBuffer\", options);\n\n try {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0) {\n throw new RangeError(\"blockSize option must be >= 0\");\n }\n if (options.blockSize === 0) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n\n if (offset < 0) {\n throw new RangeError(\"offset option must be >= 0\");\n }\n\n if (count && count <= 0) {\n throw new RangeError(\"count option must be greater than 0\");\n }\n\n if (!options.conditions) {\n options.conditions = {};\n }\n\n // Customer doesn't specify length, get it\n if (!count) {\n const response = await this.getProperties({\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n count = response.contentLength! - offset;\n if (count < 0) {\n throw new RangeError(\n `offset ${offset} shouldn't be larger than blob size ${response.contentLength!}`\n );\n }\n }\n\n // Allocate the buffer of size = count if the buffer is not provided\n if (!buffer) {\n try {\n buffer = Buffer.alloc(count);\n } catch (error: any) {\n throw new Error(\n `Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\\t ${error.message}`\n );\n }\n }\n\n if (buffer.length < count) {\n throw new RangeError(\n `The buffer's size should be equal to or larger than the request count of bytes: ${count}`\n );\n }\n\n let transferProgress: number = 0;\n const batch = new Batch(options.concurrency);\n for (let off = offset; off < offset + count; off = off + options.blockSize) {\n batch.addOperation(async () => {\n // Exclusive chunk end position\n let chunkEnd = offset + count!;\n if (off + options.blockSize! < chunkEnd) {\n chunkEnd = off + options.blockSize!;\n }\n const response = await this.download(off, chunkEnd - off, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n maxRetryRequests: options.maxRetryRequestsPerBlock,\n customerProvidedKey: options.customerProvidedKey,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n const stream = response.readableStreamBody!;\n await streamToBuffer(stream, buffer!, off - offset, chunkEnd - offset);\n // Update progress after block is downloaded, in case of block trying\n // Could provide finer grained progress updating inside HTTP requests,\n // only if convenience layer download try is enabled\n transferProgress += chunkEnd - off;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n });\n }\n await batch.do();\n return buffer;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob to a local file.\n * Fails if the the given file path already exits.\n * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.\n *\n * @param filePath -\n * @param offset - From which position of the block blob to download.\n * @param count - How much data to be downloaded. 
Will download to the end when passing undefined.\n * @param options - Options to Blob download options.\n * @returns The response data for blob download operation,\n * but with readableStreamBody set to undefined since its\n * content is already read and written into a local file\n * at the specified path.\n */\n public async downloadToFile(\n filePath: string,\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToFile\", options);\n try {\n const response = await this.download(offset, count, {\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n if (response.readableStreamBody) {\n await readStreamToLocalFile(response.readableStreamBody, filePath);\n }\n\n // The stream is no longer accessible so setting it to undefined.\n (response as any).blobDownloadStream = undefined;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n private getBlobAndContainerNamesFromUrl(): { blobName: string; containerName: string } {\n let containerName;\n let blobName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`\n // http://localhost:10001/devstoreaccount1/containername/blob\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/containername/blob\n // .getPath() -> /devstoreaccount1/containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)/([^/]*)(/(.*))?\");\n containerName = pathComponents![2];\n blobName = pathComponents![4];\n } else {\n // \"https://customdomain.com/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n }\n\n // decode the encoded blobName, containerName - to get all the special characters that might be present in them\n containerName = decodeURIComponent(containerName);\n blobName = decodeURIComponent(blobName);\n\n // Azure Storage Server will replace \"\\\" with \"/\" in the blob names\n // doing the same in the SDK side so that the user doesn't have to replace \"\\\" instances in the blobName\n blobName = blobName.replace(/\\\\/g, \"/\");\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return { blobName, containerName };\n } catch (error: any) {\n throw new Error(\"Unable to extract blobName and containerName with provided information.\");\n }\n }\n\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n private async startCopyFromURL(\n copySource: string,\n options: BlobStartCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-startCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.startCopyFromURL(copySource, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions.tagConditions,\n },\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n rehydratePriority: options.rehydratePriority,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n sealBlob: options.sealBlob,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Only available for BlobClient constructed with a shared key credential.\n *\n * Generates a Blob Service Shared 
Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: BlobGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n blobName: this._name,\n snapshotTime: this._snapshot,\n versionId: this._versionId,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Delete the immutablility policy on the blob.\n *\n * @param options - Optional options to delete immutability policy on the blob.\n */\n public async deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteImmutabilityPolicy\", options);\n try {\n return await this.blobContext.deleteImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set immutablility policy on the blob.\n *\n * @param options - Optional options to set immutability policy on the blob.\n */\n public async setImmutabilityPolicy(\n immutabilityPolicy: BlobImmutabilityPolicy,\n options?: BlobSetImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setImmutabilityPolicy\", options);\n try {\n return await this.blobContext.setImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn,\n immutabilityPolicyMode: immutabilityPolicy.policyMode,\n modifiedAccessConditions: options?.modifiedAccessCondition,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set legal hold on the blob.\n *\n * @param options - Optional options to set legal hold on the blob.\n */\n public async setLegalHold(\n legalHoldEnabled: boolean,\n options?: BlobSetLegalHoldOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setLegalHold\", options);\n try {\n return await this.blobContext.setLegalHold(legalHoldEnabled, {\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link AppendBlobClient.create} operation.\n */\nexport interface AppendBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * 
Conditions to meet when creating append blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when creating append blobs. A common header\n * to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * HTTP headers to set when creating append blobs. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.seal} operation.\n */\nexport interface AppendBlobSealOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet.\n */\n conditions?: AppendBlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlock} operation.\n */\nexport interface AppendBlobAppendBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Callback to receive events on the progress of append block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation.\n */\nexport interface AppendBlobAppendBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link appendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * AppendBlobClient defines a set of operations applicable to append blobs.\n */\nexport class AppendBlobClient extends BlobClient {\n /**\n * appendBlobsContext provided by protocol layer.\n */\n private appendBlobContext: AppendBlob;\n\n /**\n *\n * Creates an instance of AppendBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n // The second parameter is undefined. 
Use anonymous credential.\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.appendBlobContext = new AppendBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new AppendBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): AppendBlobClient {\n return new AppendBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a 0-length append blob. 
Call AppendBlock to append data to an append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options - Options to the Append Block Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const appendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await appendBlobClient.create();\n * ```\n */\n public async create(options: AppendBlobCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-create\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.create(0, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.\n * If the blob with the same name already exists, the content of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options -\n */\n public async createIfNotExists(\n options: AppendBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-createIfNotExists\", options);\n const conditions = { ifNoneMatch: ETagAny };\n try {\n const res = await this.create({\n ...updatedOptions,\n conditions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Seals the append blob, making it read only.\n *\n * @param options -\n */\n public async seal(options: AppendBlobSealOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-seal\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.appendBlobContext.seal({\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Commits a new block of data to the end of the existing append blob.\n * @see 
https://docs.microsoft.com/rest/api/storageservices/append-block\n *\n * @param body - Data to be appended.\n * @param contentLength - Length of the body in bytes.\n * @param options - Options to the Append Block operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello World!\";\n *\n * // Create a new append blob and append data to the blob.\n * const newAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await newAppendBlobClient.create();\n * await newAppendBlobClient.appendBlock(content, content.length);\n *\n * // Append data to an existing append blob.\n * const existingAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await existingAppendBlobClient.appendBlock(content, content.length);\n * ```\n */\n public async appendBlock(\n body: HttpRequestBody,\n contentLength: number,\n options: AppendBlobAppendBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlock\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlock(contentLength, body, {\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob\n * where the contents are read from a source url.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url\n *\n * @param sourceURL -\n * The url to the blob that will be the source of the copy. A source blob in the same storage account can\n * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob\n * must either be public or must be authenticated via a shared access signature. 
If the source blob is\n * public, no authentication is required to perform the operation.\n * @param sourceOffset - Offset in source to be appended\n * @param count - Number of bytes to be appended as a block\n * @param options -\n */\n public async appendBlockFromURL(\n sourceURL: string,\n sourceOffset: number,\n count: number,\n options: AppendBlobAppendBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlockFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {\n abortSignal: options.abortSignal,\n sourceRange: rangeToString({ offset: sourceOffset, count }),\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n appendPositionAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link BlockBlobClient.upload} operation.\n */\nexport interface BlockBlobUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when uploading to a block blob. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when uploading to a block blob.\n */\n metadata?: Metadata;\n /**\n * Callback to receive events on the progress of upload operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Optional. 
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation.\n */\nexport interface BlockBlobSyncUploadFromURLOptions extends CommonOptions {\n /**\n * Server timeout in seconds.\n * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations\n */\n timeoutInSeconds?: number;\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value\n * pairs are specified, the operation will copy the metadata from the source blob or file to the\n * destination blob. If one or more name-value pairs are specified, the destination blob is\n * created with the specified metadata, and metadata is not copied from the source blob or file.\n * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules\n * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more\n * information.\n */\n metadata?: Metadata;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Optional, default is true. Indicates if properties from the source blob should be copied.\n */\n copySourceBlobProperties?: boolean;\n /**\n * HTTP headers to set when uploading to a block blob.\n *\n * A common header to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * Conditions to meet for the destination Azure Blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Conditions to meet for the source Azure Blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Blob query error type.\n */\nexport interface BlobQueryError {\n /**\n * Whether error is fatal. 
Fatal error will stop query.\n */\n isFatal: boolean;\n /**\n * Error name.\n */\n name: string;\n /**\n * Position in bytes of the query.\n */\n position: number;\n /**\n * Error description.\n */\n description: string;\n}\n\n/**\n * Options to query blob with JSON format.\n */\nexport interface BlobQueryJsonTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a JSON format blob.\n */\n kind: \"json\";\n}\n\n/**\n * Options to query blob with CSV format.\n */\nexport interface BlobQueryCsvTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a CSV format blob.\n */\n kind: \"csv\";\n /**\n * Column separator. Default is \",\".\n */\n columnSeparator?: string;\n /**\n * Field quote.\n */\n fieldQuote?: string;\n /**\n * Escape character.\n */\n escapeCharacter?: string;\n /**\n * Has headers. Default is false.\n */\n hasHeaders?: boolean;\n}\n\n/**\n * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}.\n */\nexport interface BlobQueryArrowConfiguration {\n /**\n * Kind.\n */\n kind: \"arrow\";\n\n /**\n * List of {@link BlobQueryArrowField} describing the schema of the data.\n */\n schema: BlobQueryArrowField[];\n}\n\n/**\n * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}.\n */\nexport interface BlobQueryParquetConfiguration {\n /**\n * Kind.\n */\n kind: \"parquet\";\n}\n\n/**\n * Options to configure {@link BlockBlobClient.query} operation.\n */\nexport interface BlockBlobQueryOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Configurations for the query input.\n */\n inputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryParquetConfiguration;\n /**\n * Configurations for the query output.\n */\n outputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration;\n /**\n * Callback to receive events on the progress of query operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlock} operation.\n */\nexport interface BlockBlobStageBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * Callback to receive events on the progress of stage block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. 
This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n\n /**\n * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation.\n */\nexport interface BlockBlobStageBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the bytes of the source Blob/File to upload.\n * If not specified, the entire content is uploaded as a single block.\n */\n range?: Range;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. 
Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.commitBlockList} operation.\n */\nexport interface BlockBlobCommitBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when committing the block list.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when committing block list.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when committing block list.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.getBlockList} operation.\n */\nexport interface BlockBlobGetBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n}\n\n/**\n * Option interface for the {@link BlockBlobClient.uploadStream} operation.\n */\nexport interface BlockBlobUploadStreamOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Blob HTTP Headers.\n *\n * A common header to set is `blobContentType`, enabling the\n * browser to provide functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n/**\n * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}.\n */\nexport interface BlockBlobParallelUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Destination block blob size in bytes.\n */\n blockSize?: number;\n\n /**\n * Blob size threshold in bytes to start concurrency uploading.\n * Default value is 256MB, blob size less than this option will\n * be uploaded via one I/O operation without concurrency.\n * You can customize a value less equal than the default value.\n */\n maxSingleShotSize?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Blob HTTP Headers. A common header to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel uploading. Must be greater than or equal to 0.\n */\n concurrency?: number;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n\n/**\n * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and\n * {@link BlockBlobClient.uploadBrowserDate}.\n */\nexport type BlobUploadCommonResponse = BlockBlobUploadHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse;\n};\n\n/**\n * BlockBlobClient defines a set of operations applicable to block blobs.\n */\nexport class BlockBlobClient extends BlobClient {\n /**\n * blobContext provided by protocol layer.\n *\n * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API\n * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient.\n */\n private _blobContext: StorageBlob;\n\n /**\n * blockBlobContext provided by protocol layer.\n */\n private blockBlobContext: BlockBlob;\n\n /**\n *\n * Creates an instance of BlockBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n if (blobNameOrOptions && typeof blobNameOrOptions !== \"string\") {\n options = blobNameOrOptions as StoragePipelineOptions;\n }\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.blockBlobContext = new BlockBlob(this.storageClientContext);\n this._blobContext = new StorageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new BlockBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a URL to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): BlockBlobClient {\n return new BlockBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Quick query for a JSON or CSV formatted blob.\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Query and convert a blob to a string\n * const queryBlockBlobResponse = await blockBlobClient.query(\"select * from BlobStorage\");\n * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();\n * console.log(\"Query blob content:\", downloaded);\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * @param query -\n * @param options -\n */\n public async query(\n query: string,\n options: BlockBlobQueryOptions = {}\n ): Promise {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-query\", options);\n\n try {\n if (!isNode) {\n throw new Error(\"This operation currently is only supported in Node.js.\");\n }\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const response = await this._blobContext.query({\n abortSignal: options.abortSignal,\n queryRequest: {\n queryType: \"SQL\",\n expression: query,\n inputSerialization: toQuerySerialization(options.inputTextConfiguration),\n outputSerialization: toQuerySerialization(options.outputTextConfiguration),\n },\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n return new BlobQueryResponse(response, {\n abortSignal: options.abortSignal,\n onProgress: options.onProgress,\n onError: options.onError,\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link stageBlock} and {@link commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link uploadFile},\n * {@link uploadStream} or {@link uploadBrowserData} for better performance\n * with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. 
Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to the Block Blob Upload operation.\n * @returns Response data for the Block Blob Upload operation.\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public async upload(\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-upload\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.upload(contentLength, body, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new Block Blob where the contents of the blob are read from a given URL.\n * This API is supported beginning with the 2020-04-08 version. Partial updates\n * are not supported with Put Blob from URL; the content of an existing blob is overwritten with\n * the content of the new blob. To perform partial updates to a block blob’s contents using a\n * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.\n *\n * @param sourceURL - Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. 
Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Optional parameters.\n */\n\n public async syncUploadFromURL(\n sourceURL: string,\n options: BlockBlobSyncUploadFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-syncUploadFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, {\n ...options,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions?.ifMatch,\n sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Uploads the specified block to the block blob's \"staging area\" to be later\n * committed by a call to commitBlockList.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param body - Data to upload to the staging area.\n * @param contentLength - Number of bytes to upload.\n * @param options - Options to the Block Blob Stage Block operation.\n * @returns Response data for the Block Blob Stage Block operation.\n */\n public async stageBlock(\n blockId: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobStageBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlock\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlock(blockId, contentLength, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Stage Block From URL operation creates a new block to be committed as part\n * of a blob where the contents are read from a URL.\n * This API is available starting in version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param sourceURL - Specifies the URL of the blob. 
The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Options to the Block Blob Stage Block From URL operation.\n * @returns Response data for the Block Blob Stage Block From URL operation.\n */\n public async stageBlockFromURL(\n blockId: string,\n sourceURL: string,\n offset: number = 0,\n count?: number,\n options: BlockBlobStageBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlockFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes a blob by specifying the list of block IDs that make up the blob.\n * In order to be written as part of a blob, a block must have been successfully written\n * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to\n * update a blob by uploading only those blocks that have changed, then committing the new and existing\n * blocks together. 
Any blocks not specified in the block list and permanently deleted.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list\n *\n * @param blocks - Array of 64-byte value that is base64-encoded\n * @param options - Options to the Block Blob Commit Block List operation.\n * @returns Response data for the Block Blob Commit Block List operation.\n */\n public async commitBlockList(\n blocks: string[],\n options: BlockBlobCommitBlockListOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-commitBlockList\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.commitBlockList(\n { latest: blocks },\n {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of blocks that have been uploaded as part of a block blob\n * using the specified block list filter.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list\n *\n * @param listType - Specifies whether to return the list of committed blocks,\n * the list of uncommitted blocks, or both lists together.\n * @param options - Options to the Block Blob Get Block List operation.\n * @returns Response data for the Block Blob Get Block List operation.\n */\n public async getBlockList(\n listType: BlockListType,\n options: BlockBlobGetBlockListOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-getBlockList\", options);\n try {\n const res = await this.blockBlobContext.getBlockList(listType, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n if (!res.committedBlocks) {\n res.committedBlocks = [];\n }\n\n if (!res.uncommittedBlocks) {\n res.uncommittedBlocks = [];\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level functions\n\n /**\n * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the 
browser to provide\n * functionality based on file type.\n *\n * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView\n * @param options -\n */\n public async uploadData(\n data: Buffer | Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadData\", options);\n try {\n if (isNode) {\n let buffer: Buffer;\n if (data instanceof Buffer) {\n buffer = data;\n } else if (data instanceof ArrayBuffer) {\n buffer = Buffer.from(data);\n } else {\n data = data as ArrayBufferView;\n buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n\n return this.uploadSeekableInternal(\n (offset: number, size: number): Buffer => buffer.slice(offset, offset + size),\n buffer.byteLength,\n updatedOptions\n );\n } else {\n const browserBlob = new Blob([data]);\n return this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n }\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN BROWSERS.\n *\n * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.\n *\n * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call\n * {@link commitBlockList} to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n * @deprecated Use {@link uploadData} instead.\n *\n * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView\n * @param options - Options to upload browser data.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadBrowserData(\n browserData: Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadBrowserData\", options);\n try {\n const browserBlob = new Blob([browserData]);\n return await this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n *\n * Uploads data to block blob. 
Requires a bodyFactory as the data source,\n * which need to return a {@link HttpRequestBody} object with the offset and size provided.\n *\n * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @param bodyFactory -\n * @param size - size of the data to upload.\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n private async uploadSeekableInternal(\n bodyFactory: (offset: number, size: number) => HttpRequestBody,\n size: number,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {\n throw new RangeError(\n `blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`\n );\n }\n\n if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {\n options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;\n }\n if (\n options.maxSingleShotSize < 0 ||\n options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES\n ) {\n throw new RangeError(\n `maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`\n );\n }\n\n if (options.blockSize === 0) {\n if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(`${size} is too larger to upload to a block blob.`);\n }\n if (size > options.maxSingleShotSize) {\n options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);\n if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n }\n }\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadSeekableInternal\", options);\n\n try {\n if (size <= options.maxSingleShotSize) {\n return await this.upload(bodyFactory(0, size), size, updatedOptions);\n }\n\n const numBlocks: number = Math.floor((size - 1) / options.blockSize) + 1;\n if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(\n `The buffer's size is too big or the BlockSize is too small;` +\n `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`\n );\n }\n\n const blockList: string[] = [];\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n\n const batch = new Batch(options.concurrency);\n for (let i = 0; i < numBlocks; i++) {\n batch.addOperation(async (): Promise => {\n const blockID = generateBlockID(blockIDPrefix, i);\n const start = options.blockSize! * i;\n const end = i === numBlocks - 1 ? 
size : start + options.blockSize!;\n const contentLength = end - start;\n blockList.push(blockID);\n await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n // Update progress after block is successfully uploaded to server, in case of block trying\n // TODO: Hook with convenience layer progress event in finer level\n transferProgress += contentLength;\n if (options.onProgress) {\n options.onProgress!({\n loadedBytes: transferProgress,\n });\n }\n });\n }\n await batch.do();\n\n return this.commitBlockList(blockList, updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a local file in blocks to a block blob.\n *\n * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList\n * to commit the block list.\n *\n * @param filePath - Full path of local file\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadFile(\n filePath: string,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadFile\", options);\n try {\n const size = (await fsStat(filePath)).size;\n return await this.uploadSeekableInternal(\n (offset, count) => {\n return () =>\n fsCreateReadStream(filePath, {\n autoClose: true,\n end: count ? offset + count - 1 : Infinity,\n start: offset,\n });\n },\n size,\n {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a Node.js Readable stream into block blob.\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * * Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n *\n * @param stream - Node.js Readable stream\n * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB\n * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,\n * positive correlation with max uploading concurrency. 
Default value is 5\n * @param options - Options to Upload Stream to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadStream(\n stream: Readable,\n bufferSize: number = DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n maxConcurrency: number = 5,\n options: BlockBlobUploadStreamOptions = {}\n ): Promise {\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadStream\", options);\n\n try {\n let blockNum = 0;\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n const blockList: string[] = [];\n\n const scheduler = new BufferScheduler(\n stream,\n bufferSize,\n maxConcurrency,\n async (body, length) => {\n const blockID = generateBlockID(blockIDPrefix, blockNum);\n blockList.push(blockID);\n blockNum++;\n\n await this.stageBlock(blockID, body, length, {\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n\n // Update progress after block is successfully uploaded to server, in case of block trying\n transferProgress += length;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n },\n // concurrency should set a smaller value than maxConcurrency, which is helpful to\n // reduce the possibility when a outgoing handler waits for stream data, in\n // this situation, outgoing handlers are blocked.\n // Outgoing queue shouldn't be empty.\n Math.ceil((maxConcurrency / 4) * 3)\n );\n await scheduler.do();\n\n return await this.commitBlockList(blockList, {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure the {@link PageBlobClient.create} operation.\n */\nexport interface PageBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when creating a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.uploadPages} operation.\n */\nexport interface PageBlobUploadPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Callback to receive events on the progress of upload pages operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content. 
This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.clearPages} operation.\n */\nexport interface PageBlobClearPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when clearing pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure page blob - get page ranges segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesSegment}\n * - {@link PageBlobClient.listPageRangeItemSegments}\n * - {@link PageBlobClient.listPageRangeItems}\n */\ninterface PageBlobListPageRangesSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRanges} operation.\n */\nexport interface PageBlobListPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n /**\n * (unused)\n */\n range?: string;\n}\n\n/**\n * Options to configure page blob - get page ranges diff segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesDiffSegment}\n * - {@link PageBlobClient.listPageRangeDiffItemSegments}\n * - {@link PageBlobClient.listPageRangeDiffItems}\n */\ninterface PageBlobListPageRangesDiffSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation.\n */\nexport interface PageBlobListPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.resize} operation.\n */\nexport interface PageBlobResizeOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when resizing a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link PageBlobClient.updateSequenceNumber} operation.\n */\nexport interface PageBlobUpdateSequenceNumberOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.startCopyIncremental} operation.\n */\nexport interface PageBlobStartCopyIncrementalOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when starting a copy incremental operation.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation.\n */\nexport interface PageBlobUploadPagesFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. 
Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * PageBlobClient defines a set of operations applicable to page blobs.\n */\nexport class PageBlobClient extends BlobClient {\n /**\n * pageBlobsContext provided by protocol layer.\n */\n private pageBlobContext: PageBlob;\n\n /**\n *\n * Creates an instance of PageBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n *\n * @param url - A URL string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.pageBlobContext = new PageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new PageBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): PageBlobClient {\n return new PageBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a page blob of the specified length. 
Call uploadPages to upload data\n * data to a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options - Options to the Page Blob Create operation.\n * @returns Response data for the Page Blob Create operation.\n */\n public async create(\n size: number,\n options: PageBlobCreateOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-create\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.create(0, size, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n blobSequenceNumber: options.blobSequenceNumber,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a page blob of the specified length. Call uploadPages to upload data\n * data to a page blob. If the blob with the same name already exists, the content\n * of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options -\n */\n public async createIfNotExists(\n size: number,\n options: PageBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-createIfNotExists\", options);\n try {\n const conditions = { ifNoneMatch: ETagAny };\n const res = await this.create(size, {\n ...options,\n conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes 1 or more pages to the page blob. 
The start and end offsets must be a multiple of 512.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param body - Data to upload\n * @param offset - Offset of destination page blob\n * @param count - Content length of the body, also number of bytes to be uploaded\n * @param options - Options to the Page Blob Upload Pages operation.\n * @returns Response data for the Page Blob Upload Pages operation.\n */\n public async uploadPages(\n body: HttpRequestBody,\n offset: number,\n count: number,\n options: PageBlobUploadPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPages\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPages(count, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the\n * contents are read from a URL.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url\n *\n * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication\n * @param sourceOffset - The source offset to copy from. 
Pass 0 to copy from the beginning of source page blob\n * @param destOffset - Offset of destination page blob\n * @param count - Number of bytes to be uploaded from source page blob\n * @param options -\n */\n public async uploadPagesFromURL(\n sourceURL: string,\n sourceOffset: number,\n destOffset: number,\n count: number,\n options: PageBlobUploadPagesFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPagesFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPagesFromURL(\n sourceURL,\n rangeToString({ offset: sourceOffset, count }),\n 0,\n rangeToString({ offset: destOffset, count }),\n {\n abortSignal: options.abortSignal,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n sequenceNumberAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Frees the specified pages from the page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param offset - Starting byte position of the pages to clear.\n * @param count - Number of bytes to clear.\n * @param options - Options to the Page Blob Clear Pages operation.\n * @returns Response data for the Page Blob Clear Pages operation.\n */\n public async clearPages(\n offset: number = 0,\n count?: number,\n options: PageBlobClearPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-clearPages\", options);\n try {\n return await this.pageBlobContext.clearPages(0, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of valid page ranges for a page blob or snapshot of a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns Response data for the Page Blob Get Ranges operation.\n */\n 
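The PageBlobClient write path documented above (create, uploadPages, clearPages, getPageRanges) requires 512-byte-aligned offsets and sizes. The snippet below is a minimal, illustrative sketch of that flow; it is not part of the vendored file, it assumes a `containerClient` obtained elsewhere, and the blob name is made up.

```js
// Illustrative sketch only; not part of the vendored @azure/storage-blob sources.
// Assumes `containerClient` was obtained elsewhere, e.g.
// blobServiceClient.getContainerClient("mycontainer"); the blob name is made up.
const pageBlobClient = containerClient.getPageBlobClient("disk.vhd");

// Page blob size, offsets and counts must all be multiples of 512 bytes.
await pageBlobClient.create(1024);

// Write one 512-byte page at offset 0.
const page = Buffer.alloc(512, "a");
await pageBlobClient.uploadPages(page, 0, page.length);

// Clear the first page again, then list the valid/clear ranges that remain.
await pageBlobClient.clearPages(0, 512);
const ranges = await pageBlobClient.getPageRanges(0, 1024);
console.log(ranges.pageRange, ranges.clearRange);
```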
public async getPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobGetPageRangesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRanges\", options);\n try {\n return await this.pageBlobContext\n .getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesSegment returns a single segment of page ranges starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to PageBlob Get Page Ranges Segment operation.\n */\n private async listPageRangesSegment(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesSegment\", options);\n try {\n return await this.pageBlobContext.getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n marker: marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. 
The marker value is opaque to the client.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItemSegments(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(\n offset,\n count,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItems(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeItemSegments(\n offset,\n count,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges for a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges for a page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRanges()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRanges();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * 
@param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeItems(offset, count, options);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeItemSegments(offset, count, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...options,\n });\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page Range Diff operation.\n */\n public async getPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesDiff\", options);\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshot,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesDiffSegment returns a single segment of page ranges starting from the\n * specified Marker for difference between previous snapshot and the target page blob.\n * Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesDiffSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async listPageRangesDiffSegment(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): Promise {\n const { span, updatedOptions } = 
createSpan(\"PageBlobClient-getPageRangesDiffSegment\", options);\n try {\n return await this.pageBlobContext.getPageRangesDiff({\n abortSignal: options?.abortSignal,\n leaseAccessConditions: options?.conditions,\n modifiedAccessConditions: {\n ...options?.conditions,\n ifTags: options?.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshotOrUrl,\n range: rangeToString({\n offset: offset,\n count: count,\n }),\n marker: marker,\n maxPageSize: options?.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}\n *\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. The marker value is opaque to the client.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItemSegments(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItems(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from 
`blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRangesDiff();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobListPageRangesDiffOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, {\n ...options,\n });\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshot,\n settings.continuationToken,\n {\n maxPageSize: settings.maxPageSize,\n ...options,\n }\n );\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page 
Range Diff operation.\n */\n public async getPageRangesDiffForManagedDisks(\n offset: number,\n count: number,\n prevSnapshotUrl: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\n \"PageBlobClient-GetPageRangesDiffForManagedDisks\",\n options\n );\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevSnapshotUrl,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Resizes the page blob to the specified size (which must be a multiple of 512).\n * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties\n *\n * @param size - Target size\n * @param options - Options to the Page Blob Resize operation.\n * @returns Response data for the Page Blob Resize operation.\n */\n public async resize(\n size: number,\n options: PageBlobResizeOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-resize\", options);\n try {\n return await this.pageBlobContext.resize(size, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets a page blob's sequence number.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.\n * @param sequenceNumber - Required if sequenceNumberAction is max or update\n * @param options - Options to the Page Blob Update Sequence Number operation.\n * @returns Response data for the Page Blob Update Sequence Number operation.\n */\n public async updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n sequenceNumber?: number,\n options: PageBlobUpdateSequenceNumberOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-updateSequenceNumber\", options);\n try {\n return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {\n abortSignal: options.abortSignal,\n blobSequenceNumber: sequenceNumber,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.\n * The snapshot is copied such that only the differential changes between the previously\n * copied snapshot are transferred to 
the destination.\n * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.\n * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob\n * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots\n *\n * @param copySource - Specifies the name of the source page blob snapshot. For example,\n * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Options to the Page Blob Copy Incremental operation.\n * @returns Response data for the Page Blob Copy Incremental operation.\n */\n public async startCopyIncremental(\n copySource: string,\n options: PageBlobStartCopyIncrementalOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-startCopyIncremental\", options);\n try {\n return await this.pageBlobContext.copyIncremental(copySource, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js new file mode 100644 index 0000000..e6bd251 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js @@ -0,0 +1,1261 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +import { getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { Container } from "./generated/src/operations"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { StorageClient } from "./StorageClient"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { appendToURLPath, appendToURLQuery, BlobNameToString, ConvertInternalResponseOfListBlobFlat, ConvertInternalResponseOfListBlobHierarchy, EscapePath, extractConnectionStringParts, isIpEndpointStyle, parseObjectReplicationRecord, toTags, truncatedISO8061Date, } from "./utils/utils.common"; +import { generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues"; +import { BlobLeaseClient } from "./BlobLeaseClient"; +import { AppendBlobClient, BlobClient, BlockBlobClient, PageBlobClient, } from "./Clients"; +import { BlobBatchClient } from "./BlobBatchClient"; +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export class ContainerClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); + } + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - Options to Container Create operation. 
+ * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); + try { + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when checking container existence", + }); + return false; + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. 
+ */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + try { + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); + try { + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. 
+ */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + try { + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. 
+ */ + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); + try { + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); + try { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. 
+ * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + try { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); + try { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. 
+ * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); + try { + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. + */ + async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); + try { + const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listSegments(marker, options = {}) { + return __asyncGenerator(this, arguments, function* listSegments_1() { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield __await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield __await(yield __await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. + */ + listItems(options = {}) { + return __asyncGenerator(this, arguments, function* listItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const listBlobsFlatSegmentResponse = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listHierarchySegments(delimiter, marker, options = {}) { + return __asyncGenerator(this, arguments, function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield __await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield __await(yield __await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter, options = {}) { + return __asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { + var e_2, _a; + let marker; + try { + for (var _b = __asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield __await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield __await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield __await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield __await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". 
+ // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.getPath().split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. 
+ */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} +//# sourceMappingURL=ContainerClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map new file mode 100644 index 0000000..829ed3e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ContainerClient.js","sourceRoot":"","sources":["../../../src/ContainerClient.ts"],"names":[],"mappings":";AAGA,OAAO,EACL,uBAAuB,EAGvB,MAAM,EACN,iBAAiB,EAEjB,UAAU,GACX,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAgCvD,OAAO,EAAE,WAAW,EAAgB,cAAc,EAA0B,MAAM,YAAY,CAAC;AAC/F,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,gBAAgB,EAChB,qCAAqC,EACrC,0CAA0C,EAC1C,UAAU,EACV,4BAA4B,EAC5B,iBAAiB,EACjB,4BAA4B,EAC5B,MAAM,EACN,oBAAoB,GACrB,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,8BAA8B,EAAE,MAAM,8BAA8B,CAAC;AAC9E,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EACL,gBAAgB,EAChB,UAAU,EAEV,eAAe,EAGf,cAAc,GACf,MAAM,WAAW,CAAC;AACnB,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAyiBpD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,aAAa;IAgEhD,YACE,qBAA6B,EAC7B,mCAKgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAE1D,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,CAAC;oBAE7E,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC;wBACtE,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAC;SAC5E;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,uBAAuB,EAAE,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACnE,CAAC;IApID;;OAEG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;IAC7B,CAAC;IAiID;;;;;;;;;;;;;;;;OAgBG;IACI,KAAK,CAAC,MAAM,CAAC,UAAkC,EAAE;QACtD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,gDAAgD;YAChD,uFAAuF;YACvF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,iCACpC,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EACrD
,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,iBAAiB,CAC5B,UAAkC,EAAE;;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,wBAAwB,EAAE;gBACrD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EACL,iFAAiF;iBACpF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAAkC,EAAE;QACtD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,cAAc,EAAE,cAAc,CAAC,cAAc;aAC9C,CAAC,CAAC;YACH,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,sDAAsD;iBAChE,CAAC,CAAC;gBACH,OAAO,KAAK,CAAC;aACd;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,aAAa,CAAC,QAAgB;QACnC,OAAO,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACxF,CAAC;IAED;;;;OAIG;IACI,mBAAmB,CAAC,QAAgB;QACzC,OAAO,IAAI,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC9F,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACI,kBAAkB,CAAC,QAAgB;QACxC,OAAO,IAAI,eAAe,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC7F,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,QAAgB;QACvC,OAAO,IAAI,cAAc,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC5F,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,aAAa,CACxB,UAAyC,EAAE;QAE3C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;QACtF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,+BAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,OAAO,CAAC,UAAU,GAClB,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,MAAM,CACjB,UAAwC,EAAE;QAE1C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,iBACvC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,cAAc,CACzB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,gCAAgC,EAAE,OAAO,CAAC,CAAC;QAEvF,IAAI;YAC
F,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,iEAAiE;iBAC3E,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,WAAW,CACtB,QAAmB,EACnB,UAAuC,EAAE;QAEzC,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,IAAI,OAAO,CAAC,UAAU,CAAC,iBAAiB,EAAE;YACxC,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;SACH;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,eAAe,CAC1B,UAA2C,EAAE;QAE7C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QAExF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,iBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,IACtC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,GAAG,GAAqC;gBAC5C,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,gBAAgB,EAAE,QAAQ,CAAC,gBAAgB;gBAC3C,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,YAAY,EAAE,QAAQ,CAAC,YAAY;gBACnC,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe;gBACzC,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,QAAQ,CAAC,OAAO;aAC1B,CAAC;YAEF,KAAK,MAAM,UAAU,IAAI,QAAQ,EAAE;gBACjC,IAAI,YAAY,GAAQ,SAAS,CAAC;gBAClC,IAAI,UAAU,CAAC,YAAY,EAAE;oBAC3B,YAAY,GAAG;wBACb,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;qBACjD,CAAC;oBAEF,IAAI,UAAU,CAAC,YAAY,CAAC,SAAS,EAAE;wBACrC,YAAY,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;qBACtE;oBAED,IAAI,UAAU,CAAC,YAAY,CAAC,QAAQ,EAAE;wBACpC,YAAY,CAAC,QAAQ,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;qBACpE;iBACF;gBAED,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC;oBACzB,YAAY;oBACZ,EAAE,EAAE,UAAU,CAAC,EAAE;iBAClB,CAAC,CAAC;aACJ;YAED,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;OAgBG;IACI,KAAK,CAAC,eAAe,CAC1B,MAAyB,EACzB,YAAiC,EACjC,UAA2C,EAAE;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,GAAG,GAA4B,EAAE,CAAC;YACxC,KAAK,MAAM,UAAU,IAAI,YAAY,IAAI,EAAE,EAAE;gBAC3C,GAAG,CAAC,IAAI,CAAC;oBACP,YAAY,EAAE;wBACZ,SAAS,EAAE,UAAU,CAAC,YAAY,CAAC,SAAS;4BAC1C,CAAC,CAAC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC;4BACzD,CAAC,CAAC,EAAE;wBACN,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;wBAChD,QAAQ,EAAE,UAAU,CAAC,YAAY,CAAC,QAAQ;4BACxC,CAAC,CAAC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;4BACxD,CAAC,CAAC,EAAE;qBACP;oBACD,EAAE,EAAE,UAAU,CAAC,EAAE;iBAClB,CAAC,CAAC;aACJ;YAED,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,iBAChD,WAAW,EA
AE,OAAO,CAAC,WAAW,EAChC,MAAM,EACN,YAAY,EAAE,GAAG,EACjB,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,kBAAkB,CAAC,cAAuB;QAC/C,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;OAqBG;IACI,KAAK,CAAC,eAAe,CAC1B,QAAgB,EAChB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,CAAC;YAC1D,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,IAAI,EAAE,aAAa,EAAE,cAAc,CAAC,CAAC;YACnF,OAAO;gBACL,eAAe;gBACf,QAAQ;aACT,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,QAAgB,EAChB,UAAsC,EAAE;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YAC9C,IAAI,OAAO,CAAC,SAAS,EAAE;gBACrB,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;aACxD;YACD,OAAO,MAAM,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;SAChD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACK,KAAK,CAAC,mBAAmB,CAC/B,MAAe,EACf,UAA4C,EAAE;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,+BAC9D,MAAM,IACH,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE,qCAAqC,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,KAElF,OAAO,kCACF,QAAQ,CAAC,OAAO,KACnB,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;wBAC5D,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;wBACF,OAAO,QAAQ,CAAC;oBAClB,CAAC,CAAC,MAEL,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACK,KAAK,CAAC,wBAAwB,CACpC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE;;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QACF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,wBAAwB,CAAC,SAAS,gCAC7E,MAAM,IACH,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE,0CAA0C,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,KAEvF,OAAO,kCACF,QAAQ,CAAC,OAAO,KACnB,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;wBAC5D,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;wBACF,OAAO,QAAQ,CAAC;oBAClB,CAAC,CAAC,EACF,YAAY,EAAE,MAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,EAAE,EAAE;wBACtE,MAAM,UAAU,mCACX,kBAAkB,KACrB,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC,GAChD,CAAC;wBACF,OAAO,UAAU,CAAC;oBACpB,CAAC,CAAC,MAEL,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,O
AAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACY,YAAY,CACzB,MAAe,EACf,UAA4C,EAAE;;YAE9C,IAAI,4BAA4B,CAAC;YACjC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,4BAA4B,GAAG,cAAM,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBAC/E,MAAM,GAAG,4BAA4B,CAAC,iBAAiB,CAAC;oBACxD,oBAAM,cAAM,4BAA4B,CAAA,CAAA,CAAC;iBAC1C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;OAIG;IACY,SAAS,CACtB,UAA4C,EAAE;;;YAE9C,IAAI,MAA0B,CAAC;;gBAC/B,KAAiD,IAAA,KAAA,cAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,IAAA;oBAAxE,MAAM,4BAA4B,WAAA,CAAA;oBAC3C,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,4BAA4B,CAAC,OAAO,CAAC,SAAS,CAAA,CAAA,CAAA,CAAC;iBACvD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAqEG;IACI,aAAa,CAClB,UAAqC,EAAE;QAEvC,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;YAClC,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;SAClC;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;SACrC;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;YACrC,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SACpC;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,cAAc,mCACf,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CACpD,CAAC;QAEF,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;QAC5C,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,kBACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,cAAc,EACjB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;OAYG;IACY,qBAAqB,CAClC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE;;YAE9C,IAAI,iCAAiC,CAAC;YACtC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,iCAAiC,GAAG,cAAM,IAAI,CAAC,wBAAwB,CACrE,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,iCAAiC,CAAC,iBAAiB,CAAC;oBAC7D,oBAAM,cAAM,iCAAiC,CAAA,CAAA,CAAC;iBAC/C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;OAKG;IACY,oBAAoB,CACjC,SAAiB,EACjB,UAA4C,EAAE;;;YAE9C,IAAI,MAA0B,CAAC;;gBAC/B,KAAsD,IAAA,KAAA,cAAA,IAAI,CAAC,qBAAqB,CAC9E,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,iCAAiC,WAAA,CAAA;oBAKhD,MAAM,OAAO,GAAG,iCAAiC,CAAC,OAAO,CAAC;oBAC1D,IAAI,OAAO,CAAC,YAAY,EAAE;wBACxB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,YAAY,EAAE;4BACzC,oCACE,IAAI,EAAE,QAAQ,IACX,MAAM,EACV,CAAC;yBACH;qBACF;oBACD,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,SAAS,EAAE;wBACpC,oCAAQ,IAAI,EAAE,MAAM,IAAK,IAAI,EAAE,CAAC;qBACjC;iBACF;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA6EG;IACI,oBAAoB,CACzB,SAAiB,EACjB,UAAqC,EAAE;QAKvC,IAAI,SAAS,KAAK,EAAE,EAAE;YACpB,MAAM,IAAI,UAAU,CAAC,iDAAiD,CAAC,CAAC;SACzE;QAED,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,C
AAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;YAClC,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;SAClC;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;SACrC;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;YACrC,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SACpC;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,cAAc,mCACf,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CACpD,CAAC;QACF,gEAAgE;QAChE,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;QAClE,OAAO;YACL;;eAEG;YACH,KAAK,CAAC,IAAI;gBACR,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,qBAAqB,CAAC,SAAS,EAAE,QAAQ,CAAC,iBAAiB,kBACrE,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,cAAc,EACjB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;OAgBG;IACK,KAAK,CAAC,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE;QAEpD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,MAAA,IAAI,CAAC,IAAI,0CAAE,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;qBAC1C;oBACD,uCAAY,IAAI,KAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,IAAG;gBACxD,CAAC,CAAC,GACH,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACY,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE;;YAEpD,IAAI,QAAQ,CAAC;YACb,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,QAAQ,GAAG,cAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;oBACtC,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,oBAAM,QAAQ,CAAA,CAAC;iBAChB,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;;OAQG;IACY,oBAAoB,CACjC,sBAA8B,EAC9B,UAAkD,EAAE;;;YAEpD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;iBACtB;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA4EG;IACI,eAAe,CACpB,sBAA8B,EAC9B,UAA0C,EAAE;QAE5C,8CAA8C;QAC9C,MAAM,kBAAkB,qBACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAA
E;gBACtC,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,kBACpF,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAEO,uBAAuB;QAC7B,IAAI,aAAa,CAAC;QAClB,IAAI;YACF,mCAAmC;YACnC,mEAAmE;YACnE,yDAAyD;YACzD,+FAA+F;YAC/F,wDAAwD;YAExD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAE7C,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;gBACjD,2DAA2D;gBAC3D,4CAA4C;gBAC5C,+BAA+B;gBAC/B,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;iBAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;gBACvC,8FAA8F;gBAC9F,wHAAwH;gBACxH,gDAAgD;gBAChD,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;iBAAM;gBACL,4CAA4C;gBAC5C,+BAA+B;gBAC/B,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;YAED,mGAAmG;YACnG,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAElD,IAAI,CAAC,aAAa,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YAED,OAAO,aAAa,CAAC;SACtB;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,cAAc,CAAC,OAAuC;QAC3D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;YAC7B,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;gBAC5D,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;aACH;YAED,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,IAC/B,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;;;OAMG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { Container } from \"./generated/src/operations\";\nimport {\n BlobDeleteResponse,\n BlobPrefix,\n BlobProperties,\n BlockBlobUploadResponse,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ContainerEncryptionScope,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesResponse,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataResponse,\n FilterBlobItem,\n FilterBlobSegment,\n FilterBlobSegmentModel,\n LeaseAccessConditions,\n ListBlobsFlatSegmentResponseModel,\n ListBlobsHierarchySegmentResponseModel,\n PublicAccessType,\n SignedIdentifierModel,\n} from \"./generatedModels\";\nimport {\n Metadata,\n ObjectReplicationPolicy,\n Tags,\n ContainerRequestConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n BlobNameToString,\n ConvertInternalResponseOfListBlobFlat,\n ConvertInternalResponseOfListBlobHierarchy,\n EscapePath,\n extractConnectionStringParts,\n 
isIpEndpointStyle,\n parseObjectReplicationRecord,\n toTags,\n truncatedISO8061Date,\n} from \"./utils/utils.common\";\nimport { ContainerSASPermissions } from \"./sas/ContainerSASPermissions\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n AppendBlobClient,\n BlobClient,\n BlobDeleteOptions,\n BlockBlobClient,\n BlockBlobUploadOptions,\n CommonGenerateSasUrlOptions,\n PageBlobClient,\n} from \"./Clients\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { ListBlobsIncludeItem } from \"./generated/src\";\n\n/**\n * Options to configure {@link ContainerClient.create} operation.\n */\nexport interface ContainerCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the container.\n */\n metadata?: Metadata;\n /**\n * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include:\n * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account.\n * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request.\n */\n access?: PublicAccessType;\n /**\n * Container encryption scope info.\n */\n containerEncryptionScope?: ContainerEncryptionScope;\n}\n\n/**\n * Options to configure {@link ContainerClient.getProperties} operation.\n */\nexport interface ContainerGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.delete} operation.\n */\nexport interface ContainerDeleteMethodOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting the container.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.exists} operation.\n */\nexport interface ContainerExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure {@link ContainerClient.setMetadata} operation.\n */\nexport interface ContainerSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the 
@azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.getAccessPolicy} operation.\n */\nexport interface ContainerGetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Signed identifier.\n */\nexport interface SignedIdentifier {\n /**\n * a unique id\n */\n id: string;\n /**\n * Access Policy\n */\n accessPolicy: {\n /**\n * Optional. The date-time the policy is active\n */\n startsOn?: Date;\n /**\n * Optional. The date-time the policy expires\n */\n expiresOn?: Date;\n /**\n * The permissions for the acl policy\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n */\n permissions?: string;\n };\n}\n\n/**\n * Contains response data for the {@link ContainerClient.getAccessPolicy} operation.\n */\nexport declare type ContainerGetAccessPolicyResponse = {\n signedIdentifiers: SignedIdentifier[];\n} & ContainerGetAccessPolicyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: SignedIdentifierModel[];\n };\n };\n\n/**\n * Options to configure {@link ContainerClient.setAccessPolicy} operation.\n */\nexport interface ContainerSetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting the access policy.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure Container - Acquire Lease operation.\n */\nexport interface ContainerAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Release Lease operation.\n */\nexport interface ContainerReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Renew Lease operation.\n */\nexport interface ContainerRenewLeaseOptions extends CommonOptions {\n /**\n * An 
implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Break Lease operation.\n */\nexport interface ContainerBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Change Lease operation.\n */\nexport interface ContainerChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.deleteBlob} operation.\n */\nexport interface ContainerDeleteBlobOptions extends BlobDeleteOptions {\n /**\n * An opaque DateTime value that, when present, specifies the version\n * of the blob to delete. It's for service version 2019-10-10 and newer.\n */\n versionId?: string;\n}\n\n/**\n * Options to configure Container - List Segment operations.\n *\n * See:\n * - {@link ContainerClient.listSegments}\n * - {@link ContainerClient.listBlobFlatSegment}\n * - {@link ContainerClient.listBlobHierarchySegment}\n * - {@link ContainerClient.listHierarchySegments}\n * - {@link ContainerClient.listItemsByHierarchy}\n */\ninterface ContainerListBlobsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify one or more datasets to include in the response.\n */\n include?: ListBlobsIncludeItem[];\n}\n\n/**\n * An interface representing BlobHierarchyListSegment.\n */\nexport interface BlobHierarchyListSegment {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface ListBlobsHierarchySegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobHierarchySegment operation.\n */\nexport type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse &\n ContainerListBlobHierarchySegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobHierarchySegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsHierarchySegmentResponseModel;\n };\n };\n\n/**\n * An Azure Storage blob\n */\nexport interface BlobItem {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n properties: BlobProperties;\n metadata?: { [propertyName: string]: string };\n tags?: Tags;\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n hasVersionsOnly?: boolean;\n}\n\n/**\n * An interface representing BlobFlatListSegment.\n */\nexport interface BlobFlatListSegment {\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface ListBlobsFlatSegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobFlatSegment operation.\n */\nexport type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse &\n ContainerListBlobFlatSegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobFlatSegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsFlatSegmentResponseModel;\n };\n };\n\n/**\n * Options to configure Container - List Blobs operations.\n *\n * See:\n * - {@link ContainerClient.listBlobsFlat}\n * - {@link ContainerClient.listBlobsByHierarchy}\n */\nexport interface ContainerListBlobsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n\n /**\n * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response.\n */\n includeCopy?: boolean;\n /**\n * Specifies whether soft deleted blobs should be 
included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether blob metadata be returned in the response.\n */\n includeMetadata?: boolean;\n /**\n * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response.\n */\n includeSnapshots?: boolean;\n /**\n * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response.\n */\n includeVersions?: boolean;\n /**\n * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response.\n */\n includeUncommitedBlobs?: boolean;\n /**\n * Specifies whether blob tags be returned in the response.\n */\n includeTags?: boolean;\n /**\n * Specifies whether deleted blob with versions be returned in the response.\n */\n includeDeletedWithVersions?: boolean;\n /**\n * Specifies whether blob immutability policy be returned in the response.\n */\n includeImmutabilityPolicy?: boolean;\n /**\n * Specifies whether blob legal hold be returned in the response.\n */\n includeLegalHold?: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.createIfNotExists} operation.\n */\nexport interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse {\n /**\n * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.deleteIfExists} operation.\n */\nexport interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse {\n /**\n * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Options to configure {@link ContainerClient.generateSasUrl} operation.\n */\nexport interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: ContainerSASPermissions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.findBlobsByTagsSegment} operation.\n */\ninterface ContainerFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ContainerFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ContainerFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.\n */\nexport class ContainerClient extends StorageClient {\n /**\n * containerContext provided by protocol layer.\n */\n private containerContext: Container;\n\n private _containerName: string;\n\n /**\n * The name of the container.\n */\n public get containerName(): string {\n return this._containerName;\n }\n /**\n *\n * Creates an instance of ContainerClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions);\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName parameter\");\n }\n super(url, pipeline);\n this._containerName = this.getContainerNameFromUrl();\n this.containerContext = new Container(this.storageClientContext);\n }\n\n /**\n * Creates a new container under the specified account. If the container with\n * the same name already exists, the operation fails.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n *\n * @param options - Options to Container Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * const createContainerResponse = await containerClient.create();\n * console.log(\"Container was created successfully\", createContainerResponse.requestId);\n * ```\n */\n public async create(options: ContainerCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-create\", options);\n try {\n // Spread operator in destructuring assignments,\n // this will filter out unwanted properties from the response object into result object\n return await this.containerContext.create({\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new container under the specified account. 
If the container with\n * the same name already exists, it is not changed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n *\n * @param options -\n */\n public async createIfNotExists(\n options: ContainerCreateOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-createIfNotExists\", options);\n try {\n const res = await this.create(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message:\n \"Expected exception when creating a container only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure container resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing container might be deleted by other clients or\n * applications. Vice versa new containers with the same name might be added by other clients or\n * applications after this function completes.\n *\n * @param options -\n */\n public async exists(options: ContainerExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-exists\", options);\n try {\n await this.getProperties({\n abortSignal: options.abortSignal,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when checking container existence\",\n });\n return false;\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a {@link BlobClient}\n *\n * @param blobName - A blob name\n * @returns A new BlobClient object for the given blob name.\n */\n public getBlobClient(blobName: string): BlobClient {\n return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);\n }\n\n /**\n * Creates an {@link AppendBlobClient}\n *\n * @param blobName - An append blob name\n */\n public getAppendBlobClient(blobName: string): AppendBlobClient {\n return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);\n }\n\n /**\n * Creates a {@link BlockBlobClient}\n *\n * @param blobName - A block blob name\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n *\n * const blockBlobClient = containerClient.getBlockBlobClient(\"\");\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public getBlockBlobClient(blobName: string): BlockBlobClient {\n return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);\n }\n\n /**\n * Creates a {@link PageBlobClient}\n *\n * @param blobName - A page blob name\n */\n public getPageBlobClient(blobName: string): PageBlobClient {\n return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);\n }\n\n /**\n * Returns all user-defined metadata and system properties for the specified\n * container. 
The data returned does not include the container's list of blobs.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Options to Container Get Properties operation.\n */\n public async getProperties(\n options: ContainerGetPropertiesOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getProperties\", options);\n try {\n return await this.containerContext.getProperties({\n abortSignal: options.abortSignal,\n ...options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion. The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async delete(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-delete\", options);\n try {\n return await this.containerContext.delete({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion if it exists. 
The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async deleteIfExists(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteIfExists\", options);\n\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a container only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets one or more user-defined name-value pairs for the specified container.\n *\n * If no option provided, or no metadata defined in the parameter, the container\n * metadata will be removed.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Options to Container Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: ContainerSetMetadataOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n if (options.conditions.ifUnmodifiedSince) {\n throw new RangeError(\n \"the IfUnmodifiedSince must have their default values because they are ignored by the blob service\"\n );\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-setMetadata\", options);\n\n try {\n return await this.containerContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the permissions for the specified container. 
The permissions indicate\n * whether container data may be accessed publicly.\n *\n * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.\n * For example, new Date(\"2018-12-31T03:44:23.8827891Z\").toISOString() will get \"2018-12-31T03:44:23.882Z\".\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl\n *\n * @param options - Options to Container Get Access Policy operation.\n */\n public async getAccessPolicy(\n options: ContainerGetAccessPolicyOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getAccessPolicy\", options);\n\n try {\n const response = await this.containerContext.getAccessPolicy({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const res: ContainerGetAccessPolicyResponse = {\n _response: response._response,\n blobPublicAccess: response.blobPublicAccess,\n date: response.date,\n etag: response.etag,\n errorCode: response.errorCode,\n lastModified: response.lastModified,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n signedIdentifiers: [],\n version: response.version,\n };\n\n for (const identifier of response) {\n let accessPolicy: any = undefined;\n if (identifier.accessPolicy) {\n accessPolicy = {\n permissions: identifier.accessPolicy.permissions,\n };\n\n if (identifier.accessPolicy.expiresOn) {\n accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);\n }\n\n if (identifier.accessPolicy.startsOn) {\n accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);\n }\n }\n\n res.signedIdentifiers.push({\n accessPolicy,\n id: identifier.id,\n });\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the permissions for the specified container. The permissions indicate\n * whether blobs in a container may be accessed publicly.\n *\n * When you set permissions for a container, the existing permissions are replaced.\n * If no access or containerAcl provided, the existing container ACL will be\n * removed.\n *\n * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.\n * During this interval, a shared access signature that is associated with the stored access policy will\n * fail with status code 403 (Forbidden), until the access policy becomes active.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n *\n * @param access - The level of public access to data in the container.\n * @param containerAcl - Array of elements each having a unique Id and details of the access policy.\n * @param options - Options to Container Set Access Policy operation.\n */\n public async setAccessPolicy(\n access?: PublicAccessType,\n containerAcl?: SignedIdentifier[],\n options: ContainerSetAccessPolicyOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"ContainerClient-setAccessPolicy\", options);\n try {\n const acl: SignedIdentifierModel[] = [];\n for (const identifier of containerAcl || []) {\n acl.push({\n accessPolicy: {\n expiresOn: identifier.accessPolicy.expiresOn\n ? 
truncatedISO8061Date(identifier.accessPolicy.expiresOn)\n : \"\",\n permissions: identifier.accessPolicy.permissions,\n startsOn: identifier.accessPolicy.startsOn\n ? truncatedISO8061Date(identifier.accessPolicy.startsOn)\n : \"\",\n },\n id: identifier.id,\n });\n }\n\n return await this.containerContext.setAccessPolicy({\n abortSignal: options.abortSignal,\n access,\n containerAcl: acl,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the container.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the container.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n *\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},\n * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better\n * performance with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param blobName - Name of the block blob to create or update.\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to configure the Block Blob Upload operation.\n * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.\n */\n public async uploadBlockBlob(\n blobName: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise<{ blockBlobClient: BlockBlobClient; response: BlockBlobUploadResponse }> {\n const { span, updatedOptions } = createSpan(\"ContainerClient-uploadBlockBlob\", options);\n try {\n const blockBlobClient = this.getBlockBlobClient(blobName);\n const response = await blockBlobClient.upload(body, contentLength, updatedOptions);\n return {\n blockBlobClient,\n response,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param blobName -\n * @param options - Options to Blob Delete operation.\n * @returns Block blob deletion response data.\n */\n public async deleteBlob(\n blobName: string,\n options: ContainerDeleteBlobOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteBlob\", options);\n try {\n let blobClient = this.getBlobClient(blobName);\n if (options.versionId) {\n blobClient = blobClient.withVersion(options.versionId);\n }\n return await blobClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobFlatSegment returns a single segment of blobs starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call listBlobsFlatSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Flat Segment operation.\n */\n private async listBlobFlatSegment(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-listBlobFlatSegment\", options);\n try {\n const response = await this.containerContext.listBlobFlatSegment({\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerListBlobFlatSegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobHierarchySegment returns a single segment of blobs starting from\n * the specified Marker. Use an empty Marker to start enumeration from the\n * beginning. 
After getting a segment, process it, and then call listBlobsHierarchicalSegment\n * again (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Hierarchy Segment operation.\n */\n private async listBlobHierarchySegment(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"ContainerClient-listBlobHierarchySegment\",\n options\n );\n try {\n const response = await this.containerContext.listBlobHierarchySegment(delimiter, {\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerListBlobHierarchySegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefix = {\n ...blobPrefixInternal,\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse\n *\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listSegments(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsFlatSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options);\n marker = listBlobsFlatSegmentResponse.continuationToken;\n yield await listBlobsFlatSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link BlobItem} objects\n *\n * @param options - Options to list blobs operation.\n */\n private async *listItems(\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) {\n yield* listBlobsFlatSegmentResponse.segment.blobItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the containerClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\");`\n * let i = 1;\n * for await (const blob of containerClient.listBlobsFlat()) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = containerClient.listBlobsFlat();\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * @param options - Options to list blobs.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listBlobsFlat(\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator {\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n 
include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include: include } : {}),\n };\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listItems(updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listHierarchySegments(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsHierarchySegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(\n delimiter,\n marker,\n options\n );\n marker = listBlobsHierarchySegmentResponse.continuationToken;\n yield await listBlobsHierarchySegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n private async *listItemsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator<({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem)> {\n let marker: string | undefined;\n for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(\n delimiter,\n marker,\n options\n )) {\n const segment = listBlobsHierarchySegmentResponse.segment;\n if (segment.blobPrefixes) {\n for (const prefix of segment.blobPrefixes) {\n yield {\n kind: \"prefix\",\n ...prefix,\n };\n }\n }\n for (const blob of segment.blobItems) {\n yield { kind: \"blob\", ...blob };\n }\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs by hierarchy.\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * for await (const item of 
containerClient.listBlobsByHierarchy(\"/\")) {\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let iter = containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix1/\" });\n * let entity = await iter.next();\n * while (!entity.done) {\n * let item = entity.value;\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * entity = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page\");\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\").byPage()) {\n * const segment = response.segment;\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a max page size:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page, specifying a prefix and a max page size\");\n *\n * let i = 1;\n * for await (const response of containerClient\n * .listBlobsByHierarchy(\"/\", { prefix: \"prefix2/sub1/\" })\n * .byPage({ maxPageSize: 2 })) {\n * console.log(`Page ${i++}`);\n * const segment = response.segment;\n *\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n *\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n public listBlobsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator<\n ({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem),\n ContainerListBlobHierarchySegmentResponse\n > {\n if (delimiter === \"\") {\n throw new RangeError(\"delimiter should contain one or more characters\");\n }\n\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? 
{ include: include } : {}),\n };\n // AsyncIterableIterator to iterate over blob prefixes and blobs\n const iter = this.listItemsByHierarchy(delimiter, updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n async next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listHierarchySegments(delimiter, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in the container whose tags\n * match a given search expression.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-findBlobsByTagsSegment\", options);\n\n try {\n const response = await this.containerContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. 
The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified container.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of containerClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = containerClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = containerClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n 
* // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ContainerFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n private getContainerNameFromUrl(): string {\n let containerName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`\n // http://localhost:10001/devstoreaccount1/containername\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername\".\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername\n // .getPath() -> /devstoreaccount1/containername\n containerName = parsedUrl.getPath()!.split(\"/\")[2];\n } else {\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n }\n\n // decode the encoded containerName - to get all the special characters that might be present in it\n containerName = decodeURIComponent(containerName);\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return containerName;\n } catch (error: any) {\n throw new Error(\"Unable to extract containerName with provided information.\");\n }\n }\n\n /**\n * Only available for ContainerClient constructed with a shared key credential.\n *\n * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. 
The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this container.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js new file mode 100644 index 0000000..4854ba6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. 
+ * @param response - Model PageBlob Range response + */ +export function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} +//# sourceMappingURL=PageBlobRangeResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map new file mode 100644 index 0000000..d442d12 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"PageBlobRangeResponse.js","sourceRoot":"","sources":["../../../src/PageBlobRangeResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA8ElC;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,QAAqF;IAErF,MAAM,SAAS,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC9E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,uCACK,QAAQ,KACX,SAAS;QACT,UAAU,EACV,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE;gBACV,SAAS;gBACT,UAAU;aACX,OAEH;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\nimport {\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffResponseModel,\n} from \"./generatedModels\";\nimport { Range } from \"./Range\";\n\n/**\n * List of page ranges for a blob.\n */\nexport interface PageList {\n /**\n * Valid non-overlapping page ranges.\n */\n pageRange?: Range[];\n /**\n * Present if the prevSnapshot parameter was specified and there were cleared\n * pages between the previous snapshot and the target snapshot.\n */\n clearRange?: Range[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffResponse\n extends PageList,\n PageBlobGetPageRangesDiffHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n 
parsedBody: PageList;\n };\n}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nexport function rangeResponseFromModel(\n response: PageBlobGetPageRangesResponseModel | PageBlobGetPageRangesDiffResponseModel\n): PageBlobGetPageRangesResponse | PageBlobGetPageRangesDiffResponse {\n const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n return {\n ...response,\n pageRange,\n clearRange,\n _response: {\n ...response._response,\n parsedBody: {\n pageRange,\n clearRange,\n },\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js new file mode 100644 index 0000000..bbb977a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, deserializationPolicy, disableResponseDecompressionPolicy, HttpHeaders, RequestPolicyOptions, WebResource, proxyPolicy, isNode, isTokenCredential, tracingPolicy, logPolicy, keepAlivePolicy, generateClientRequestIdPolicy, } from "@azure/core-http"; +import { logger } from "./log"; +import { StorageBrowserPolicyFactory } from "./StorageBrowserPolicyFactory"; +import { StorageRetryPolicyFactory } from "./StorageRetryPolicyFactory"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageOAuthScopes, StorageBlobLoggingAllowedHeaderNames, StorageBlobLoggingAllowedQueryParameters, } from "./utils/constants"; +import { TelemetryPolicyFactory } from "./TelemetryPolicyFactory"; +import { getCachedDefaultHttpClient } from "./utils/cache"; +import { attachCredential } from "./utils/utils.common"; +import { storageBearerTokenChallengeAuthenticationPolicy } from "./policies/StorageBearerTokenChallengeAuthenticationPolicy"; +// Export following interfaces and types for customers who want to implement their +// own RequestPolicy or HTTPClient +export { BaseRequestPolicy, StorageOAuthScopes, deserializationPolicy, HttpHeaders, WebResource, RequestPolicyOptions, }; +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. 
+ * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + // when options.httpClient is not specified, passing in a DefaultHttpClient instance to + // avoid each client creating its own http client. + this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export function newPipeline(credential, pipelineOptions = {}) { + var _a; + if (credential === undefined) { + credential = new AnonymousCredential(); + } + // Order is important. Closer to the API at the top & closer to the network at the bottom. + // The credential's policy factory must appear close to the wire so it can sign any + // changes made by other factories (like UniqueRequestIDPolicyFactory) + const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + const factories = [ + tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), + keepAlivePolicy(pipelineOptions.keepAliveOptions), + telemetryPolicy, + generateClientRequestIdPolicy(), + new StorageBrowserPolicyFactory(), + new StorageRetryPolicyFactory(pipelineOptions.retryOptions), + // Default deserializationPolicy is provided by protocol layer + // Use customized XML char key of "#" so we could deserialize metadata + // with "_" key + deserializationPolicy(undefined, { xmlCharKey: "#" }), + logPolicy({ + logger: logger.info, + allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), + ]; + if (isNode) { + // policies only available in Node.js runtime, not in browsers + factories.push(proxyPolicy(pipelineOptions.proxyOptions)); + factories.push(disableResponseDecompressionPolicy()); + } + factories.push(isTokenCredential(credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes), credential) + : credential); + return new Pipeline(factories, pipelineOptions); +} +//# sourceMappingURL=Pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map new file mode 100644 index 0000000..5c1b559 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Pipeline.js","sourceRoot":"","sources":["../../../src/Pipeline.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,qBAAqB,EACrB,kCAAkC,EAElC,WAAW,EAKX,oBAAoB,EAEpB,WAAW,EACX,WAAW,EACX,MAAM,EAEN,iBAAiB,EACjB,aAAa,EACb,SAAS,EAET,eAAe,EAEf,6BAA6B,GAE9B,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,2BAA2B,EAAE,MAAM,+BAA+B,CAAC;AAC5E,OAAO,EAAuB,yBAAyB,EAAE,MAAM,6BAA6B,CAAC;AAE7F,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EACL,kBAAkB,EAClB,oCAAoC,EACpC,wCAAwC,GACzC,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC;AAClE,OAAO,EAAE,0BAA0B,EAAE,MAAM,eAAe,CAAC;AAC3D,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACxD,OAAO,EAAE,+CAA+C,EAAE,MAAM,4DAA4D,CAAC;AAE7H,kFAAkF;AAClF,kCAAkC;AAClC,OAAO,EACL,iBAAiB,EACjB,kBAAkB,EAClB,qBAAqB,EAErB,WAAW,EAGX,WAAW,EAGX,oBAAoB,GACrB,CAAC;AAsCF;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,QAAiB;IAC9C,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;QAC7C,OAAO,KAAK,CAAC;KACd;IAED,MAAM,YAAY,GAAG,QAAwB,CAAC;IAE9C,OAAO,CACL,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,SAAS,CAAC;QACrC,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ;QACxC,OAAO,YAAY,CAAC,sBAAsB,KAAK,UAAU,CAC1D,CAAC;AACJ,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,OAAO,QAAQ;IAUnB;;;;;OAKG;IACH,YAAY,SAAiC,EAAE,UAA2B,EAAE;QAC1E,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,uFAAuF;QACvF,kDAAkD;QAClD,IAAI,CAAC,OAAO,mCACP,OAAO,KACV,UAAU,EAAE,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,GAC/D,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,sBAAsB;QAC3B,OAAO;YACL,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU;YACnC,sBAAsB,EAAE,IAAI,CAAC,SAAS;SACvC,CAAC;IACJ,CAAC;CACF;AAiCD;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CACzB,UAA+E,EAC/E,kBAA0C,EAAE;;IAE5C,IAAI,UAAU,KAAK,SAAS,EAAE;QAC5B,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;KACxC;IAED,0FAA0F;IAC1F,mFAAmF;IACnF,sEAAsE;IAEtE,MAAM,eAAe,GAAG,IAAI,sBAAsB,CAAC,eAAe,CAAC,gBAAgB,CAAC,CAAC;IACrF,MAAM,SAAS,GAA2B;QACxC,aAAa,CAAC,EAAE,SAAS,EAAE,eAAe,CAAC,eAAe,EAAE,CAAC;QAC7D,eAAe,CAAC,eAAe,CAAC,gBAAgB,CAAC;QACjD,eAAe;QACf,6BAA6B,EAAE;QAC/B,IAAI,2BAA2B,EAAE;QACjC,IAAI,yBAAyB,CAAC,eAAe,CAAC,YAAY,CAAC;QAC3D,8DAA8D;QAC9D,sEAAsE;QACtE,eAAe;QACf,qBAAqB,CAAC,SAAS,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;QACrD,SAAS,CAAC;YACR,MAAM,EAAE,MAAM,CAAC,IAAI;YACnB,kBAAkB,EAAE,oCAAoC;YACxD,sBAAsB,EAAE,wCAAwC;SACjE,CAAC;KACH,CAAC;IAEF,IAAI,MAAM,EAAE;QACV,8DAA8D;QAC9D,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;QAC1D,SAAS,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;KACtD;IACD,SAAS,CAAC,IAAI,CACZ,iBAAiB,CAAC,UAAU,CAAC;QAC3B,CAAC,CAAC,gBAAgB,CACd,+CAA+C,CAC7C,UAAU,EACV,MAAA,eAAe,CAAC,QAAQ,mCAAI,kBAAkB,CAC/C,EACD,UAAU,CACX;QACH,CAAC,CAAC,UAAU,CACf,CAAC;IAEF,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;AAClD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n disableResponseDecompressionPolicy,\n HttpClient as IHttpClient,\n HttpHeaders,\n HttpOperationResponse,\n HttpRequestBody,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n ServiceClientOptions,\n WebResource,\n proxyPolicy,\n isNode,\n TokenCredential,\n isTokenCredential,\n 
tracingPolicy,\n logPolicy,\n ProxyOptions,\n keepAlivePolicy,\n KeepAliveOptions,\n generateClientRequestIdPolicy,\n UserAgentOptions,\n} from \"@azure/core-http\";\n\nimport { logger } from \"./log\";\nimport { StorageBrowserPolicyFactory } from \"./StorageBrowserPolicyFactory\";\nimport { StorageRetryOptions, StorageRetryPolicyFactory } from \"./StorageRetryPolicyFactory\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport {\n StorageOAuthScopes,\n StorageBlobLoggingAllowedHeaderNames,\n StorageBlobLoggingAllowedQueryParameters,\n} from \"./utils/constants\";\nimport { TelemetryPolicyFactory } from \"./TelemetryPolicyFactory\";\nimport { getCachedDefaultHttpClient } from \"./utils/cache\";\nimport { attachCredential } from \"./utils/utils.common\";\nimport { storageBearerTokenChallengeAuthenticationPolicy } from \"./policies/StorageBearerTokenChallengeAuthenticationPolicy\";\n\n// Export following interfaces and types for customers who want to implement their\n// own RequestPolicy or HTTPClient\nexport {\n BaseRequestPolicy,\n StorageOAuthScopes,\n deserializationPolicy,\n IHttpClient,\n HttpHeaders,\n HttpRequestBody,\n HttpOperationResponse,\n WebResource,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptions,\n};\n\n/**\n * Option interface for Pipeline constructor.\n */\nexport interface PipelineOptions {\n /**\n * Optional. Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n}\n\n/**\n * An interface for the {@link Pipeline} class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport interface PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n readonly factories: RequestPolicyFactory[];\n /**\n * Configures pipeline logger and HTTP client.\n */\n readonly options: PipelineOptions;\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n toServiceClientOptions(): ServiceClientOptions;\n}\n\n/**\n * A helper to decide if a given argument satisfies the Pipeline contract\n * @param pipeline - An argument that may be a Pipeline\n * @returns true when the argument satisfies the Pipeline contract\n */\nexport function isPipelineLike(pipeline: unknown): pipeline is PipelineLike {\n if (!pipeline || typeof pipeline !== \"object\") {\n return false;\n }\n\n const castPipeline = pipeline as PipelineLike;\n\n return (\n Array.isArray(castPipeline.factories) &&\n typeof castPipeline.options === \"object\" &&\n typeof castPipeline.toServiceClientOptions === \"function\"\n );\n}\n\n/**\n * A Pipeline class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport class Pipeline implements PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n public readonly factories: RequestPolicyFactory[];\n /**\n * Configures pipeline logger 
and HTTP client.\n */\n public readonly options: PipelineOptions;\n\n /**\n * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface.\n *\n * @param factories -\n * @param options -\n */\n constructor(factories: RequestPolicyFactory[], options: PipelineOptions = {}) {\n this.factories = factories;\n // when options.httpClient is not specified, passing in a DefaultHttpClient instance to\n // avoid each client creating its own http client.\n this.options = {\n ...options,\n httpClient: options.httpClient || getCachedDefaultHttpClient(),\n };\n }\n\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n public toServiceClientOptions(): ServiceClientOptions {\n return {\n httpClient: this.options.httpClient,\n requestPolicyFactories: this.factories,\n };\n }\n}\n\n/**\n * Options interface for the {@link newPipeline} function.\n */\nexport interface StoragePipelineOptions {\n /**\n * Options to configure a proxy for outgoing requests.\n */\n proxyOptions?: ProxyOptions;\n /**\n * Options for adding user agent details to outgoing requests.\n */\n userAgentOptions?: UserAgentOptions;\n /**\n * Configures the built-in retry policy behavior.\n */\n retryOptions?: StorageRetryOptions;\n /**\n * Keep alive configurations. Default keep-alive is enabled.\n */\n keepAliveOptions?: KeepAliveOptions;\n /**\n * Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n /**\n * The audience used to retrieve an AAD token.\n * By default, audience 'https://storage.azure.com/.default' will be used.\n */\n audience?: string | string[];\n}\n\n/**\n * Creates a new Pipeline object with Credential provided.\n *\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param pipelineOptions - Optional. Options.\n * @returns A new Pipeline object.\n */\nexport function newPipeline(\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n pipelineOptions: StoragePipelineOptions = {}\n): Pipeline {\n if (credential === undefined) {\n credential = new AnonymousCredential();\n }\n\n // Order is important. 
Closer to the API at the top & closer to the network at the bottom.\n // The credential's policy factory must appear close to the wire so it can sign any\n // changes made by other factories (like UniqueRequestIDPolicyFactory)\n\n const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);\n const factories: RequestPolicyFactory[] = [\n tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),\n keepAlivePolicy(pipelineOptions.keepAliveOptions),\n telemetryPolicy,\n generateClientRequestIdPolicy(),\n new StorageBrowserPolicyFactory(),\n new StorageRetryPolicyFactory(pipelineOptions.retryOptions), // Retry policy should be above any policy that throws retryable errors\n // Default deserializationPolicy is provided by protocol layer\n // Use customized XML char key of \"#\" so we could deserialize metadata\n // with \"_\" key\n deserializationPolicy(undefined, { xmlCharKey: \"#\" }),\n logPolicy({\n logger: logger.info,\n allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,\n allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,\n }),\n ];\n\n if (isNode) {\n // policies only available in Node.js runtime, not in browsers\n factories.push(proxyPolicy(pipelineOptions.proxyOptions));\n factories.push(disableResponseDecompressionPolicy());\n }\n factories.push(\n isTokenCredential(credential)\n ? attachCredential(\n storageBearerTokenChallengeAuthenticationPolicy(\n credential,\n pipelineOptions.audience ?? StorageOAuthScopes\n ),\n credential\n )\n : credential\n );\n\n return new Pipeline(factories, pipelineOptions);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js new file mode 100644 index 0000000..ea83a0c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +export function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? 
`bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} +//# sourceMappingURL=Range.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map new file mode 100644 index 0000000..a501b75 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Range.js","sourceRoot":"","sources":["../../../src/Range.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkBlC;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,MAAa;IACzC,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;QACrB,MAAM,IAAI,UAAU,CAAC,wCAAwC,CAAC,CAAC;KAChE;IACD,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,EAAE;QACrC,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;KACH;IACD,OAAO,MAAM,CAAC,KAAK;QACjB,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE;QAC9D,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,GAAG,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Range for Blob Service Operations.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations\n */\nexport interface Range {\n /**\n * StartByte, larger than or equal 0.\n */\n offset: number;\n /**\n * Optional. Count of bytes, larger than 0.\n * If not provided, will return bytes from offset to the end.\n */\n count?: number;\n}\n\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nexport function rangeToString(iRange: Range): string {\n if (iRange.offset < 0) {\n throw new RangeError(`Range.offset cannot be smaller than 0.`);\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\n `Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`\n );\n }\n return iRange.count\n ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`\n : `bytes=${iRange.offset}-`;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js new file mode 100644 index 0000000..e35175c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageBrowserPolicy } from "./policies/StorageBrowserPolicy"; +export { StorageBrowserPolicy }; +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=StorageBrowserPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map new file mode 100644 index 0000000..655540d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageBrowserPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageBrowserPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,oBAAoB,EAAE,CAAC;AAEhC;;GAEG;AACH,MAAM,OAAO,2BAA2B;IACtC;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageBrowserPolicy } from \"./policies/StorageBrowserPolicy\";\nexport { StorageBrowserPolicy };\n\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n */\nexport class StorageBrowserPolicyFactory implements RequestPolicyFactory {\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy {\n return new StorageBrowserPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js new file mode 100644 index 0000000..c36e4ac --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageClientContext } from "./generated/src/storageClientContext"; +import { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from "./utils/utils.common"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { isTokenCredential, isNode } from "@azure/core-http"; +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +export class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. 
+ */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = new AnonymousCredential(); + for (const factory of this.pipeline.factories) { + if ((isNode && factory instanceof StorageSharedKeyCredential) || + factory instanceof AnonymousCredential) { + this.credential = factory; + } + else if (isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + this.credential = factory.credential; + } + } + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} +//# sourceMappingURL=StorageClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map new file mode 100644 index 0000000..d7bf4fc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageClient.js","sourceRoot":"","sources":["../../../src/StorageClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,oBAAoB,EAAE,MAAM,sCAAsC,CAAC;AAE5E,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,EAAE,qBAAqB,EAAE,MAAM,sBAAsB,CAAC;AAClG,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAmB,iBAAiB,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAa9E;;;GAGG;AACH,MAAM,OAAgB,aAAa;IAyBjC;;;;OAIG;IACH,YAAsB,GAAW,EAAE,QAAsB;QACvD,iFAAiF;QACjF,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;QAC9B,IAAI,CAAC,WAAW,GAAG,qBAAqB,CAAC,GAAG,CAAC,CAAC;QAC9C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,oBAAoB,GAAG,IAAI,oBAAoB,CAClD,IAAI,CAAC,GAAG,EACR,QAAQ,CAAC,sBAAsB,EAAE,CAClC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;QAE7D,IAAI,CAAC,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAC5C,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE;YAC7C,IACE,CAAC,MAAM,IAAI,OAAO,YAAY,0BAA0B,CAAC;gBACzD,OAAO,YAAY,mBAAmB,EACtC;gBACA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC;aAC3B;iBAAM,IAAI,iBAAiB,CAAE,OAAe,CAAC,UAAU,CAAC,EAAE;gBACzD,uEAAuE;gBACvE,0DAA0D;gBAC1D,IAAI,CAAC,UAAU,GAAI,OAAe,CAAC,UAAU,CAAC;aAC/C;SACF;QAED,iDAAiD;QACjD,MAAM,oBAAoB,GAAG,IAAI,CAAC,oBAA2B,CAAC;QAC9D,oBAAoB,CAAC,kBAAkB,GAAG,SAAS,CAAC;IACtD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike } from \"./Pipeline\";\nimport { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from \"./utils/utils.common\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { TokenCredential, isTokenCredential, isNode } from \"@azure/core-http\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * An interface for options common to every remote operation.\n */\nexport interface CommonOptions {\n /**\n * Options to configure spans created when 
tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nexport abstract class StorageClient {\n /**\n * Encoded URL string value.\n */\n public readonly url: string;\n public readonly accountName: string;\n /**\n * Request policy pipeline.\n *\n * @internal\n */\n protected readonly pipeline: PipelineLike;\n /**\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n /**\n * StorageClient is a reference to protocol layer operations entry, which is\n * generated by AutoRest generator.\n */\n protected readonly storageClientContext: StorageClientContext;\n /**\n */\n protected readonly isHttps: boolean;\n\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n protected constructor(url: string, pipeline: PipelineLike) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(\n this.url,\n pipeline.toServiceClientOptions()\n );\n\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n\n this.credential = new AnonymousCredential();\n for (const factory of this.pipeline.factories) {\n if (\n (isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential\n ) {\n this.credential = factory;\n } else if (isTokenCredential((factory as any).credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = (factory as any).credential;\n }\n }\n\n // Override protocol layer's default content-type\n const storageClientContext = this.storageClientContext as any;\n storageClientContext.requestContentType = undefined;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js new file mode 100644 index 0000000..4f05729 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageRetryPolicy, StorageRetryPolicyType } from "./policies/StorageRetryPolicy"; +export { StorageRetryPolicyType, StorageRetryPolicy }; +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +export class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} +//# sourceMappingURL=StorageRetryPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map new file mode 100644 index 0000000..9cfff58 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageRetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageRetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,MAAM,+BAA+B,CAAC;AAE3F,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,CAAC;AAmDtD;;GAEG;AACH,MAAM,OAAO,yBAAyB;IAGpC;;;OAGG;IACH,YAAY,YAAkC;QAC5C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageRetryPolicy, StorageRetryPolicyType } from \"./policies/StorageRetryPolicy\";\n\nexport { StorageRetryPolicyType, StorageRetryPolicy };\n\n/**\n * Storage Blob retry options interface.\n */\nexport interface StorageRetryOptions {\n /**\n * Optional. StorageRetryPolicyType, default is exponential retry policy.\n */\n readonly retryPolicyType?: StorageRetryPolicyType;\n\n /**\n * Optional. Max try number of attempts, default is 4.\n * A value of 1 means 1 try and no retries.\n * A value smaller than 1 means default retry number of attempts.\n */\n readonly maxTries?: number;\n\n /**\n * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.\n * A value of zero or undefined means no default timeout on SDK client, Azure\n * Storage server's default timeout policy will be used.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations\n */\n readonly tryTimeoutInMs?: number;\n\n /**\n * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).\n * The delay increases (exponentially or linearly) with each retry up to a maximum specified by\n * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.\n */\n readonly retryDelayInMs?: number;\n\n /**\n * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).\n * If you specify 0, then you must also specify 0 for retryDelayInMs.\n */\n readonly maxRetryDelayInMs?: number;\n\n /**\n * If a secondaryHost is specified, retries will be tried against this host. 
If secondaryHost is undefined\n * (the default) then operations are not retried against another host.\n *\n * NOTE: Before setting this field, make sure you understand the issues around\n * reading stale and potentially-inconsistent data at\n * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}\n */\n readonly secondaryHost?: string;\n}\n\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n */\nexport class StorageRetryPolicyFactory implements RequestPolicyFactory {\n private retryOptions?: StorageRetryOptions;\n\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param retryOptions -\n */\n constructor(retryOptions?: StorageRetryOptions) {\n this.retryOptions = retryOptions;\n }\n\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js new file mode 100644 index 0000000..a2e71a3 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode, } from "@azure/core-http"; +import * as os from "os"; +import { TelemetryPolicy } from "./policies/TelemetryPolicy"; +import { SDK_VERSION } from "./utils/constants"; +/** + * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. + */ +export class TelemetryPolicyFactory { + /** + * Creates an instance of TelemetryPolicyFactory. + * @param telemetry - + */ + constructor(telemetry) { + const userAgentInfo = []; + if (isNode) { + if (telemetry) { + const telemetryString = telemetry.userAgentPrefix || ""; + if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { + userAgentInfo.push(telemetryString); + } + } + // e.g. azsdk-js-storageblob/10.0.0 + const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + if (userAgentInfo.indexOf(libInfo) === -1) { + userAgentInfo.push(libInfo); + } + // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) + let runtimeInfo = `(NODE-VERSION ${process.version})`; + if (os) { + runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + } + if (userAgentInfo.indexOf(runtimeInfo) === -1) { + userAgentInfo.push(runtimeInfo); + } + } + this.telemetryString = userAgentInfo.join(" "); + } + /** + * Creates a TelemetryPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + } +} +//# sourceMappingURL=TelemetryPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map new file mode 100644 index 0000000..ee8cbca --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TelemetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/TelemetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,MAAM,GAKP,MAAM,kBAAkB,CAAC;AAC1B,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AAC7D,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD;;GAEG;AACH,MAAM,OAAO,sBAAsB;IAMjC;;;OAGG;IACH,YAAY,SAA4B;QACtC,MAAM,aAAa,GAAa,EAAE,CAAC;QAEnC,IAAI,MAAM,EAAE;YACV,IAAI,SAAS,EAAE;gBACb,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,IAAI,EAAE,CAAC;gBACxD,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,IAAI,aAAa,CAAC,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC/E,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;iBACrC;aACF;YAED,mCAAmC;YACnC,MAAM,OAAO,GAAG,wBAAwB,WAAW,EAAE,CAAC;YACtD,IAAI,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;gBACzC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;aAC7B;YAED,mDAAmD;YACnD,IAAI,WAAW,GAAG,iBAAiB,OAAO,CAAC,OAAO,GAAG,CAAC;YACtD,IAAI,EAAE,EAAE;gBACN,WAAW,GAAG,iBAAiB,OAAO,CAAC,OAAO,KAAK,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,OAAO,EAAE,GAAG,CAAC;aACjF;YACD,IAAI,aAAa,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7C,aAAa,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;aACjC;SACF;QAED,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n isNode,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n UserAgentOptions,\n} from \"@azure/core-http\";\nimport * as os from \"os\";\n\nimport { TelemetryPolicy } from \"./policies/TelemetryPolicy\";\nimport { SDK_VERSION } from \"./utils/constants\";\n\n/**\n * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.\n */\nexport class TelemetryPolicyFactory implements RequestPolicyFactory {\n /**\n * @internal\n */\n public readonly telemetryString: string;\n\n /**\n * Creates an instance of TelemetryPolicyFactory.\n * @param telemetry -\n */\n constructor(telemetry?: UserAgentOptions) {\n const userAgentInfo: string[] = [];\n\n if (isNode) {\n if (telemetry) {\n const telemetryString = telemetry.userAgentPrefix || \"\";\n if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {\n userAgentInfo.push(telemetryString);\n }\n }\n\n // e.g. azsdk-js-storageblob/10.0.0\n const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`;\n if (userAgentInfo.indexOf(libInfo) === -1) {\n userAgentInfo.push(libInfo);\n }\n\n // e.g. 
(NODE-VERSION 4.9.1; Windows_NT 10.0.16299)\n let runtimeInfo = `(NODE-VERSION ${process.version})`;\n if (os) {\n runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`;\n }\n if (userAgentInfo.indexOf(runtimeInfo) === -1) {\n userAgentInfo.push(runtimeInfo);\n }\n }\n\n this.telemetryString = userAgentInfo.join(\" \");\n }\n\n /**\n * Creates a TelemetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): TelemetryPolicy {\n return new TelemetryPolicy(nextPolicy, options, this.telemetryString);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js new file mode 100644 index 0000000..2511e14 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AnonymousCredentialPolicy } from "../policies/AnonymousCredentialPolicy"; +import { Credential } from "./Credential"; +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=AnonymousCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map new file mode 100644 index 0000000..6819d44 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AnonymousCredential.js","sourceRoot":"","sources":["../../../../src/credentials/AnonymousCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,yBAAyB,EAAE,MAAM,uCAAuC,CAAC;AAClF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;;GAKG;AACH,MAAM,OAAO,mBAAoB,SAAQ,UAAU;IACjD;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,yBAAyB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC5D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { AnonymousCredentialPolicy } from \"../policies/AnonymousCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. 
AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n */\nexport class AnonymousCredential extends Credential {\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): AnonymousCredentialPolicy {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js new file mode 100644 index 0000000..6db7be6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +export class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} +//# sourceMappingURL=Credential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map new file mode 100644 index 0000000..a7948dc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Credential.js","sourceRoot":"","sources":["../../../../src/credentials/Credential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC;;;GAGG;AACH,MAAM,OAAgB,UAAU;IAC9B;;;;;OAKG;IACI,MAAM,CAAC,WAA0B,EAAE,QAA8B;QACtE,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;IACvE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { CredentialPolicy } from \"../policies/CredentialPolicy\";\n\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. 
This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n */\nexport abstract class Credential implements RequestPolicyFactory {\n /**\n * Creates a RequestPolicy object.\n *\n * @param _nextPolicy -\n * @param _options -\n */\n public create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy {\n throw new Error(\"Method should be implemented in children classes.\");\n }\n}\n\n/**\n * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.\n */\nexport type CredentialPolicyCreator = (\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n) => CredentialPolicy;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js new file mode 100644 index 0000000..1235e86 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class StorageSharedKeyCredential { +} +//# sourceMappingURL=StorageSharedKeyCredential.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map new file mode 100644 index 0000000..12177de --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageSharedKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,0BAA0B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport class StorageSharedKeyCredential {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js new file mode 100644 index 0000000..c47b1ce --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHmac } from "crypto"; +import { StorageSharedKeyCredentialPolicy } from "../policies/StorageSharedKeyCredentialPolicy"; +import { Credential } from "./Credential"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} +//# sourceMappingURL=StorageSharedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map new file mode 100644 index 0000000..c73c0e4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageSharedKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC,OAAO,EAAE,gCAAgC,EAAE,MAAM,8CAA8C,CAAC;AAChG,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;GAIG;AACH,MAAM,OAAO,0BAA2B,SAAQ,UAAU;IAWxD;;;;OAIG;IACH,YAAY,WAAmB,EAAE,UAAkB;QACjD,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,gCAAgC,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;IACzE,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAC7F,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { StorageSharedKeyCredentialPolicy } from \"../policies/StorageSharedKeyCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nexport class StorageSharedKeyCredential extends Credential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage account key; readonly.\n */\n private readonly accountKey: Buffer;\n\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n constructor(accountName: string, accountKey: string) {\n super();\n this.accountName = accountName;\n this.accountKey = Buffer.from(accountKey, \"base64\");\n }\n\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): StorageSharedKeyCredentialPolicy {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n return createHmac(\"sha256\", this.accountKey).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js new file mode 100644 index 0000000..cffcb3c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class UserDelegationKeyCredential { +} +//# sourceMappingURL=UserDelegationKeyCredential.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map new file mode 100644 index 0000000..0a1e590 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"UserDelegationKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,2BAA2B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport class UserDelegationKeyCredential {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js new file mode 100644 index 0000000..fa438f6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHmac } from "crypto"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +export class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. 
+ * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} +//# sourceMappingURL=UserDelegationKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map new file mode 100644 index 0000000..df787bb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"UserDelegationKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC;;;;;GAKG;AACH,MAAM,OAAO,2BAA2B;IAgBtC;;;;OAIG;IACH,YAAY,WAAmB,EAAE,iBAAoC;QACnE,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IAC5D,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,gEAAgE;QAEhE,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IACtF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nexport class UserDelegationKeyCredential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage user delegation key; readonly.\n */\n public readonly userDelegationKey: UserDelegationKey;\n\n /**\n * Key value in Buffer type.\n */\n private readonly key: Buffer;\n\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n constructor(accountName: string, userDelegationKey: UserDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n\n return createHmac(\"sha256\", this.key).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js new file mode 100644 index 0000000..290e550 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js @@ -0,0 +1,11 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +export * from "./models"; +export { StorageClient } from "./storageClient"; +export { StorageClientContext } from "./storageClientContext"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map new file mode 100644 index 0000000..5601aab --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../src/generated/src/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,UAAU,CAAC;AACzB,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./models\";\nexport { StorageClient } from \"./storageClient\";\nexport { StorageClientContext } from \"./storageClientContext\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js new file mode 100644 index 0000000..9593344 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export {}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map new file mode 100644 index 0000000..75665a8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\n/** Storage Service Properties. */\nexport interface BlobServiceProperties {\n /** Azure Analytics Logging settings. */\n blobAnalyticsLogging?: Logging;\n /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\n hourMetrics?: Metrics;\n /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\n minuteMetrics?: Metrics;\n /** The set of CORS rules. */\n cors?: CorsRule[];\n /** The default version to use for requests to the Blob service if an incoming request's version is not specified. 
Possible values include version 2008-10-27 and all more recent versions */\n defaultServiceVersion?: string;\n /** the retention policy which determines how long the associated data should persist */\n deleteRetentionPolicy?: RetentionPolicy;\n /** The properties that enable an account to host a static website */\n staticWebsite?: StaticWebsite;\n}\n\n/** Azure Analytics Logging settings. */\nexport interface Logging {\n /** The version of Storage Analytics to configure. */\n version: string;\n /** Indicates whether all delete requests should be logged. */\n deleteProperty: boolean;\n /** Indicates whether all read requests should be logged. */\n read: boolean;\n /** Indicates whether all write requests should be logged. */\n write: boolean;\n /** the retention policy which determines how long the associated data should persist */\n retentionPolicy: RetentionPolicy;\n}\n\n/** the retention policy which determines how long the associated data should persist */\nexport interface RetentionPolicy {\n /** Indicates whether a retention policy is enabled for the storage service */\n enabled: boolean;\n /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */\n days?: number;\n}\n\n/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\nexport interface Metrics {\n /** The version of Storage Analytics to configure. */\n version?: string;\n /** Indicates whether metrics are enabled for the Blob service. */\n enabled: boolean;\n /** Indicates whether metrics should generate summary statistics for called API operations. */\n includeAPIs?: boolean;\n /** the retention policy which determines how long the associated data should persist */\n retentionPolicy?: RetentionPolicy;\n}\n\n/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */\nexport interface CorsRule {\n /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. Note that the origin must be an exact case-sensitive match with the origin that the user age sends to the service. You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */\n allowedOrigins: string;\n /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */\n allowedMethods: string;\n /** the request headers that the origin domain may specify on the CORS request. */\n allowedHeaders: string;\n /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */\n exposedHeaders: string;\n /** The maximum amount time that a browser should cache the preflight OPTIONS request. 
*/\n maxAgeInSeconds: number;\n}\n\n/** The properties that enable an account to host a static website */\nexport interface StaticWebsite {\n /** Indicates whether this account is hosting a static website */\n enabled: boolean;\n /** The default name of the index page under each directory */\n indexDocument?: string;\n /** The absolute path of the custom 404 page */\n errorDocument404Path?: string;\n /** Absolute path of the default index page */\n defaultIndexDocumentPath?: string;\n}\n\nexport interface StorageError {\n message?: string;\n code?: string;\n}\n\n/** Stats for the storage service. */\nexport interface BlobServiceStatistics {\n /** Geo-Replication information for the Secondary Storage Service */\n geoReplication?: GeoReplication;\n}\n\n/** Geo-Replication information for the Secondary Storage Service */\nexport interface GeoReplication {\n /** The status of the secondary location */\n status: GeoReplicationStatusType;\n /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */\n lastSyncOn: Date;\n}\n\n/** An enumeration of containers */\nexport interface ListContainersSegmentResponse {\n serviceEndpoint: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n containerItems: ContainerItem[];\n continuationToken?: string;\n}\n\n/** An Azure Storage container */\nexport interface ContainerItem {\n name: string;\n deleted?: boolean;\n version?: string;\n /** Properties of a container */\n properties: ContainerProperties;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n}\n\n/** Properties of a container */\nexport interface ContainerProperties {\n lastModified: Date;\n etag: string;\n leaseStatus?: LeaseStatusType;\n leaseState?: LeaseStateType;\n leaseDuration?: LeaseDurationType;\n publicAccess?: PublicAccessType;\n hasImmutabilityPolicy?: boolean;\n hasLegalHold?: boolean;\n defaultEncryptionScope?: string;\n preventEncryptionScopeOverride?: boolean;\n deletedOn?: Date;\n remainingRetentionDays?: number;\n /** Indicates if version level worm is enabled on this container. */\n isImmutableStorageWithVersioningEnabled?: boolean;\n}\n\n/** Key information */\nexport interface KeyInfo {\n /** The date-time the key is active in ISO 8601 UTC time */\n startsOn: string;\n /** The date-time the key expires in ISO 8601 UTC time */\n expiresOn: string;\n}\n\n/** A user delegation key */\nexport interface UserDelegationKey {\n /** The Azure Active Directory object ID in GUID format. 
*/\n signedObjectId: string;\n /** The Azure Active Directory tenant ID in GUID format */\n signedTenantId: string;\n /** The date-time the key is active */\n signedStartsOn: string;\n /** The date-time the key expires */\n signedExpiresOn: string;\n /** Abbreviation of the Azure Storage service that accepts the key */\n signedService: string;\n /** The service version that created the key */\n signedVersion: string;\n /** The key as a base64 string */\n value: string;\n}\n\n/** The result of a Filter Blobs API call */\nexport interface FilterBlobSegment {\n serviceEndpoint: string;\n where: string;\n blobs: FilterBlobItem[];\n continuationToken?: string;\n}\n\n/** Blob info from a Filter Blobs API call */\nexport interface FilterBlobItem {\n name: string;\n containerName: string;\n /** Blob tags */\n tags?: BlobTags;\n}\n\n/** Blob tags */\nexport interface BlobTags {\n blobTagSet: BlobTag[];\n}\n\nexport interface BlobTag {\n key: string;\n value: string;\n}\n\n/** signed identifier */\nexport interface SignedIdentifier {\n /** a unique id */\n id: string;\n /** An Access policy */\n accessPolicy: AccessPolicy;\n}\n\n/** An Access policy */\nexport interface AccessPolicy {\n /** the date-time the policy is active */\n startsOn?: string;\n /** the date-time the policy expires */\n expiresOn?: string;\n /** the permissions for the acl policy */\n permissions?: string;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsFlatSegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegment;\n continuationToken?: string;\n}\n\nexport interface BlobFlatListSegment {\n blobItems: BlobItemInternal[];\n}\n\n/** An Azure Storage blob */\nexport interface BlobItemInternal {\n name: BlobName;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n /** Properties of a blob */\n properties: BlobPropertiesInternal;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n /** Blob tags */\n blobTags?: BlobTags;\n /** Dictionary of */\n objectReplicationMetadata?: { [propertyName: string]: string };\n /** Inactive root blobs which have any versions would have such tag with value true. */\n hasVersionsOnly?: boolean;\n}\n\nexport interface BlobName {\n /** Indicates if the blob name is encoded. */\n encoded?: boolean;\n /** The name of the blob. */\n content?: string;\n}\n\n/** Properties of a blob */\nexport interface BlobPropertiesInternal {\n createdOn?: Date;\n lastModified: Date;\n etag: string;\n /** Size in bytes */\n contentLength?: number;\n contentType?: string;\n contentEncoding?: string;\n contentLanguage?: string;\n contentMD5?: Uint8Array;\n contentDisposition?: string;\n cacheControl?: string;\n blobSequenceNumber?: number;\n blobType?: BlobType;\n leaseStatus?: LeaseStatusType;\n leaseState?: LeaseStateType;\n leaseDuration?: LeaseDurationType;\n copyId?: string;\n copyStatus?: CopyStatusType;\n copySource?: string;\n copyProgress?: string;\n copyCompletedOn?: Date;\n copyStatusDescription?: string;\n serverEncrypted?: boolean;\n incrementalCopy?: boolean;\n destinationSnapshot?: string;\n deletedOn?: Date;\n remainingRetentionDays?: number;\n accessTier?: AccessTier;\n accessTierInferred?: boolean;\n archiveStatus?: ArchiveStatus;\n customerProvidedKeySha256?: string;\n /** The name of the encryption scope under which the blob is encrypted. 
*/\n encryptionScope?: string;\n accessTierChangedOn?: Date;\n tagCount?: number;\n expiresOn?: Date;\n isSealed?: boolean;\n /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */\n rehydratePriority?: RehydratePriority;\n lastAccessedOn?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsHierarchySegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegment;\n continuationToken?: string;\n}\n\nexport interface BlobHierarchyListSegment {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItemInternal[];\n}\n\nexport interface BlobPrefix {\n name: BlobName;\n}\n\nexport interface BlockLookupList {\n committed?: string[];\n uncommitted?: string[];\n latest?: string[];\n}\n\nexport interface BlockList {\n committedBlocks?: Block[];\n uncommittedBlocks?: Block[];\n}\n\n/** Represents a single block in a block blob. It describes the block's ID and size. */\nexport interface Block {\n /** The base64 encoded block ID. */\n name: string;\n /** The block size in bytes. */\n size: number;\n}\n\n/** the list of pages */\nexport interface PageList {\n pageRange?: PageRange[];\n clearRange?: ClearRange[];\n continuationToken?: string;\n}\n\nexport interface PageRange {\n start: number;\n end: number;\n}\n\nexport interface ClearRange {\n start: number;\n end: number;\n}\n\n/** Groups the set of query request settings. */\nexport interface QueryRequest {\n /** Required. The type of the provided query expression. */\n queryType: string;\n /** The query expression in SQL. The maximum size of the query expression is 256KiB. */\n expression: string;\n inputSerialization?: QuerySerialization;\n outputSerialization?: QuerySerialization;\n}\n\nexport interface QuerySerialization {\n format: QueryFormat;\n}\n\nexport interface QueryFormat {\n /** The quick query format type. */\n type: QueryFormatType;\n /** Groups the settings used for interpreting the blob data if the blob is delimited text formatted. */\n delimitedTextConfiguration?: DelimitedTextConfiguration;\n /** json text configuration */\n jsonTextConfiguration?: JsonTextConfiguration;\n /** Groups the settings used for formatting the response if the response should be Arrow formatted. */\n arrowConfiguration?: ArrowConfiguration;\n /** Any object */\n parquetTextConfiguration?: any;\n}\n\n/** Groups the settings used for interpreting the blob data if the blob is delimited text formatted. */\nexport interface DelimitedTextConfiguration {\n /** The string used to separate columns. */\n columnSeparator?: string;\n /** The string used to quote a specific field. */\n fieldQuote?: string;\n /** The string used to separate records. */\n recordSeparator?: string;\n /** The string used as an escape character. */\n escapeChar?: string;\n /** Represents whether the data has headers. */\n headersPresent?: boolean;\n}\n\n/** json text configuration */\nexport interface JsonTextConfiguration {\n /** The string used to separate records. 
*/\n recordSeparator?: string;\n}\n\n/** Groups the settings used for formatting the response if the response should be Arrow formatted. */\nexport interface ArrowConfiguration {\n schema: ArrowField[];\n}\n\n/** Groups settings regarding specific field of an arrow schema */\nexport interface ArrowField {\n type: string;\n name?: string;\n precision?: number;\n scale?: number;\n}\n\n/** Defines headers for Service_setProperties operation. */\nexport interface ServiceSetPropertiesHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_setProperties operation. */\nexport interface ServiceSetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getProperties operation. */\nexport interface ServiceGetPropertiesHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getProperties operation. */\nexport interface ServiceGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getStatistics operation. */\nexport interface ServiceGetStatisticsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getStatistics operation. */\nexport interface ServiceGetStatisticsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_listContainersSegment operation. */\nexport interface ServiceListContainersSegmentHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_listContainersSegment operation. 
*/\nexport interface ServiceListContainersSegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getUserDelegationKey operation. */\nexport interface ServiceGetUserDelegationKeyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getUserDelegationKey operation. */\nexport interface ServiceGetUserDelegationKeyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getAccountInfo operation. */\nexport interface ServiceGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */\n isHierarchicalNamespaceEnabled?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getAccountInfo operation. */\nexport interface ServiceGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_submitBatch operation. */\nexport interface ServiceSubmitBatchHeaders {\n /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */\n contentType?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_submitBatch operation. */\nexport interface ServiceSubmitBatchExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_filterBlobs operation. */\nexport interface ServiceFilterBlobsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_filterBlobs operation. */\nexport interface ServiceFilterBlobsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_create operation. */\nexport interface ContainerCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_create operation. */\nexport interface ContainerCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getProperties operation. */\nexport interface ContainerGetPropertiesHeaders {\n metadata?: { [propertyName: string]: string };\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicated whether data in the container may be accessed publicly and the level of access */\n blobPublicAccess?: PublicAccessType;\n /** Indicates whether the container has an immutability policy set on it. 
*/\n hasImmutabilityPolicy?: boolean;\n /** Indicates whether the container has a legal hold. */\n hasLegalHold?: boolean;\n /** The default encryption scope for the container. */\n defaultEncryptionScope?: string;\n /** Indicates whether the container's default encryption scope can be overriden. */\n denyEncryptionScopeOverride?: boolean;\n /** Indicates whether version level worm is enabled on a container. */\n isImmutableStorageWithVersioningEnabled?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_getProperties operation. */\nexport interface ContainerGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_delete operation. */\nexport interface ContainerDeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_delete operation. */\nexport interface ContainerDeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_setMetadata operation. */\nexport interface ContainerSetMetadataHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_setMetadata operation. */\nexport interface ContainerSetMetadataExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccessPolicy operation. */\nexport interface ContainerGetAccessPolicyHeaders {\n /** Indicated whether data in the container may be accessed publicly and the level of access */\n blobPublicAccess?: PublicAccessType;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccessPolicy operation. */\nexport interface ContainerGetAccessPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_setAccessPolicy operation. */\nexport interface ContainerSetAccessPolicyHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_setAccessPolicy operation. */\nexport interface ContainerSetAccessPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_restore operation. */\nexport interface ContainerRestoreHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_restore operation. */\nexport interface ContainerRestoreExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_rename operation. */\nexport interface ContainerRenameHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_rename operation. */\nexport interface ContainerRenameExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_submitBatch operation. */\nexport interface ContainerSubmitBatchHeaders {\n /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */\n contentType?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n}\n\n/** Defines headers for Container_submitBatch operation. */\nexport interface ContainerSubmitBatchExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_filterBlobs operation. */\nexport interface ContainerFilterBlobsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_filterBlobs operation. */\nexport interface ContainerFilterBlobsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_acquireLease operation. */\nexport interface ContainerAcquireLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_acquireLease operation. */\nexport interface ContainerAcquireLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_releaseLease operation. */\nexport interface ContainerReleaseLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. 
If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_releaseLease operation. */\nexport interface ContainerReleaseLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_renewLease operation. */\nexport interface ContainerRenewLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_renewLease operation. */\nexport interface ContainerRenewLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_breakLease operation. */\nexport interface ContainerBreakLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Approximate time remaining in the lease period, in seconds. */\n leaseTime?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_breakLease operation. */\nexport interface ContainerBreakLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_changeLease operation. */\nexport interface ContainerChangeLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_changeLease operation. */\nexport interface ContainerChangeLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobFlatSegment operation. */\nexport interface ContainerListBlobFlatSegmentHeaders {\n /** The media type of the body of the response. For List Blobs this is 'application/xml' */\n contentType?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobFlatSegment operation. */\nexport interface ContainerListBlobFlatSegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobHierarchySegment operation. */\nexport interface ContainerListBlobHierarchySegmentHeaders {\n /** The media type of the body of the response. For List Blobs this is 'application/xml' */\n contentType?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobHierarchySegment operation. */\nexport interface ContainerListBlobHierarchySegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccountInfo operation. */\nexport interface ContainerGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n}\n\n/** Defines headers for Container_getAccountInfo operation. */\nexport interface ContainerGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_download operation. */\nexport interface BlobDownloadHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Returns the date and time the blob was created. */\n createdOn?: Date;\n metadata?: { [propertyName: string]: string };\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */\n objectReplicationPolicyId?: string;\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */\n objectReplicationRules?: { [propertyName: string]: string };\n /** The number of bytes present in the response body. */\n contentLength?: number;\n /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */\n contentType?: string;\n /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */\n contentRange?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. 
The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletedOn?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */\n isCurrentVersion?: boolean;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */\n blobContentMD5?: Uint8Array;\n /** The number of tags associated with the blob */\n tagCount?: number;\n /** If this blob has been sealed */\n isSealed?: boolean;\n /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */\n lastAccessed?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n /** Error Code */\n errorCode?: string;\n /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */\n contentCrc64?: Uint8Array;\n}\n\n/** Defines headers for Blob_download operation. */\nexport interface BlobDownloadExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getProperties operation. */\nexport interface BlobGetPropertiesHeaders {\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Returns the date and time the blob was created. */\n createdOn?: Date;\n metadata?: { [propertyName: string]: string };\n /** Optional. 
Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */\n objectReplicationPolicyId?: string;\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */\n objectReplicationRules?: { [propertyName: string]: string };\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletedOn?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Included if the blob is incremental copy blob. */\n isIncrementalCopy?: boolean;\n /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */\n destinationSnapshot?: string;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */\n contentLength?: number;\n /** The content type specified for the blob. 
The default content type is 'application/octet-stream' */\n contentType?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */\n accessTier?: string;\n /** For page blobs on a premium storage account only. 
If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */\n accessTierInferred?: boolean;\n /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool/rehydrate-pending-to-cold. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */\n archiveStatus?: string;\n /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */\n accessTierChangedOn?: Date;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */\n isCurrentVersion?: boolean;\n /** The number of tags associated with the blob */\n tagCount?: number;\n /** The time this blob will expire. */\n expiresOn?: Date;\n /** If this blob has been sealed */\n isSealed?: boolean;\n /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */\n rehydratePriority?: RehydratePriority;\n /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */\n lastAccessed?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getProperties operation. */\nexport interface BlobGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_delete operation. */\nexport interface BlobDeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_delete operation. */\nexport interface BlobDeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_undelete operation. */\nexport interface BlobUndeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_undelete operation. */\nexport interface BlobUndeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setExpiry operation. */\nexport interface BlobSetExpiryHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */\n date?: Date;\n}\n\n/** Defines headers for Blob_setExpiry operation. */\nexport interface BlobSetExpiryExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setHttpHeaders operation. */\nexport interface BlobSetHttpHeadersHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setHttpHeaders operation. */\nexport interface BlobSetHttpHeadersExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setImmutabilityPolicy operation. */\nexport interface BlobSetImmutabilityPolicyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates the time the immutability policy will expire. */\n immutabilityPolicyExpiry?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n}\n\n/** Defines headers for Blob_setImmutabilityPolicy operation. */\nexport interface BlobSetImmutabilityPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_deleteImmutabilityPolicy operation. */\nexport interface BlobDeleteImmutabilityPolicyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_deleteImmutabilityPolicy operation. */\nexport interface BlobDeleteImmutabilityPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setLegalHold operation. */\nexport interface BlobSetLegalHoldHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates if the blob has a legal hold. */\n legalHold?: boolean;\n}\n\n/** Defines headers for Blob_setLegalHold operation. */\nexport interface BlobSetLegalHoldExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setMetadata operation. */\nexport interface BlobSetMetadataHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. 
The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setMetadata operation. */\nexport interface BlobSetMetadataExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_acquireLease operation. */\nexport interface BlobAcquireLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_acquireLease operation. */\nexport interface BlobAcquireLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_releaseLease operation. */\nexport interface BlobReleaseLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_releaseLease operation. */\nexport interface BlobReleaseLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_renewLease operation. */\nexport interface BlobRenewLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_renewLease operation. */\nexport interface BlobRenewLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_changeLease operation. */\nexport interface BlobChangeLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_changeLease operation. */\nexport interface BlobChangeLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_breakLease operation. */\nexport interface BlobBreakLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Approximate time remaining in the lease period, in seconds. 
*/\n leaseTime?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_breakLease operation. */\nexport interface BlobBreakLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_createSnapshot operation. */\nexport interface BlobCreateSnapshotHeaders {\n /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */\n snapshot?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */\n isServerEncrypted?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_createSnapshot operation. */\nexport interface BlobCreateSnapshotExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_startCopyFromURL operation. */\nexport interface BlobStartCopyFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_startCopyFromURL operation. */\nexport interface BlobStartCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_copyFromURL operation. */\nexport interface BlobCopyFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: SyncCopyStatusType;\n /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */\n contentMD5?: Uint8Array;\n /** This response header is returned so that the client can check for the integrity of the copied content. */\n xMsContentCrc64?: Uint8Array;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_copyFromURL operation. */\nexport interface BlobCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_abortCopyFromURL operation. 
*/\nexport interface BlobAbortCopyFromURLHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_abortCopyFromURL operation. */\nexport interface BlobAbortCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTier operation. */\nexport interface BlobSetTierHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTier operation. */\nexport interface BlobSetTierExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getAccountInfo operation. */\nexport interface BlobGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n}\n\n/** Defines headers for Blob_getAccountInfo operation. */\nexport interface BlobGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_query operation. */\nexport interface BlobQueryHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n metadata?: { [propertyName: string]: string };\n /** The number of bytes present in the response body. */\n contentLength?: number;\n /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */\n contentType?: string;\n /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */\n contentRange?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletionTime?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. 
*/\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */\n blobContentMD5?: Uint8Array;\n /** Error Code */\n errorCode?: string;\n /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */\n contentCrc64?: Uint8Array;\n}\n\n/** Defines headers for Blob_query operation. */\nexport interface BlobQueryExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getTags operation. */\nexport interface BlobGetTagsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getTags operation. */\nexport interface BlobGetTagsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTags operation. 
*/\nexport interface BlobSetTagsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTags operation. */\nexport interface BlobSetTagsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_create operation. */\nexport interface PageBlobCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_create operation. */\nexport interface PageBlobCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPages operation. */\nexport interface PageBlobUploadPagesHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPages operation. */\nexport interface PageBlobUploadPagesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_clearPages operation. */\nexport interface PageBlobClearPagesHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_clearPages operation. */\nexport interface PageBlobClearPagesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPagesFromURL operation. */\nexport interface PageBlobUploadPagesFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPagesFromURL operation. */\nexport interface PageBlobUploadPagesFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRanges operation. */\nexport interface PageBlobGetPageRangesHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The size of the blob in bytes. */\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRanges operation. */\nexport interface PageBlobGetPageRangesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRangesDiff operation. */\nexport interface PageBlobGetPageRangesDiffHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The size of the blob in bytes. */\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRangesDiff operation. */\nexport interface PageBlobGetPageRangesDiffExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_resize operation. */\nexport interface PageBlobResizeHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_resize operation. 
*/\nexport interface PageBlobResizeExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_updateSequenceNumber operation. */\nexport interface PageBlobUpdateSequenceNumberHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_updateSequenceNumber operation. */\nexport interface PageBlobUpdateSequenceNumberExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_copyIncremental operation. */\nexport interface PageBlobCopyIncrementalHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_copyIncremental operation. */\nexport interface PageBlobCopyIncrementalExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_create operation. */\nexport interface AppendBlobCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_create operation. */\nexport interface AppendBlobCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlock operation. */\nexport interface AppendBlobAppendBlockHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */\n blobAppendOffset?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlock operation. */\nexport interface AppendBlobAppendBlockExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlockFromUrl operation. */\nexport interface AppendBlobAppendBlockFromUrlHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */\n blobAppendOffset?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/\n encryptionScope?: string;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlockFromUrl operation. */\nexport interface AppendBlobAppendBlockFromUrlExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_seal operation. */\nexport interface AppendBlobSealHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** If this blob has been sealed */\n isSealed?: boolean;\n}\n\n/** Defines headers for AppendBlob_seal operation. */\nexport interface AppendBlobSealExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_upload operation. */\nexport interface BlockBlobUploadHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. 
*/\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_upload operation. */\nexport interface BlockBlobUploadExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_putBlobFromUrl operation. */\nexport interface BlockBlobPutBlobFromUrlHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_putBlobFromUrl operation. */\nexport interface BlockBlobPutBlobFromUrlExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlock operation. */\nexport interface BlockBlobStageBlockHeaders {\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlock operation. */\nexport interface BlockBlobStageBlockExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlockFromURL operation. */\nexport interface BlockBlobStageBlockFromURLHeaders {\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlockFromURL operation. 
*/\nexport interface BlockBlobStageBlockFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_commitBlockList operation. */\nexport interface BlockBlobCommitBlockListHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_commitBlockList operation. */\nexport interface BlockBlobCommitBlockListExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_getBlockList operation. */\nexport interface BlockBlobGetBlockListHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The media type of the body of the response. For Get Block List this is 'application/xml' */\n contentType?: string;\n /** The size of the blob in bytes. 
*/\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_getBlockList operation. */\nexport interface BlockBlobGetBlockListExceptionHeaders {\n errorCode?: string;\n}\n\n/** Parameter group */\nexport interface ContainerEncryptionScope {\n /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */\n defaultEncryptionScope?: string;\n /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */\n preventEncryptionScopeOverride?: boolean;\n}\n\n/** Parameter group */\nexport interface LeaseAccessConditions {\n /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */\n leaseId?: string;\n}\n\n/** Parameter group */\nexport interface ModifiedAccessConditions {\n /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */\n ifModifiedSince?: Date;\n /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */\n ifUnmodifiedSince?: Date;\n /** Specify an ETag value to operate only on blobs with a matching value. */\n ifMatch?: string;\n /** Specify an ETag value to operate only on blobs without a matching value. */\n ifNoneMatch?: string;\n /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */\n ifTags?: string;\n}\n\n/** Parameter group */\nexport interface CpkInfo {\n /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionKey?: string;\n /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */\n encryptionKeySha256?: string;\n /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is \"AES256\". Must be provided if the x-ms-encryption-key header is provided. */\n encryptionAlgorithm?: EncryptionAlgorithmType;\n}\n\n/** Parameter group */\nexport interface BlobHttpHeaders {\n /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */\n blobCacheControl?: string;\n /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */\n blobContentType?: string;\n /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */\n blobContentMD5?: Uint8Array;\n /** Optional. Sets the blob's content encoding. 
If specified, this property is stored with the blob and returned with a read request. */\n blobContentEncoding?: string;\n /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */\n blobContentLanguage?: string;\n /** Optional. Sets the blob's Content-Disposition header. */\n blobContentDisposition?: string;\n}\n\n/** Parameter group */\nexport interface SourceModifiedAccessConditions {\n /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */\n sourceIfModifiedSince?: Date;\n /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */\n sourceIfUnmodifiedSince?: Date;\n /** Specify an ETag value to operate only on blobs with a matching value. */\n sourceIfMatch?: string;\n /** Specify an ETag value to operate only on blobs without a matching value. */\n sourceIfNoneMatch?: string;\n /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */\n sourceIfTags?: string;\n}\n\n/** Parameter group */\nexport interface SequenceNumberAccessConditions {\n /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */\n ifSequenceNumberLessThanOrEqualTo?: number;\n /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */\n ifSequenceNumberLessThan?: number;\n /** Specify this header value to operate only on a blob if it has the specified sequence number. */\n ifSequenceNumberEqualTo?: number;\n}\n\n/** Parameter group */\nexport interface AppendPositionAccessConditions {\n /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). */\n maxSize?: number;\n /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */\n appendPosition?: number;\n}\n\n/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */\nexport const enum KnownEncryptionAlgorithmType {\n AES256 = \"AES256\"\n}\n\n/**\n * Defines values for EncryptionAlgorithmType. \\\n * {@link KnownEncryptionAlgorithmType} can be used interchangeably with EncryptionAlgorithmType,\n * this enum contains the known values that the service supports.\n * ### Know values supported by the service\n * **AES256**\n */\nexport type EncryptionAlgorithmType = string;\n\n/** Known values of {@link BlobExpiryOptions} that the service accepts. */\nexport const enum KnownBlobExpiryOptions {\n NeverExpire = \"NeverExpire\",\n RelativeToCreation = \"RelativeToCreation\",\n RelativeToNow = \"RelativeToNow\",\n Absolute = \"Absolute\"\n}\n\n/**\n * Defines values for BlobExpiryOptions. 
\\\n * {@link KnownBlobExpiryOptions} can be used interchangeably with BlobExpiryOptions,\n * this enum contains the known values that the service supports.\n * ### Know values supported by the service\n * **NeverExpire** \\\n * **RelativeToCreation** \\\n * **RelativeToNow** \\\n * **Absolute**\n */\nexport type BlobExpiryOptions = string;\n\n/** Known values of {@link StorageErrorCode} that the service accepts. */\nexport const enum KnownStorageErrorCode {\n AccountAlreadyExists = \"AccountAlreadyExists\",\n AccountBeingCreated = \"AccountBeingCreated\",\n AccountIsDisabled = \"AccountIsDisabled\",\n AuthenticationFailed = \"AuthenticationFailed\",\n AuthorizationFailure = \"AuthorizationFailure\",\n ConditionHeadersNotSupported = \"ConditionHeadersNotSupported\",\n ConditionNotMet = \"ConditionNotMet\",\n EmptyMetadataKey = \"EmptyMetadataKey\",\n InsufficientAccountPermissions = \"InsufficientAccountPermissions\",\n InternalError = \"InternalError\",\n InvalidAuthenticationInfo = \"InvalidAuthenticationInfo\",\n InvalidHeaderValue = \"InvalidHeaderValue\",\n InvalidHttpVerb = \"InvalidHttpVerb\",\n InvalidInput = \"InvalidInput\",\n InvalidMd5 = \"InvalidMd5\",\n InvalidMetadata = \"InvalidMetadata\",\n InvalidQueryParameterValue = \"InvalidQueryParameterValue\",\n InvalidRange = \"InvalidRange\",\n InvalidResourceName = \"InvalidResourceName\",\n InvalidUri = \"InvalidUri\",\n InvalidXmlDocument = \"InvalidXmlDocument\",\n InvalidXmlNodeValue = \"InvalidXmlNodeValue\",\n Md5Mismatch = \"Md5Mismatch\",\n MetadataTooLarge = \"MetadataTooLarge\",\n MissingContentLengthHeader = \"MissingContentLengthHeader\",\n MissingRequiredQueryParameter = \"MissingRequiredQueryParameter\",\n MissingRequiredHeader = \"MissingRequiredHeader\",\n MissingRequiredXmlNode = \"MissingRequiredXmlNode\",\n MultipleConditionHeadersNotSupported = \"MultipleConditionHeadersNotSupported\",\n OperationTimedOut = \"OperationTimedOut\",\n OutOfRangeInput = \"OutOfRangeInput\",\n OutOfRangeQueryParameterValue = \"OutOfRangeQueryParameterValue\",\n RequestBodyTooLarge = \"RequestBodyTooLarge\",\n ResourceTypeMismatch = \"ResourceTypeMismatch\",\n RequestUrlFailedToParse = \"RequestUrlFailedToParse\",\n ResourceAlreadyExists = \"ResourceAlreadyExists\",\n ResourceNotFound = \"ResourceNotFound\",\n ServerBusy = \"ServerBusy\",\n UnsupportedHeader = \"UnsupportedHeader\",\n UnsupportedXmlNode = \"UnsupportedXmlNode\",\n UnsupportedQueryParameter = \"UnsupportedQueryParameter\",\n UnsupportedHttpVerb = \"UnsupportedHttpVerb\",\n AppendPositionConditionNotMet = \"AppendPositionConditionNotMet\",\n BlobAlreadyExists = \"BlobAlreadyExists\",\n BlobImmutableDueToPolicy = \"BlobImmutableDueToPolicy\",\n BlobNotFound = \"BlobNotFound\",\n BlobOverwritten = \"BlobOverwritten\",\n BlobTierInadequateForContentLength = \"BlobTierInadequateForContentLength\",\n BlobUsesCustomerSpecifiedEncryption = \"BlobUsesCustomerSpecifiedEncryption\",\n BlockCountExceedsLimit = \"BlockCountExceedsLimit\",\n BlockListTooLong = \"BlockListTooLong\",\n CannotChangeToLowerTier = \"CannotChangeToLowerTier\",\n CannotVerifyCopySource = \"CannotVerifyCopySource\",\n ContainerAlreadyExists = \"ContainerAlreadyExists\",\n ContainerBeingDeleted = \"ContainerBeingDeleted\",\n ContainerDisabled = \"ContainerDisabled\",\n ContainerNotFound = \"ContainerNotFound\",\n ContentLengthLargerThanTierLimit = \"ContentLengthLargerThanTierLimit\",\n CopyAcrossAccountsNotSupported = \"CopyAcrossAccountsNotSupported\",\n CopyIdMismatch = \"CopyIdMismatch\",\n 
FeatureVersionMismatch = \"FeatureVersionMismatch\",\n IncrementalCopyBlobMismatch = \"IncrementalCopyBlobMismatch\",\n IncrementalCopyOfEarlierVersionSnapshotNotAllowed = \"IncrementalCopyOfEarlierVersionSnapshotNotAllowed\",\n IncrementalCopySourceMustBeSnapshot = \"IncrementalCopySourceMustBeSnapshot\",\n InfiniteLeaseDurationRequired = \"InfiniteLeaseDurationRequired\",\n InvalidBlobOrBlock = \"InvalidBlobOrBlock\",\n InvalidBlobTier = \"InvalidBlobTier\",\n InvalidBlobType = \"InvalidBlobType\",\n InvalidBlockId = \"InvalidBlockId\",\n InvalidBlockList = \"InvalidBlockList\",\n InvalidOperation = \"InvalidOperation\",\n InvalidPageRange = \"InvalidPageRange\",\n InvalidSourceBlobType = \"InvalidSourceBlobType\",\n InvalidSourceBlobUrl = \"InvalidSourceBlobUrl\",\n InvalidVersionForPageBlobOperation = \"InvalidVersionForPageBlobOperation\",\n LeaseAlreadyPresent = \"LeaseAlreadyPresent\",\n LeaseAlreadyBroken = \"LeaseAlreadyBroken\",\n LeaseIdMismatchWithBlobOperation = \"LeaseIdMismatchWithBlobOperation\",\n LeaseIdMismatchWithContainerOperation = \"LeaseIdMismatchWithContainerOperation\",\n LeaseIdMismatchWithLeaseOperation = \"LeaseIdMismatchWithLeaseOperation\",\n LeaseIdMissing = \"LeaseIdMissing\",\n LeaseIsBreakingAndCannotBeAcquired = \"LeaseIsBreakingAndCannotBeAcquired\",\n LeaseIsBreakingAndCannotBeChanged = \"LeaseIsBreakingAndCannotBeChanged\",\n LeaseIsBrokenAndCannotBeRenewed = \"LeaseIsBrokenAndCannotBeRenewed\",\n LeaseLost = \"LeaseLost\",\n LeaseNotPresentWithBlobOperation = \"LeaseNotPresentWithBlobOperation\",\n LeaseNotPresentWithContainerOperation = \"LeaseNotPresentWithContainerOperation\",\n LeaseNotPresentWithLeaseOperation = \"LeaseNotPresentWithLeaseOperation\",\n MaxBlobSizeConditionNotMet = \"MaxBlobSizeConditionNotMet\",\n NoAuthenticationInformation = \"NoAuthenticationInformation\",\n NoPendingCopyOperation = \"NoPendingCopyOperation\",\n OperationNotAllowedOnIncrementalCopyBlob = \"OperationNotAllowedOnIncrementalCopyBlob\",\n PendingCopyOperation = \"PendingCopyOperation\",\n PreviousSnapshotCannotBeNewer = \"PreviousSnapshotCannotBeNewer\",\n PreviousSnapshotNotFound = \"PreviousSnapshotNotFound\",\n PreviousSnapshotOperationNotSupported = \"PreviousSnapshotOperationNotSupported\",\n SequenceNumberConditionNotMet = \"SequenceNumberConditionNotMet\",\n SequenceNumberIncrementTooLarge = \"SequenceNumberIncrementTooLarge\",\n SnapshotCountExceeded = \"SnapshotCountExceeded\",\n SnapshotOperationRateExceeded = \"SnapshotOperationRateExceeded\",\n SnapshotsPresent = \"SnapshotsPresent\",\n SourceConditionNotMet = \"SourceConditionNotMet\",\n SystemInUse = \"SystemInUse\",\n TargetConditionNotMet = \"TargetConditionNotMet\",\n UnauthorizedBlobOverwrite = \"UnauthorizedBlobOverwrite\",\n BlobBeingRehydrated = \"BlobBeingRehydrated\",\n BlobArchived = \"BlobArchived\",\n BlobNotArchived = \"BlobNotArchived\",\n AuthorizationSourceIPMismatch = \"AuthorizationSourceIPMismatch\",\n AuthorizationProtocolMismatch = \"AuthorizationProtocolMismatch\",\n AuthorizationPermissionMismatch = \"AuthorizationPermissionMismatch\",\n AuthorizationServiceMismatch = \"AuthorizationServiceMismatch\",\n AuthorizationResourceTypeMismatch = \"AuthorizationResourceTypeMismatch\"\n}\n\n/**\n * Defines values for StorageErrorCode. 
\\\n * {@link KnownStorageErrorCode} can be used interchangeably with StorageErrorCode,\n * this enum contains the known values that the service supports.\n * ### Know values supported by the service\n * **AccountAlreadyExists** \\\n * **AccountBeingCreated** \\\n * **AccountIsDisabled** \\\n * **AuthenticationFailed** \\\n * **AuthorizationFailure** \\\n * **ConditionHeadersNotSupported** \\\n * **ConditionNotMet** \\\n * **EmptyMetadataKey** \\\n * **InsufficientAccountPermissions** \\\n * **InternalError** \\\n * **InvalidAuthenticationInfo** \\\n * **InvalidHeaderValue** \\\n * **InvalidHttpVerb** \\\n * **InvalidInput** \\\n * **InvalidMd5** \\\n * **InvalidMetadata** \\\n * **InvalidQueryParameterValue** \\\n * **InvalidRange** \\\n * **InvalidResourceName** \\\n * **InvalidUri** \\\n * **InvalidXmlDocument** \\\n * **InvalidXmlNodeValue** \\\n * **Md5Mismatch** \\\n * **MetadataTooLarge** \\\n * **MissingContentLengthHeader** \\\n * **MissingRequiredQueryParameter** \\\n * **MissingRequiredHeader** \\\n * **MissingRequiredXmlNode** \\\n * **MultipleConditionHeadersNotSupported** \\\n * **OperationTimedOut** \\\n * **OutOfRangeInput** \\\n * **OutOfRangeQueryParameterValue** \\\n * **RequestBodyTooLarge** \\\n * **ResourceTypeMismatch** \\\n * **RequestUrlFailedToParse** \\\n * **ResourceAlreadyExists** \\\n * **ResourceNotFound** \\\n * **ServerBusy** \\\n * **UnsupportedHeader** \\\n * **UnsupportedXmlNode** \\\n * **UnsupportedQueryParameter** \\\n * **UnsupportedHttpVerb** \\\n * **AppendPositionConditionNotMet** \\\n * **BlobAlreadyExists** \\\n * **BlobImmutableDueToPolicy** \\\n * **BlobNotFound** \\\n * **BlobOverwritten** \\\n * **BlobTierInadequateForContentLength** \\\n * **BlobUsesCustomerSpecifiedEncryption** \\\n * **BlockCountExceedsLimit** \\\n * **BlockListTooLong** \\\n * **CannotChangeToLowerTier** \\\n * **CannotVerifyCopySource** \\\n * **ContainerAlreadyExists** \\\n * **ContainerBeingDeleted** \\\n * **ContainerDisabled** \\\n * **ContainerNotFound** \\\n * **ContentLengthLargerThanTierLimit** \\\n * **CopyAcrossAccountsNotSupported** \\\n * **CopyIdMismatch** \\\n * **FeatureVersionMismatch** \\\n * **IncrementalCopyBlobMismatch** \\\n * **IncrementalCopyOfEarlierVersionSnapshotNotAllowed** \\\n * **IncrementalCopySourceMustBeSnapshot** \\\n * **InfiniteLeaseDurationRequired** \\\n * **InvalidBlobOrBlock** \\\n * **InvalidBlobTier** \\\n * **InvalidBlobType** \\\n * **InvalidBlockId** \\\n * **InvalidBlockList** \\\n * **InvalidOperation** \\\n * **InvalidPageRange** \\\n * **InvalidSourceBlobType** \\\n * **InvalidSourceBlobUrl** \\\n * **InvalidVersionForPageBlobOperation** \\\n * **LeaseAlreadyPresent** \\\n * **LeaseAlreadyBroken** \\\n * **LeaseIdMismatchWithBlobOperation** \\\n * **LeaseIdMismatchWithContainerOperation** \\\n * **LeaseIdMismatchWithLeaseOperation** \\\n * **LeaseIdMissing** \\\n * **LeaseIsBreakingAndCannotBeAcquired** \\\n * **LeaseIsBreakingAndCannotBeChanged** \\\n * **LeaseIsBrokenAndCannotBeRenewed** \\\n * **LeaseLost** \\\n * **LeaseNotPresentWithBlobOperation** \\\n * **LeaseNotPresentWithContainerOperation** \\\n * **LeaseNotPresentWithLeaseOperation** \\\n * **MaxBlobSizeConditionNotMet** \\\n * **NoAuthenticationInformation** \\\n * **NoPendingCopyOperation** \\\n * **OperationNotAllowedOnIncrementalCopyBlob** \\\n * **PendingCopyOperation** \\\n * **PreviousSnapshotCannotBeNewer** \\\n * **PreviousSnapshotNotFound** \\\n * **PreviousSnapshotOperationNotSupported** \\\n * **SequenceNumberConditionNotMet** \\\n * 
**SequenceNumberIncrementTooLarge** \\\n * **SnapshotCountExceeded** \\\n * **SnapshotOperationRateExceeded** \\\n * **SnapshotsPresent** \\\n * **SourceConditionNotMet** \\\n * **SystemInUse** \\\n * **TargetConditionNotMet** \\\n * **UnauthorizedBlobOverwrite** \\\n * **BlobBeingRehydrated** \\\n * **BlobArchived** \\\n * **BlobNotArchived** \\\n * **AuthorizationSourceIPMismatch** \\\n * **AuthorizationProtocolMismatch** \\\n * **AuthorizationPermissionMismatch** \\\n * **AuthorizationServiceMismatch** \\\n * **AuthorizationResourceTypeMismatch**\n */\nexport type StorageErrorCode = string;\n/** Defines values for GeoReplicationStatusType. */\nexport type GeoReplicationStatusType = \"live\" | \"bootstrap\" | \"unavailable\";\n/** Defines values for ListContainersIncludeType. */\nexport type ListContainersIncludeType = \"metadata\" | \"deleted\" | \"system\";\n/** Defines values for LeaseStatusType. */\nexport type LeaseStatusType = \"locked\" | \"unlocked\";\n/** Defines values for LeaseStateType. */\nexport type LeaseStateType =\n | \"available\"\n | \"leased\"\n | \"expired\"\n | \"breaking\"\n | \"broken\";\n/** Defines values for LeaseDurationType. */\nexport type LeaseDurationType = \"infinite\" | \"fixed\";\n/** Defines values for PublicAccessType. */\nexport type PublicAccessType = \"container\" | \"blob\";\n/** Defines values for SkuName. */\nexport type SkuName =\n | \"Standard_LRS\"\n | \"Standard_GRS\"\n | \"Standard_RAGRS\"\n | \"Standard_ZRS\"\n | \"Premium_LRS\";\n/** Defines values for AccountKind. */\nexport type AccountKind =\n | \"Storage\"\n | \"BlobStorage\"\n | \"StorageV2\"\n | \"FileStorage\"\n | \"BlockBlobStorage\";\n/** Defines values for ListBlobsIncludeItem. */\nexport type ListBlobsIncludeItem =\n | \"copy\"\n | \"deleted\"\n | \"metadata\"\n | \"snapshots\"\n | \"uncommittedblobs\"\n | \"versions\"\n | \"tags\"\n | \"immutabilitypolicy\"\n | \"legalhold\"\n | \"deletedwithversions\";\n/** Defines values for BlobType. */\nexport type BlobType = \"BlockBlob\" | \"PageBlob\" | \"AppendBlob\";\n/** Defines values for CopyStatusType. */\nexport type CopyStatusType = \"pending\" | \"success\" | \"aborted\" | \"failed\";\n/** Defines values for AccessTier. */\nexport type AccessTier =\n | \"P4\"\n | \"P6\"\n | \"P10\"\n | \"P15\"\n | \"P20\"\n | \"P30\"\n | \"P40\"\n | \"P50\"\n | \"P60\"\n | \"P70\"\n | \"P80\"\n | \"Hot\"\n | \"Cool\"\n | \"Archive\"\n | \"Cold\";\n/** Defines values for ArchiveStatus. */\nexport type ArchiveStatus =\n | \"rehydrate-pending-to-hot\"\n | \"rehydrate-pending-to-cool\"\n | \"rehydrate-pending-to-cold\";\n/** Defines values for RehydratePriority. */\nexport type RehydratePriority = \"High\" | \"Standard\";\n/** Defines values for BlobImmutabilityPolicyMode. */\nexport type BlobImmutabilityPolicyMode = \"Mutable\" | \"Unlocked\" | \"Locked\";\n/** Defines values for DeleteSnapshotsOptionType. */\nexport type DeleteSnapshotsOptionType = \"include\" | \"only\";\n/** Defines values for BlobCopySourceTags. */\nexport type BlobCopySourceTags = \"REPLACE\" | \"COPY\";\n/** Defines values for BlockListType. */\nexport type BlockListType = \"committed\" | \"uncommitted\" | \"all\";\n/** Defines values for SequenceNumberActionType. */\nexport type SequenceNumberActionType = \"max\" | \"update\" | \"increment\";\n/** Defines values for QueryFormatType. */\nexport type QueryFormatType = \"delimited\" | \"json\" | \"arrow\" | \"parquet\";\n/** Defines values for SyncCopyStatusType. 
*/\nexport type SyncCopyStatusType = \"success\";\n\n/** Optional parameters. */\nexport interface ServiceSetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setProperties operation. */\nexport type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceSetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders &\n BlobServiceProperties & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobServiceProperties;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetPropertiesHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceGetStatisticsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getStatistics operation. */\nexport type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders &\n BlobServiceStatistics & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobServiceStatistics;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetStatisticsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceListContainersSegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. 
The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify that the container's metadata be returned as part of the response body. */\n include?: ListContainersIncludeType[];\n}\n\n/** Contains response data for the listContainersSegment operation. */\nexport type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders &\n ListContainersSegmentResponse & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListContainersSegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceListContainersSegmentHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceGetUserDelegationKeyOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getUserDelegationKey operation. */\nexport type ServiceGetUserDelegationKeyResponse = ServiceGetUserDelegationKeyHeaders &\n UserDelegationKey & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: UserDelegationKey;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n };\n };\n\n/** Contains response data for the getAccountInfo operation. */\nexport type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceSubmitBatchOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the submitBatch operation. 
*/\nexport type ServiceSubmitBatchResponse = ServiceSubmitBatchHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise<Blob>;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceFilterBlobsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Filters the results to return only blobs whose tags match the specified expression. */\n where?: string;\n}\n\n/** Contains response data for the filterBlobs operation. */\nexport type ServiceFilterBlobsResponse = ServiceFilterBlobsHeaders &\n FilterBlobSegment & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: FilterBlobSegment;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceFilterBlobsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n containerEncryptionScope?: ContainerEncryptionScope;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. 
Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies whether data in the container may be accessed publicly and the level of access */\n access?: PublicAccessType;\n}\n\n/** Contains response data for the create operation. */\nexport type ContainerCreateResponse = ContainerCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerDeleteOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the delete operation. */\nexport type ContainerDeleteResponse = ContainerDeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerDeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerSetMetadataOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. 
See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n}\n\n/** Contains response data for the setMetadata operation. */\nexport type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSetMetadataHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerGetAccessPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getAccessPolicy operation. */\nexport type ContainerGetAccessPolicyResponse = ContainerGetAccessPolicyHeaders &\n SignedIdentifier[] & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: SignedIdentifier[];\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerSetAccessPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies whether data in the container may be accessed publicly and the level of access */\n access?: PublicAccessType;\n /** the acls for the container */\n containerAcl?: SignedIdentifier[];\n}\n\n/** Contains response data for the setAccessPolicy operation. */\nexport type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSetAccessPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRestoreOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-12-12 and later. Specifies the name of the deleted container to restore. */\n deletedContainerName?: string;\n /** Optional. Version 2019-12-12 and later. Specifies the version of the deleted container to restore. */\n deletedContainerVersion?: string;\n}\n\n/** Contains response data for the restore operation. 
*/\nexport type ContainerRestoreResponse = ContainerRestoreHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRestoreHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRenameOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A lease ID for the source path. If specified, the source path must have an active lease and the lease ID must match. */\n sourceLeaseId?: string;\n}\n\n/** Contains response data for the rename operation. */\nexport type ContainerRenameResponse = ContainerRenameHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRenameHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerSubmitBatchOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the submitBatch operation. */\nexport type ContainerSubmitBatchResponse = ContainerSubmitBatchHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise<Blob>;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSubmitBatchHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerFilterBlobsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. 
For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Filters the results to return only blobs whose tags match the specified expression. */\n where?: string;\n}\n\n/** Contains response data for the filterBlobs operation. */\nexport type ContainerFilterBlobsResponse = ContainerFilterBlobsHeaders &\n FilterBlobSegment & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: FilterBlobSegment;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerFilterBlobsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerAcquireLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease duration cannot be changed using renew or change. */\n duration?: number;\n /** Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor (String) for a list of valid GUID string formats. */\n proposedLeaseId?: string;\n}\n\n/** Contains response data for the acquireLease operation. */\nexport type ContainerAcquireLeaseResponse = ContainerAcquireLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerAcquireLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerReleaseLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the releaseLease operation. */\nexport type ContainerReleaseLeaseResponse = ContainerReleaseLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerReleaseLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRenewLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. 
*/\n requestId?: string;\n}\n\n/** Contains response data for the renewLease operation. */\nexport type ContainerRenewLeaseResponse = ContainerRenewLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRenewLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerBreakLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */\n breakPeriod?: number;\n}\n\n/** Contains response data for the breakLease operation. */\nexport type ContainerBreakLeaseResponse = ContainerBreakLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerBreakLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerChangeLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the changeLease operation. */\nexport type ContainerChangeLeaseResponse = ContainerChangeLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerChangeLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerListBlobFlatSegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. 
The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify one or more datasets to include in the response. */\n include?: ListBlobsIncludeItem[];\n}\n\n/** Contains response data for the listBlobFlatSegment operation. */\nexport type ContainerListBlobFlatSegmentResponse = ContainerListBlobFlatSegmentHeaders &\n ListBlobsFlatSegmentResponse & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListBlobsFlatSegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerListBlobFlatSegmentHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerListBlobHierarchySegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify one or more datasets to include in the response. */\n include?: ListBlobsIncludeItem[];\n}\n\n/** Contains response data for the listBlobHierarchySegment operation. */\nexport type ContainerListBlobHierarchySegmentResponse = ContainerListBlobHierarchySegmentHeaders &\n ListBlobsHierarchySegmentResponse & {\n /** The underlying HTTP response. 
*/\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListBlobsHierarchySegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerListBlobHierarchySegmentHeaders;\n };\n };\n\n/** Contains response data for the getAccountInfo operation. */\nexport type ContainerGetAccountInfoResponse = ContainerGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDownloadOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */\n rangeGetContentMD5?: boolean;\n /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. */\n rangeGetContentCRC64?: boolean;\n}\n\n/** Contains response data for the download operation. */\nexport type BlobDownloadResponse = BlobDownloadHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise<Blob>;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobDownloadHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. 
*/\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type BlobGetPropertiesResponse = BlobGetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDeleteOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Required if the blob has associated snapshots. Specify one of the following two options: include: Delete the base blob and all of its snapshots. only: Delete only the blob's snapshots and not the blob itself */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /** Optional. Only possible value is 'permanent', which specifies to permanently delete a blob if blob soft delete is enabled. */\n blobDeleteType?: string;\n}\n\n/** Contains response data for the delete operation. */\nexport type BlobDeleteResponse = BlobDeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobDeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobUndeleteOptionalParams extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the undelete operation. */\nexport type BlobUndeleteResponse = BlobUndeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobUndeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetExpiryOptionalParams extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The time to set the blob to expiry */\n expiresOn?: string;\n}\n\n/** Contains response data for the setExpiry operation. */\nexport type BlobSetExpiryResponse = BlobSetExpiryHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetExpiryHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetHttpHeadersOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setHttpHeaders operation. */\nexport type BlobSetHttpHeadersResponse = BlobSetHttpHeadersHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetHttpHeadersHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetImmutabilityPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n}\n\n/** Contains response data for the setImmutabilityPolicy operation. */\nexport type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetImmutabilityPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDeleteImmutabilityPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the deleteImmutabilityPolicy operation. */\nexport type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlobDeleteImmutabilityPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetLegalHoldOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setLegalHold operation. */\nexport type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetLegalHoldHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetMetadataOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the setMetadata operation. */\nexport type BlobSetMetadataResponse = BlobSetMetadataHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetMetadataHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobAcquireLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease duration cannot be changed using renew or change. 
*/\n duration?: number;\n /** Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor (String) for a list of valid GUID string formats. */\n proposedLeaseId?: string;\n}\n\n/** Contains response data for the acquireLease operation. */\nexport type BlobAcquireLeaseResponse = BlobAcquireLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobAcquireLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobReleaseLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the releaseLease operation. */\nexport type BlobReleaseLeaseResponse = BlobReleaseLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobReleaseLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobRenewLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the renewLease operation. */\nexport type BlobRenewLeaseResponse = BlobRenewLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobRenewLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobChangeLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the changeLease operation. */\nexport type BlobChangeLeaseResponse = BlobChangeLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobChangeLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobBreakLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */\n breakPeriod?: number;\n}\n\n/** Contains response data for the breakLease operation. */\nexport type BlobBreakLeaseResponse = BlobBreakLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobBreakLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobCreateSnapshotOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the createSnapshot operation. */\nexport type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobCreateSnapshotHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobStartCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional: Indicates the priority with which to rehydrate an archived blob. */\n rehydratePriority?: RehydratePriority;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Overrides the sealed state of the destination blob. Service version 2019-12-12 and newer. */\n sealBlob?: boolean;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n}\n\n/** Contains response data for the startCopyFromURL operation. */\nexport type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobStartCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. 
Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by x-ms-tags. */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/** Contains response data for the copyFromURL operation. */\nexport type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobAbortCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the abortCopyFromURL operation. */\nexport type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobAbortCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetTierOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Optional: Indicates the priority with which to rehydrate an archived blob. */\n rehydratePriority?: RehydratePriority;\n}\n\n/** Contains response data for the setTier operation. */\nexport type BlobSetTierResponse = BlobSetTierHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlobSetTierHeaders;\n };\n};\n\n/** Contains response data for the getAccountInfo operation. */\nexport type BlobGetAccountInfoResponse = BlobGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobQueryOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** the query request */\n queryRequest?: QueryRequest;\n}\n\n/** Contains response data for the query operation. */\nexport type BlobQueryResponse = BlobQueryHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise<Blob>;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobQueryHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobGetTagsOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n}\n\n/** Contains response data for the getTags operation. */\nexport type BlobGetTagsResponse = BlobGetTagsHeaders &\n BlobTags & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobTags;\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetTagsHeaders;\n };\n };\n\n/** Optional parameters. 
*/\nexport interface BlobSetTagsOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Blob tags */\n tags?: BlobTags;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the setTags operation. */\nexport type BlobSetTagsResponse = BlobSetTagsHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetTagsHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. 
*/\n legalHold?: boolean;\n /** Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 and 2^63 - 1. */\n blobSequenceNumber?: number;\n}\n\n/** Contains response data for the create operation. */\nexport type PageBlobCreateResponse = PageBlobCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUploadPagesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the uploadPages operation. */\nexport type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUploadPagesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobClearPagesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the clearPages operation. 
*/\nexport type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobClearPagesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUploadPagesFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the uploadPagesFromURL operation. */\nexport type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUploadPagesFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobGetPageRangesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. 
Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n}\n\n/** Contains response data for the getPageRanges operation. */\nexport type PageBlobGetPageRangesResponse = PageBlobGetPageRangesHeaders &\n PageList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: PageList;\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobGetPageRangesHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface PageBlobGetPageRangesDiffOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a DateTime value that specifies that the response will contain only pages that were changed between target blob and previous snapshot. Changed pages include both updated and cleared pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is the older of the two. Note that incremental snapshots are currently supported only for blobs created on or after January 1, 2016. 
*/\n prevsnapshot?: string;\n /** Optional. This header is only supported in service versions 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The response will only contain pages that were changed between the target blob and its previous snapshot. */\n prevSnapshotUrl?: string;\n}\n\n/** Contains response data for the getPageRangesDiff operation. */\nexport type PageBlobGetPageRangesDiffResponse = PageBlobGetPageRangesDiffHeaders &\n PageList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: PageList;\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface PageBlobResizeOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the resize operation. */\nexport type PageBlobResizeResponse = PageBlobResizeHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobResizeHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUpdateSequenceNumberOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 and 2^63 - 1. */\n blobSequenceNumber?: number;\n}\n\n/** Contains response data for the updateSequenceNumber operation. */\nexport type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUpdateSequenceNumberHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobCopyIncrementalOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. 
For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the copyIncremental operation. */\nexport type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobCopyIncrementalHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n}\n\n/** Contains response data for the create operation. */\nexport type AppendBlobCreateResponse = AppendBlobCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobAppendBlockOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. 
For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the appendBlock operation. */\nexport type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobAppendBlockHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobAppendBlockFromUrlOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n /** Bytes of source data in the specified range. */\n sourceRange?: string;\n}\n\n/** Contains response data for the appendBlockFromUrl operation. */\nexport type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobAppendBlockFromUrlHeaders;\n };\n};\n\n/** Optional parameters. 
*/\nexport interface AppendBlobSealOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the seal operation. */\nexport type AppendBlobSealResponse = AppendBlobSealHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobSealHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobUploadOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the upload operation. 
*/\nexport type BlockBlobUploadResponse = BlockBlobUploadHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobUploadHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobPutBlobFromUrlOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by x-ms-tags. */\n copySourceTags?: BlobCopySourceTags;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Optional, default is true. Indicates if properties from the source blob should be copied. */\n copySourceBlobProperties?: boolean;\n}\n\n/** Contains response data for the putBlobFromUrl operation. */\nexport type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobPutBlobFromUrlHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobStageBlockOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. 
For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the stageBlock operation. */\nexport type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobStageBlockHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobStageBlockFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n /** Bytes of source data in the specified range. */\n sourceRange?: string;\n}\n\n/** Contains response data for the stageBlockFromURL operation. */\nexport type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobStageBlockFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobCommitBlockListOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the commitBlockList operation. */\nexport type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobCommitBlockListHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobGetBlockListOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n}\n\n/** Contains response data for the getBlockList operation. */\nexport type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders &\n BlockList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlockList;\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlockBlobGetBlockListHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface StorageClientOptionalParams\n extends coreHttp.ServiceClientOptions {\n /** Specifies the version of the operation to use for this request. */\n version?: string;\n /** Overrides client endpoint. */\n endpoint?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js new file mode 100644 index 0000000..e5d4ba2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js @@ -0,0 +1,8202 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging" + } + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule" + } + } + } + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String" + } + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite" + } + } + } + } +}; +export const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String" + } + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean" + } + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean" + } + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +export const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + days: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number" + } + } + } + } +}; +export const Metrics = { + serializedName: "Metrics", + 
type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +export const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String" + } + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", + type: { + name: "String" + } + }, + allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String" + } + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String" + } + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number" + } + } + } + } +}; +export const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String" + } + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String" + } + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String" + } + } + } + } +}; +export const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String" + } + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String" + } + } + } + } +}; +export const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication" + } + } + } + } +}; +export const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"] + } + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + 
modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: "ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; +export const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String" + } + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + 
type: { + name: "Number" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean" + } + } + } + } +}; +export const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String" + } + } + } + } +}; +export const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: "UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String" + } + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String" + } + }, + signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String" + } + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String" + } + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String" + } + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +export const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String" + } + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String" + } + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + } + } + } +}; +export const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag" + } + } + } + } + } + } +}; +export const BlobTag = { + serializedName: "BlobTag", + 
xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +export const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String" + } + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy" + } + } + } + } +}; +export const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + xmlName: "Expiry", + type: { + name: "String" + } + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String" + } + } + } + } +}; +export const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsFlatSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + modelProperties: { + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +export const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", + type: { + name: "Composite", + className: "BlobItemInternal", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + }, + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", + type: { + name: "String" + } + }, + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", + type: { + name: "Boolean" + 
} + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", + type: { + name: "Boolean" + } + } + } + } +}; +export const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + xmlIsMsText: true, + type: { + name: "String" + } + } + } + } +}; +export const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + modelProperties: { + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", + type: { + name: "DateTimeRfc1123" + } + }, + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", + type: { + name: "String" + } + }, + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + }, + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", + 
type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", + type: { + name: "String" + } + }, + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", + type: { + name: "String" + } + }, + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", + type: { + name: "Boolean" + } + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", + type: { + name: "String" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } + }, + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", + type: { + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", + "rehydrate-pending-to-cold" + ] + } + }, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", + type: { + name: "DateTimeRfc1123" + } + }, + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", + type: { + name: "Boolean" + } + } + } + } +}; +export const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + 
type: { + name: "Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix" + } + } + } + }, + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +export const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + } + } + } +}; +export const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; +export const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + }, + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + } + } + } +}; +export const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: 
"Block", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number" + } + } + } + } +}; +export const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange" + } + } + } + }, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +export const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +export const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", + type: { + name: "String" + } + }, + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", + type: { + name: "String" + } + }, + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + }, + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + } + } + } +}; +export const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", + type: { + name: "Composite", + className: "QueryFormat" + } + } + } + } +}; +export const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"] + } + }, + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration" + } + }, + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration" + } + }, + arrowConfiguration: { + 
serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration" + } + }, + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", + type: { + name: "any" + } + } + } + } +}; +export const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: { + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", + type: { + name: "String" + } + }, + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", + type: { + name: "String" + } + }, + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + }, + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", + type: { + name: "String" + } + }, + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", + type: { + name: "Boolean" + } + } + } + } +}; +export const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + } + } + } +}; +export const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField" + } + } + } + } + } + } +}; +export const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "String" + } + }, + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "String" + } + }, + precision: { + serializedName: "Precision", + xmlName: "Precision", + type: { + name: "Number" + } + }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number" + } + } + } + } +}; +export const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const 
ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", 
+ type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: 
"ServiceSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerCreateHeaders = { + serializedName: "Container_createHeaders", + type: { + name: "Composite", + className: "ContainerCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", + type: { + name: "Composite", + className: "ContainerCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesHeaders", + modelProperties: { + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + 
"leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + }, + denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", + type: { + name: "Composite", + className: "ContainerDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", + type: { + name: "Composite", + className: "ContainerDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + 
name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + 
serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRestoreExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: 
"String" + } + } + } + } +}; +export const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: 
"x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerBreakLeaseHeaders = { + serializedName: "Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } 
+ }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + 
serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +export const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: 
"x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: 
"x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +export const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", + type: { + name: "Composite", + className: "BlobDownloadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: 
"x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + 
rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { 
+ name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: 
"x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + } + } +}; +export const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + } + } +}; +export const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + 
date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const 
BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + 
name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const 
BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const 
BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +export const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + 
cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: 
"x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +export const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: 
"String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: 
"last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesHeaders = { + serializedName: 
"PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: 
"x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: 
["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: 
"String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + 
type: { + name: "String" + } + } + } + } +}; +export const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + } + } + } +}; +export const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + 
modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + 
errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + 
encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +//# sourceMappingURL=mappers.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map new file mode 100644 index 0000000..1a1cc48 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mappers.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/mappers.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAIH,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,uBAAuB;IACvC,OAAO,EAAE,0BAA0B;IACnC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,oBAAoB,EAAE;gBACpB,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,UAAU;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,UAAU;yBACtB;qBACF;iBACF;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,eAAe;iBAC3B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAA6B;IACvD,cAAc,EAAE,iBAAiB;IACjC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iBAAiB;QAC5B,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,IAAI,EAAE;gBACJ,WAAW,EAAE;oBACX,gBAAgB,EAAE,CAAC;iBACpB;gBACD,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBA
CJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,WAAW,EAAE;oBACX,gBAAgB,EAAE,CAAC;iBACpB;gBACD,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAA6B;IACrD,cAAc,EAAE,eAAe;IAC/B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,eAAe;QAC1B,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,oBAAoB,EAAE;gBACpB,cAAc,EAAE,sBAAsB;gBACtC,OAAO,EAAE,sBAAsB;gBAC/B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,wBAAwB,EAAE;gBACxB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA6B;IACpD,cAAc,EAAE,cAAc;IAC9B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,cAAc;QACzB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,uBAAuB;IACvC,OAAO,EAAE,qBAAqB;IAC9B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,gBAAgB;iBAC5B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAA6B;IACtD,cAAc,EAAE,gBAAgB;IAChC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gBAAgB;QAC3B,eAAe,EAAE;YACf,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,MAAM,EAAE,WAAW,EAAE,aAAa,CAAC;iBACpD;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,cAAc;gBAC9B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA6B;IACrE,cAAc,EAAE,+BAA+B;IAC/C,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,+BAA+B;QAC1C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,YAAY;gBACrB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,WAAW;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,eAAe;yBAC3B;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAA6B;IACrD,cAAc,EAAE,eAAe;IAC/B,OAAO,EAAE,WAAW;IACpB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,eAAe;QAC1B,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,S
AAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,qBAAqB;iBACjC;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;iBACrC;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,8BAA8B,EAAE;gBAC9B,cAAc,EAAE,6BAA6B;gBAC7C,OAAO,EAAE,6BAA6B;gBACtC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uCAAuC,EAAE;gBACvC,cAAc,EAAE,uCAAuC;gBACvD,OAAO,EAAE,uCAAuC;gBAChD,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAA6B;IACzD,cAAc,EAAE,mBAAmB;IACnC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mBAAmB;QAC9B,eAAe,EAAE;YACf,cAAc,EAAE;gBACd,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,aAAa;gBAC7B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,cAAc;gBAC9B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ
,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAA6B;IACzD,cAAc,EAAE,mBAAmB;IACnC,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mBAAmB;QAC9B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,MAAM;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,gBAAgB;yBAC5B;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAA6B;IACtD,cAAc,EAAE,gBAAgB;IAChC,OAAO,EAAE,MAAM;IACf,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gBAAgB;QAC3B,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,UAAU;iBACtB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,OAAO,EAAE,MAAM;IACf,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,KAAK;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,SAAS;yBACrB;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,OAAO,EAAE,KAAK;IACd,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,GAAG,EAAE;gBACH,cAAc,EAAE,KAAK;gBACrB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,KAAK;gBACd,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA6B;IACxD,cAAc,EAAE,kBAAkB;IAClC,OAAO,EAAE,kBAAkB;IAC3B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kBAAkB;QAC7B,eAAe,EAAE;YACf,EAAE,EAAE;gBACF,cAAc,EAAE,IAAI;gBACpB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,IAAI;gBACb,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,cAAc;iBAC1B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA6B;IACpD,cAAc,EAAE,cAAc;IAC9B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,cAAc;QACzB,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,OAAO;gBACvB,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,8BAA8B;IAC9C,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QA
AQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,qBAAqB;iBACjC;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,qBAAqB;IACrC,OAAO,EAAE,OAAO;IAChB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,cAAc,EAAE,MAAM;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,kBAAkB;yBAC9B;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA6B;IACxD,cAAc,EAAE,kBAAkB;IAClC,OAAO,EAAE,MAAM;IACf,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kBAAkB;QAC7B,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,UAAU;iBACtB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,wBAAwB;iBACpC;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,UAAU;iBACtB;aACF;YACD,yBAAyB,EAAE;gBACzB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,WAAW,EAAE,IAAI;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,wBAAwB;IACxC,OAAO,EAAE,YAAY;IACrB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAA
I,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,IAAI;wBACJ,IAAI;wBACJ,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,MAAM;wBACN,SAAS;wBACT,MAAM;qBACP;iBACF;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,0BAA0B;wBAC1B,2BAA2B;wBAC3B,2BAA2B;qBAC5B;iBACF;aACF;YACD,yBAAyB,EAAE;gBACzB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO
,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,sBAAsB;gBACtC,OAAO,EAAE,sBAAsB;gBAC/B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;iBACpC;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,6BAA6B;gBAC7C,OAAO,EAAE,6BAA6B;gBACtC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,mCAAmC;IACnD,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,0BAA0B;iBACtC;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAA6B;IAChE,cAAc,EAAE,0BAA0B;IAC1C,OAAO,EAAE,OAAO;IAChB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0BAA0B;QACrC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,cAAc,EAAE,YAAY;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,YAAY;yBACxB;qBACF;iBACF;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,cAAc,EAAE,MAAM;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,kBAAkB;yBAC9B;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAA6B;IAClD,cAAc,EAAE,YAAY;IAC5B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,YAAY;QACvB,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,UAAU;iBACtB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAA6B;IACvD,cAAc,EAAE,iBAAiB;IACjC,OAAO,EAAE,WAAW;IACpB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iBAAiB;QAC5B,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAA
W;gBACpB,cAAc,EAAE,WAAW;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,QAAQ;yBACf;qBACF;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,cAAc,EAAE,aAAa;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,QAAQ;yBACf;qBACF;iBACF;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,cAAc,EAAE,QAAQ;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,QAAQ;yBACf;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA6B;IACjD,cAAc,EAAE,WAAW;IAC3B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,WAAW;QACtB,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,OAAO;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,OAAO;yBACnB;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,OAAO;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,OAAO;yBACnB;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA6B;IAC7C,cAAc,EAAE,OAAO;IACvB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,OAAO;QAClB,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,cAAc,EAAE,WAAW;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,WAAW;yBACvB;qBACF;iBACF;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,cAAc,EAAE,YAAY;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,YAAY;yBACxB;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA6B;IACjD,cAAc,EAAE,WAAW;IAC3B,OAAO,EAAE,WAAW;IACpB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,WAAW;QACtB,eAAe,EAAE;YACf,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,GAAG,EAAE;gBACH,cAAc,EAAE,KAAK;gBACrB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,KAAK;gBACd,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAA6B;IAClD,cAAc,EAAE,YAAY;IAC5B,OAAO,EAAE,YAAY;IACrB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,YAAY;QACvB,eAAe,EAAE;YACf,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,GAAG,EAAE;gBACH,cAAc,EAAE,KAAK;gBACrB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,KAAK;gBACd,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA6B;IACpD,cAAc,EAAE,cAAc;IAC9B,OAAO,EAAE,cAAc;IACvB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,cAAc;QACzB,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,oB
AAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,oBAAoB;iBAChC;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,oBAAoB;iBAChC;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,oBAAoB;IACpC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,aAAa;iBACzB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAA6B;IACnD,cAAc,EAAE,aAAa;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,aAAa;QACxB,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,SAAS,CAAC;iBACzD;aACF;YACD,0BAA0B,EAAE;gBAC1B,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,4BAA4B;iBACxC;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,uBAAuB;iBACnC;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,oBAAoB;iBAChC;aACF;YACD,wBAAwB,EAAE;gBACxB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,KAAK;iBACZ;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,4BAA4B;IAC5C,OAAO,EAAE,4BAA4B;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,uBAAuB;IACvC,OAAO,EAAE,uBAAuB;IAChC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,oBAAoB;IACpC,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,OAAO;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,YAAY;yBACxB;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAA6B;IAClD,cAAc,EAAE,YAAY;IAC5B,OAAO,EAAE,OAAO;IAChB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,YAAY;QACvB,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QA
CjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EA
AE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2CAA2C,GAA6B;IACnF,cAAc,EAAE,8CAA8C;IAC9D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6CAA6C;QACxD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;qBACd;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;qBACnB;iBACF;aACF;YACD,8BAA8B,EAAE;gBAC9B,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gB
AC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA6B;IACrE,cAAc,EAAE,gCAAgC;IAChD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,+BAA+B;QAC1C,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,YAAY;aACrC;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;iBACrC;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI
,EAAE,SAAS;iBAChB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,uCAAuC,EAAE;gBACvC,cAAc,EAAE,gDAAgD;gBAChE,OAAO,EAAE,gDAAgD;gBACzD,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sCAAsC,GAA6B;IAC9E,cAAc,EAAE,yCAAyC;IACzD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wCAAwC;QACnD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;iBACrC;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gB
ACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,
iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;a
ACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc
,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iDAAiD,GAA6B;IACzF,cAAc,EAAE,oDAAoD;IACpE,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mDAAmD;QAC9D,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;qBACd;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAA
E;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;qBACnB;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uCAAuC,GAA6B;IAC/E,cAAc,EAAE,0CAA0C;IAC1D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yCAAyC;QACpD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,sBAAsB;IACtC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,YAAY;aACrC;YACD,yBAAyB,EAAE;gBACzB,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,UAAU;aACnC;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UA
AU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAA6B;IAChE,cAAc,EAAE,2BAA2B;IAC3C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0BAA0B;QACrC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,YAAY;aACrC;YACD,yBAAyB,EAAE;gBACzB,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAA
E,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,UAAU;aACnC;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,gCAAgC;gBAChD,OAAO,EAAE,gCAAgC;gBACzC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF
;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;iBACpC;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAA6B;IACzD,cAAc,EAAE,oBAAoB;IACpC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mBAAmB;QAC9B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,sBAAsB;IACtC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,
cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAA6B;IAC5D,cAAc,EAAE,uBAAuB;IACvC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sBAAsB;QACjC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA6B;IACrE,cAAc,EAAE,gCAAgC;IAChD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,+BAA+B;QAC1C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,wBAAwB,EAAE;gBACxB,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C
,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yCAAyC,GAA6B;IACjF,cAAc,EAAE,4CAA4C;IAC5D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2CAA2C;QACtD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,E
AAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,
8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAA
I,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,YAAY,EAAE,SAAS;gBACvB,UAAU,EAAE,IAAI;gBAChB,cAAc,EAAE,kBAAkB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBA
CJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;qBACd;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;qBACnB;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA6B;IACxD,cAAc,EAAE,mBAAmB;IACnC,IAAI,EAAE;QACJ,IAAI,EAAE,WA
AW;QACjB,SAAS,EAAE,kBAAkB;QAC7B,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,
IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI
,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OA
AO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0CAA0C,GAA6B;IAClF,cAAc,EAAE,6CAA6C;IAC7D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4CAA4C;QACvD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;
AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yCAAyC,GAA6B;IACjF,cAAc,EAAE,4CAA4C;IAC5D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2CAA2C;QACtD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,
EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uCAAuC,GAA6B;IAC/E,cAAc,EAAE,0CAA0C;IAC1D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yCAAyC;QACpD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,
cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gB
ACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,IAAI,EAAE;gBAC
J,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uCAAuC,GAA6B;IAC/E,cAAc,EAAE,0CAA0C;IAC1D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yCAAyC;QACpD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,
QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0CAA0C,GAA6B;IAClF,cAAc,EAAE,6CAA6C;IAC7D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4CAA4C;QACvD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;
gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\nexport const BlobServiceProperties: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceProperties\",\n xmlName: \"StorageServiceProperties\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceProperties\",\n modelProperties: {\n blobAnalyticsLogging: {\n serializedName: \"Logging\",\n xmlName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\"\n }\n },\n hourMetrics: {\n serializedName: \"HourMetrics\",\n xmlName: \"HourMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n minuteMetrics: {\n serializedName: \"MinuteMetrics\",\n xmlName: \"MinuteMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n cors: {\n serializedName: \"Cors\",\n xmlName: \"Cors\",\n xmlIsWrapped: true,\n xmlElementName: \"CorsRule\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"CorsRule\"\n }\n }\n }\n },\n defaultServiceVersion: {\n serializedName: \"DefaultServiceVersion\",\n xmlName: \"DefaultServiceVersion\",\n type: {\n name: \"String\"\n }\n },\n deleteRetentionPolicy: {\n serializedName: \"DeleteRetentionPolicy\",\n xmlName: \"DeleteRetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n },\n staticWebsite: {\n serializedName: \"StaticWebsite\",\n xmlName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\"\n }\n }\n }\n }\n};\n\nexport const Logging: coreHttp.CompositeMapper = {\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n required: true,\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n deleteProperty: {\n serializedName: \"Delete\",\n required: true,\n xmlName: \"Delete\",\n type: {\n name: \"Boolean\"\n }\n },\n read: {\n serializedName: \"Read\",\n required: true,\n xmlName: \"Read\",\n type: {\n name: \"Boolean\"\n }\n },\n write: {\n serializedName: \"Write\",\n required: true,\n xmlName: \"Write\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const RetentionPolicy: coreHttp.CompositeMapper = {\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\",\n modelProperties: {\n enabled: {\n 
serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n days: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"Days\",\n xmlName: \"Days\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const Metrics: coreHttp.CompositeMapper = {\n serializedName: \"Metrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n includeAPIs: {\n serializedName: \"IncludeAPIs\",\n xmlName: \"IncludeAPIs\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const CorsRule: coreHttp.CompositeMapper = {\n serializedName: \"CorsRule\",\n type: {\n name: \"Composite\",\n className: \"CorsRule\",\n modelProperties: {\n allowedOrigins: {\n serializedName: \"AllowedOrigins\",\n required: true,\n xmlName: \"AllowedOrigins\",\n type: {\n name: \"String\"\n }\n },\n allowedMethods: {\n serializedName: \"AllowedMethods\",\n required: true,\n xmlName: \"AllowedMethods\",\n type: {\n name: \"String\"\n }\n },\n allowedHeaders: {\n serializedName: \"AllowedHeaders\",\n required: true,\n xmlName: \"AllowedHeaders\",\n type: {\n name: \"String\"\n }\n },\n exposedHeaders: {\n serializedName: \"ExposedHeaders\",\n required: true,\n xmlName: \"ExposedHeaders\",\n type: {\n name: \"String\"\n }\n },\n maxAgeInSeconds: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"MaxAgeInSeconds\",\n required: true,\n xmlName: \"MaxAgeInSeconds\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const StaticWebsite: coreHttp.CompositeMapper = {\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n indexDocument: {\n serializedName: \"IndexDocument\",\n xmlName: \"IndexDocument\",\n type: {\n name: \"String\"\n }\n },\n errorDocument404Path: {\n serializedName: \"ErrorDocument404Path\",\n xmlName: \"ErrorDocument404Path\",\n type: {\n name: \"String\"\n }\n },\n defaultIndexDocumentPath: {\n serializedName: \"DefaultIndexDocumentPath\",\n xmlName: \"DefaultIndexDocumentPath\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const StorageError: coreHttp.CompositeMapper = {\n serializedName: \"StorageError\",\n type: {\n name: \"Composite\",\n className: \"StorageError\",\n modelProperties: {\n message: {\n serializedName: \"Message\",\n xmlName: \"Message\",\n type: {\n name: \"String\"\n }\n },\n code: {\n serializedName: \"Code\",\n xmlName: \"Code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobServiceStatistics: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceStatistics\",\n xmlName: \"StorageServiceStats\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceStatistics\",\n modelProperties: {\n geoReplication: {\n serializedName: \"GeoReplication\",\n xmlName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\"\n }\n }\n }\n }\n};\n\nexport const GeoReplication: coreHttp.CompositeMapper = {\n serializedName: 
\"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\",\n modelProperties: {\n status: {\n serializedName: \"Status\",\n required: true,\n xmlName: \"Status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"live\", \"bootstrap\", \"unavailable\"]\n }\n },\n lastSyncOn: {\n serializedName: \"LastSyncTime\",\n required: true,\n xmlName: \"LastSyncTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ListContainersSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListContainersSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListContainersSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n containerItems: {\n serializedName: \"ContainerItems\",\n required: true,\n xmlName: \"Containers\",\n xmlIsWrapped: true,\n xmlElementName: \"Container\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ContainerItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerItem: coreHttp.CompositeMapper = {\n serializedName: \"ContainerItem\",\n xmlName: \"Container\",\n type: {\n name: \"Composite\",\n className: \"ContainerItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n }\n }\n }\n};\n\nexport const ContainerProperties: coreHttp.CompositeMapper = {\n serializedName: \"ContainerProperties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\",\n modelProperties: {\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n publicAccess: {\n serializedName: \"PublicAccess\",\n xmlName: 
\"PublicAccess\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"HasImmutabilityPolicy\",\n xmlName: \"HasImmutabilityPolicy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"HasLegalHold\",\n xmlName: \"HasLegalHold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"DefaultEncryptionScope\",\n xmlName: \"DefaultEncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n preventEncryptionScopeOverride: {\n serializedName: \"DenyEncryptionScopeOverride\",\n xmlName: \"DenyEncryptionScopeOverride\",\n type: {\n name: \"Boolean\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"ImmutableStorageWithVersioningEnabled\",\n xmlName: \"ImmutableStorageWithVersioningEnabled\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const KeyInfo: coreHttp.CompositeMapper = {\n serializedName: \"KeyInfo\",\n type: {\n name: \"Composite\",\n className: \"KeyInfo\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n required: true,\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const UserDelegationKey: coreHttp.CompositeMapper = {\n serializedName: \"UserDelegationKey\",\n type: {\n name: \"Composite\",\n className: \"UserDelegationKey\",\n modelProperties: {\n signedObjectId: {\n serializedName: \"SignedOid\",\n required: true,\n xmlName: \"SignedOid\",\n type: {\n name: \"String\"\n }\n },\n signedTenantId: {\n serializedName: \"SignedTid\",\n required: true,\n xmlName: \"SignedTid\",\n type: {\n name: \"String\"\n }\n },\n signedStartsOn: {\n serializedName: \"SignedStart\",\n required: true,\n xmlName: \"SignedStart\",\n type: {\n name: \"String\"\n }\n },\n signedExpiresOn: {\n serializedName: \"SignedExpiry\",\n required: true,\n xmlName: \"SignedExpiry\",\n type: {\n name: \"String\"\n }\n },\n signedService: {\n serializedName: \"SignedService\",\n required: true,\n xmlName: \"SignedService\",\n type: {\n name: \"String\"\n }\n },\n signedVersion: {\n serializedName: \"SignedVersion\",\n required: true,\n xmlName: \"SignedVersion\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobSegment: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobSegment\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobSegment\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n where: {\n serializedName: \"Where\",\n required: true,\n xmlName: \"Where\",\n type: {\n name: \"String\"\n }\n },\n blobs: {\n serializedName: \"Blobs\",\n required: true,\n xmlName: \"Blobs\",\n xmlIsWrapped: true,\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\"\n }\n }\n }\n },\n continuationToken: 
{\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobItem: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobItem\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n tags: {\n serializedName: \"Tags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n }\n }\n }\n};\n\nexport const BlobTags: coreHttp.CompositeMapper = {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\",\n modelProperties: {\n blobTagSet: {\n serializedName: \"BlobTagSet\",\n required: true,\n xmlName: \"TagSet\",\n xmlIsWrapped: true,\n xmlElementName: \"Tag\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobTag\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobTag: coreHttp.CompositeMapper = {\n serializedName: \"BlobTag\",\n xmlName: \"Tag\",\n type: {\n name: \"Composite\",\n className: \"BlobTag\",\n modelProperties: {\n key: {\n serializedName: \"Key\",\n required: true,\n xmlName: \"Key\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const SignedIdentifier: coreHttp.CompositeMapper = {\n serializedName: \"SignedIdentifier\",\n xmlName: \"SignedIdentifier\",\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\",\n modelProperties: {\n id: {\n serializedName: \"Id\",\n required: true,\n xmlName: \"Id\",\n type: {\n name: \"String\"\n }\n },\n accessPolicy: {\n serializedName: \"AccessPolicy\",\n xmlName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\"\n }\n }\n }\n }\n};\n\nexport const AccessPolicy: coreHttp.CompositeMapper = {\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n },\n permissions: {\n serializedName: \"Permission\",\n xmlName: \"Permission\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsFlatSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsFlatSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsFlatSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n segment: {\n 
serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobFlatListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobFlatListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\",\n modelProperties: {\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobItemInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobItemInternal\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n required: true,\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n snapshot: {\n serializedName: \"Snapshot\",\n required: true,\n xmlName: \"Snapshot\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"VersionId\",\n xmlName: \"VersionId\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"IsCurrentVersion\",\n xmlName: \"IsCurrentVersion\",\n type: {\n name: \"Boolean\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n blobTags: {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n },\n objectReplicationMetadata: {\n serializedName: \"ObjectReplicationMetadata\",\n xmlName: \"OrMetadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n hasVersionsOnly: {\n serializedName: \"HasVersionsOnly\",\n xmlName: \"HasVersionsOnly\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobName: coreHttp.CompositeMapper = {\n serializedName: \"BlobName\",\n type: {\n name: \"Composite\",\n className: \"BlobName\",\n modelProperties: {\n encoded: {\n serializedName: \"Encoded\",\n xmlName: \"Encoded\",\n xmlIsAttribute: true,\n type: {\n name: \"Boolean\"\n }\n },\n content: {\n serializedName: \"content\",\n xmlName: \"content\",\n xmlIsMsText: true,\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobPropertiesInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobPropertiesInternal\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\",\n modelProperties: {\n createdOn: {\n serializedName: \"Creation-Time\",\n xmlName: \"Creation-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n serializedName: \"Content-Length\",\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n 
},\n contentType: {\n serializedName: \"Content-Type\",\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n },\n contentEncoding: {\n serializedName: \"Content-Encoding\",\n xmlName: \"Content-Encoding\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"Content-Language\",\n xmlName: \"Content-Language\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentDisposition: {\n serializedName: \"Content-Disposition\",\n xmlName: \"Content-Disposition\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"Cache-Control\",\n xmlName: \"Cache-Control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"BlobType\",\n xmlName: \"BlobType\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n copyId: {\n serializedName: \"CopyId\",\n xmlName: \"CopyId\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"CopyStatus\",\n xmlName: \"CopyStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n copySource: {\n serializedName: \"CopySource\",\n xmlName: \"CopySource\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"CopyProgress\",\n xmlName: \"CopyProgress\",\n type: {\n name: \"String\"\n }\n },\n copyCompletedOn: {\n serializedName: \"CopyCompletionTime\",\n xmlName: \"CopyCompletionTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"CopyStatusDescription\",\n xmlName: \"CopyStatusDescription\",\n type: {\n name: \"String\"\n }\n },\n serverEncrypted: {\n serializedName: \"ServerEncrypted\",\n xmlName: \"ServerEncrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n incrementalCopy: {\n serializedName: \"IncrementalCopy\",\n xmlName: \"IncrementalCopy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"DestinationSnapshot\",\n xmlName: \"DestinationSnapshot\",\n type: {\n name: \"String\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n accessTier: {\n serializedName: \"AccessTier\",\n xmlName: \"AccessTier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\",\n \"Cold\"\n ]\n }\n },\n accessTierInferred: {\n serializedName: \"AccessTierInferred\",\n xmlName: \"AccessTierInferred\",\n type: {\n name: 
\"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"ArchiveStatus\",\n xmlName: \"ArchiveStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"rehydrate-pending-to-hot\",\n \"rehydrate-pending-to-cool\",\n \"rehydrate-pending-to-cold\"\n ]\n }\n },\n customerProvidedKeySha256: {\n serializedName: \"CustomerProvidedKeySha256\",\n xmlName: \"CustomerProvidedKeySha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"EncryptionScope\",\n xmlName: \"EncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"AccessTierChangeTime\",\n xmlName: \"AccessTierChangeTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n tagCount: {\n serializedName: \"TagCount\",\n xmlName: \"TagCount\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry-Time\",\n xmlName: \"Expiry-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"Sealed\",\n xmlName: \"Sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"RehydratePriority\",\n xmlName: \"RehydratePriority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessedOn: {\n serializedName: \"LastAccessTime\",\n xmlName: \"LastAccessTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"ImmutabilityPolicyUntilDate\",\n xmlName: \"ImmutabilityPolicyUntilDate\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"ImmutabilityPolicyMode\",\n xmlName: \"ImmutabilityPolicyMode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"LegalHold\",\n xmlName: \"LegalHold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsHierarchySegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsHierarchySegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsHierarchySegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n delimiter: {\n serializedName: \"Delimiter\",\n xmlName: \"Delimiter\",\n type: {\n name: \"String\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobHierarchyListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobHierarchyListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\",\n modelProperties: {\n blobPrefixes: {\n serializedName: \"BlobPrefixes\",\n xmlName: \"BlobPrefixes\",\n xmlElementName: 
\"BlobPrefix\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\"\n }\n }\n }\n },\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobPrefix: coreHttp.CompositeMapper = {\n serializedName: \"BlobPrefix\",\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n }\n }\n }\n};\n\nexport const BlockLookupList: coreHttp.CompositeMapper = {\n serializedName: \"BlockLookupList\",\n xmlName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockLookupList\",\n modelProperties: {\n committed: {\n serializedName: \"Committed\",\n xmlName: \"Committed\",\n xmlElementName: \"Committed\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n uncommitted: {\n serializedName: \"Uncommitted\",\n xmlName: \"Uncommitted\",\n xmlElementName: \"Uncommitted\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n latest: {\n serializedName: \"Latest\",\n xmlName: \"Latest\",\n xmlElementName: \"Latest\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlockList: coreHttp.CompositeMapper = {\n serializedName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockList\",\n modelProperties: {\n committedBlocks: {\n serializedName: \"CommittedBlocks\",\n xmlName: \"CommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n },\n uncommittedBlocks: {\n serializedName: \"UncommittedBlocks\",\n xmlName: \"UncommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const Block: coreHttp.CompositeMapper = {\n serializedName: \"Block\",\n type: {\n name: \"Composite\",\n className: \"Block\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n size: {\n serializedName: \"Size\",\n required: true,\n xmlName: \"Size\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const PageList: coreHttp.CompositeMapper = {\n serializedName: \"PageList\",\n type: {\n name: \"Composite\",\n className: \"PageList\",\n modelProperties: {\n pageRange: {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n xmlElementName: \"PageRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"PageRange\"\n }\n }\n }\n },\n clearRange: {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n xmlElementName: \"ClearRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ClearRange\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageRange: coreHttp.CompositeMapper = {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n type: {\n name: \"Composite\",\n 
className: \"PageRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ClearRange: coreHttp.CompositeMapper = {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n type: {\n name: \"Composite\",\n className: \"ClearRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const QueryRequest: coreHttp.CompositeMapper = {\n serializedName: \"QueryRequest\",\n xmlName: \"QueryRequest\",\n type: {\n name: \"Composite\",\n className: \"QueryRequest\",\n modelProperties: {\n queryType: {\n serializedName: \"QueryType\",\n required: true,\n xmlName: \"QueryType\",\n type: {\n name: \"String\"\n }\n },\n expression: {\n serializedName: \"Expression\",\n required: true,\n xmlName: \"Expression\",\n type: {\n name: \"String\"\n }\n },\n inputSerialization: {\n serializedName: \"InputSerialization\",\n xmlName: \"InputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n },\n outputSerialization: {\n serializedName: \"OutputSerialization\",\n xmlName: \"OutputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n }\n }\n }\n};\n\nexport const QuerySerialization: coreHttp.CompositeMapper = {\n serializedName: \"QuerySerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\",\n modelProperties: {\n format: {\n serializedName: \"Format\",\n xmlName: \"Format\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\"\n }\n }\n }\n }\n};\n\nexport const QueryFormat: coreHttp.CompositeMapper = {\n serializedName: \"QueryFormat\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"delimited\", \"json\", \"arrow\", \"parquet\"]\n }\n },\n delimitedTextConfiguration: {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\"\n }\n },\n jsonTextConfiguration: {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\"\n }\n },\n arrowConfiguration: {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\"\n }\n },\n parquetTextConfiguration: {\n serializedName: \"ParquetTextConfiguration\",\n xmlName: \"ParquetTextConfiguration\",\n type: {\n name: \"any\"\n }\n }\n }\n }\n};\n\nexport const DelimitedTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\",\n modelProperties: {\n columnSeparator: {\n serializedName: \"ColumnSeparator\",\n xmlName: \"ColumnSeparator\",\n type: {\n name: \"String\"\n }\n },\n fieldQuote: {\n serializedName: \"FieldQuote\",\n xmlName: \"FieldQuote\",\n type: {\n name: \"String\"\n }\n },\n recordSeparator: 
{\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n },\n escapeChar: {\n serializedName: \"EscapeChar\",\n xmlName: \"EscapeChar\",\n type: {\n name: \"String\"\n }\n },\n headersPresent: {\n serializedName: \"HeadersPresent\",\n xmlName: \"HasHeaders\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const JsonTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\",\n modelProperties: {\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ArrowConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\",\n modelProperties: {\n schema: {\n serializedName: \"Schema\",\n required: true,\n xmlName: \"Schema\",\n xmlIsWrapped: true,\n xmlElementName: \"Field\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ArrowField\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const ArrowField: coreHttp.CompositeMapper = {\n serializedName: \"ArrowField\",\n xmlName: \"Field\",\n type: {\n name: \"Composite\",\n className: \"ArrowField\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"String\"\n }\n },\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n precision: {\n serializedName: \"Precision\",\n xmlName: \"Precision\",\n type: {\n name: \"Number\"\n }\n },\n scale: {\n serializedName: \"Scale\",\n xmlName: \"Scale\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: 
\"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: 
{\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n isHierarchicalNamespaceEnabled: {\n serializedName: \"x-ms-is-hns-enabled\",\n xmlName: \"x-ms-is-hns-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchExceptionHeaders: 
coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesHeaders\",\n modelProperties: {\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n 
serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"x-ms-has-immutability-policy\",\n xmlName: \"x-ms-has-immutability-policy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"x-ms-has-legal-hold\",\n xmlName: \"x-ms-has-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n denyEncryptionScopeOverride: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"x-ms-immutable-storage-with-versioning-enabled\",\n xmlName: \"x-ms-immutable-storage-with-versioning-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: 
{\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyHeaders\",\n modelProperties: {\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n 
}\n }\n};\n\nexport const ContainerSetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n 
}\n};\n\nexport const ContainerRenameExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n 
}\n};\n\nexport const ContainerAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n 
serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: 
\"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadHeaders: 
coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n xmlName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: 
\"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n 
}\n }\n }\n }\n};\n\nexport const BlobGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n xmlName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n isIncrementalCopy: {\n serializedName: \"x-ms-incremental-copy\",\n xmlName: \"x-ms-incremental-copy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"x-ms-copy-destination-snapshot\",\n xmlName: \"x-ms-copy-destination-snapshot\",\n type: {\n name: \"String\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: 
\"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n accessTier: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n },\n accessTierInferred: {\n serializedName: \"x-ms-access-tier-inferred\",\n xmlName: \"x-ms-access-tier-inferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"x-ms-archive-status\",\n xmlName: \"x-ms-archive-status\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"x-ms-access-tier-change-time\",\n xmlName: \"x-ms-access-tier-change-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: 
\"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryHeaders\",\n modelProperties: {\n etag: {\n serializedName: 
\"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiry: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n 
type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: 
\"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: 
\"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: 
\"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotHeaders\",\n modelProperties: {\n snapshot: {\n serializedName: \"x-ms-snapshot\",\n xmlName: \"x-ms-snapshot\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"BlobStartCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n defaultValue: \"success\",\n isConstant: true,\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: 
\"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n 
allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletionTime: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", 
\"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: 
\"Blob_getTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n 
name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n 
xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n 
serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n 
xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: 
[\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: 
\"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: 
\"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: 
\"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: 
\"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n 
serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"BlockBlobGetBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js new file mode 100644 index 0000000..99de150 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js @@ -0,0 +1,1610 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import { QueryCollectionFormat } from "@azure/core-http"; +import { BlobServiceProperties as BlobServicePropertiesMapper, KeyInfo as KeyInfoMapper, QueryRequest as QueryRequestMapper, BlobTags as BlobTagsMapper, BlockLookupList as BlockLookupListMapper } from "../models/mappers"; +export const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +export const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServicePropertiesMapper +}; +export const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true +}; +export const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } +}; +export const version = { + parameterPath: "version", + mapper: { + defaultValue: "2023-11-03", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } +}; +export const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; +export const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } +}; +export const marker = { + parameterPath: 
["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } +}; +export const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } +}; +export const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: QueryCollectionFormat.Csv +}; +export const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfoMapper +}; +export const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +export const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } +}; +export const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } +}; +export const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } +}; +export const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + } +}; +export const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } +}; +export const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } +}; +export const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } +}; +export const leaseId = { + parameterPath: 
["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +export const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } +}; +export const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } +}; +export const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } +}; +export const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } +}; +export const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } +}; +export const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } + } +}; +export const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +export const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: 
"String" + } + } +}; +export const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +export const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } +}; +export const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +export const include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } + }, + collectionFormat: QueryCollectionFormat.Csv +}; +export const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" + } + } +}; +export const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } +}; +export const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } + } +}; +export const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +export const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } +}; +export const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } +}; +export const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" + } + } +}; +export const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } +}; +export const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + 
} + } +}; +export const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } +}; +export const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } +}; +export const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } +}; +export const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } +}; +export const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } +}; +export const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } +}; +export const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } +}; +export const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } +}; +export const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" + } + } +}; +export const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + } +}; +export const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" + } + } +}; +export const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" + } + } +}; +export const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } +}; +export const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } 
+ } +}; +export const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } +}; +export const comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +export const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + } +}; +export const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } + } +}; +export const rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + } +}; +export const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" + } + } +}; +export const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } +}; +export const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" + } + } +}; +export const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +export const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" + } + } +}; +export const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: 
"Boolean" + } + } +}; +export const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +export const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String" + } + } +}; +export const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" + } + } +}; +export const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" + } + } +}; +export const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } + } +}; +export const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String" + } + } +}; +export const copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } + } +}; +export const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } + } +}; +export const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequestMapper +}; +export const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTagsMapper +}; +export const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } +}; +export const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } +}; +export const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + 
xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } +}; +export const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } +}; +export const contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +export const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +export const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +export const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } +}; +export const ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" + } + } +}; +export const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } +}; +export const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +export const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +export const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +export const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } +}; +export const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +export const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" + } + } +}; +export const prevSnapshotUrl = { + parameterPath: ["options", 
"prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" + } + } +}; +export const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"] + } + } +}; +export const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" + } + } +}; +export const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } +}; +export const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +export const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" + } + } +}; +export const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" + } + } +}; +export const blocks = { + parameterPath: "blocks", + mapper: BlockLookupListMapper +}; +export const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } +}; +//# sourceMappingURL=parameters.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map new file mode 100644 index 0000000..9b1eac5 --- /dev/null +++ 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parameters.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/parameters.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAIL,qBAAqB,EACtB,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EACL,qBAAqB,IAAI,2BAA2B,EACpD,OAAO,IAAI,aAAa,EACxB,YAAY,IAAI,kBAAkB,EAClC,QAAQ,IAAI,cAAc,EAC1B,eAAe,IAAI,qBAAqB,EACzC,MAAM,mBAAmB,CAAC;AAE3B,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAAuB;IACvD,aAAa,EAAE,uBAAuB;IACtC,MAAM,EAAE,2BAA2B;CACpC,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,GAAG,GAA0B;IACxC,aAAa,EAAE,KAAK;IACpB,MAAM,EAAE;QACN,cAAc,EAAE,KAAK;QACrB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,KAAK;QACd,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;IACD,YAAY,EAAE,IAAI;CACnB,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAA4B;IAC3C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA4B;IACvD,aAAa,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC;IAC9C,MAAM,EAAE;QACN,WAAW,EAAE;YACX,gBAAgB,EAAE,CAAC;SACpB;QACD,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAA4B;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,WAAW,EAAE;YACX,gBAAgB,EAAE,CAAC;SACpB;QACD,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,cAAc,EAAE,2BAA2B;QAC3C,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CA
AC,UAAU,EAAE,SAAS,EAAE,QAAQ,CAAC;iBACjD;aACF;SACF;KACF;IACD,gBAAgB,EAAE,qBAAqB,CAAC,GAAG;CAC5C,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE,aAAa;CACtB,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,mBAAmB;QACjC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,MAAM;QACtB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,MAAM;QACf,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,eAAe;IAC9B,MAAM,EAAE;QACN,cAAc,EAAE,gBAAgB;QAChC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,gBAAgB;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,sBAAsB;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;IACnC,MAAM,EAAE;QACN,cAAc,EAAE,OAAO;QACvB,OAAO,EAAE,OAAO;QAChB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,YAAY;YAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;SACpC;QACD,sBAAsB,EAAE,YAAY;KACrC;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;SACrC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE;QACb,SAAS;QACT,0BAA0B;QAC1B,wBAAwB;KACzB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAAuB;IAChE,aAAa,EAAE;QACb,SAAS;QACT,0BAA0B;QAC1B,gCAAgC;KACjC;IACD,MAAM,EAAE;QACN,cAAc,EAAE,qCAAqC;QACrD,OAAO,EAAE,qCAAqC;QAC9C,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,uBAAuB,EAAE,SAAS,CAAC;IAC9D,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,iBAAiB,CAAC;IACzE,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,mBAAmB,CAAC;IAC3E,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SA
Cf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,KAAK;QACnB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,mBAAmB;QAC5B,YAAY,EAAE,IAAI;QAClB,cAAc,EAAE,kBAAkB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,kBAAkB;iBAC9B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,CAAC,SAAS,EAAE,sBAAsB,CAAC;IAClD,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,gCAAgC;QAChD,OAAO,EAAE,gCAAgC;QACzC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,qBAAqB;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,eAAe,CAAC;IAC3C,MAAM,EAAE;QACN,cAAc,EAAE,sBAAsB;QACtC,OAAO,EAAE,sBAAsB;QAC/B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,C
AAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,iBAAiB;IAChC,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,cAAc,EAAE,sBAAsB;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,MAAM;wBACN,SAAS;wBACT,UAAU;wBACV,WAAW;wBACX,kBAAkB;wBAClB,UAAU;wBACV,MAAM;wBACN,oBAAoB;wBACpB,WAAW;wBACX,qBAAqB;qBACtB;iBACF;aACF;SACF;KACF;IACD,gBAAgB,EAAE,qBAAqB,CAAC,GAAG;CAC5C,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA4B;IAChD,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,UAAU;QAC1B,OAAO,EAAE,UAAU;QACnB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA4B;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;IACnC,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,CAAC,SAAS,EAAE,sBAAsB,CAAC;IAClD,MAAM,EAAE;QACN,cAAc,EAAE,8BAA8B;QAC9C,OAAO,EAAE,8BAA8B;QACvC,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,eAAe,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,qBAAqB,CAAC;IAC5D,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,qBAAqB,CAAC;IAC5D,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,SAAS,CAAC;IACjE,MAAM,EAAE;QACN,cAAc,EAAE,UAAU;QAC1B,OAAO,EAAE,UAAU;QACnB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,aAAa,CAAC;IACrE,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,QAAQ,CAAC;IAChE,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;SACnC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAA4B;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,C
AAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,eAAe;IAC9B,MAAM,EAAE;QACN,cAAc,EAAE,oBAAoB;QACpC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,oBAAoB;QAC7B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,kBAAkB,CAAC;IACjE,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,iBAAiB,CAAC;IAChE,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,gBAAgB,CAAC;IAC/D,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,qBAAqB,CAAC;IACpE,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,qBAAqB,CAAC;IACpE,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,wBAAwB,CAAC;IACvE,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,sBAAsB;QACpC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,qCAAqC;QACrD,OAAO,EAAE,qCAAqC;QAC9C,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE,CAAC,SAAS,EAAE,wBAAwB,CAAC;IACpD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;SACjD;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,cAAc,EAAE,iBAAiB;QACjC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,iBAAiB;QAC1B,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;IAClC,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE;gBACb,IAAI;gBACJ,IAAI;gBACJ,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,MAAM;gBACN,SAAS;gBACT,MAAM;aACP;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,mBAAmB,CAAC;IAC/C,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAA
C,MAAM,EAAE,UAAU,CAAC;SACpC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAAuB;IACvD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,uBAAuB;KACxB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,yBAAyB;KAC1B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,iCAAiC;QACjD,OAAO,EAAE,iCAAiC;QAC1C,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,eAAe,CAAC;IAC7E,MAAM,EAAE;QACN,cAAc,EAAE,sBAAsB;QACtC,OAAO,EAAE,sBAAsB;QAC/B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,mBAAmB;KACpB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,cAAc,CAAC;IAC5E,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,YAAY;IAC3B,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,gBAAgB;QAChC,OAAO,EAAE,gBAAgB;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,iBAAiB;QACjC,OAAO,EAAE,iBAAiB;QAC1B,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,iBAAiB;IAChC,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,oBAAoB;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC;IAC9C,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,gCAAgC;QAChD,OAAO,EAAE,gCAAgC;QACzC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;SACnC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,yBAAyB;IACxC,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,kBAAkB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YAC
Z,aAAa,EAAE;gBACb,IAAI;gBACJ,IAAI;gBACJ,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,MAAM;gBACN,SAAS;gBACT,MAAM;aACP;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE,kBAAkB;CAC3B,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;IAClC,MAAM,EAAE,cAAc;CACvB,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,aAAa;QAC7B,OAAO,EAAE,aAAa;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAAuB;IAC3D,aAAa,EAAE,CAAC,SAAS,EAAE,2BAA2B,CAAC;IACvD,MAAM,EAAE;QACN,cAAc,EAAE,oBAAoB;QACpC,OAAO,EAAE,oBAAoB;QAC7B,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,mBAAmB;IAClC,MAAM,EAAE;QACN,cAAc,EAAE,0BAA0B;QAC1C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,0BAA0B;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,YAAY,EAAE,0BAA0B;QACxC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,MAAM;QACtB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,MAAM;QACf,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,iBAAiB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAAuB;IACnE,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,mCAAmC;KACpC;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,0BAA0B;KAC3B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,yBAAyB;KAC1B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,iBAAiB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;I
AC1B,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,aAAa;IAC5B,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,OAAO;IACtB,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA4B;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,sBAAsB;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,CAAC;SAC9C;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,aAAa;QAC3B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,SAAS,CAAC;IACvE,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,gBAAgB;KACjB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,kCAAkC;QAClD,OAAO,EAAE,kCAAkC;QAC3C,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,SAAS;QAClB,
IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE,qBAAqB;CAC9B,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,cAAc,EAAE,eAAe;QAC/B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,CAAC;SACnD;KACF;CACF,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n OperationParameter,\n OperationURLParameter,\n OperationQueryParameter,\n QueryCollectionFormat\n} from \"@azure/core-http\";\nimport {\n BlobServiceProperties as BlobServicePropertiesMapper,\n KeyInfo as KeyInfoMapper,\n QueryRequest as QueryRequestMapper,\n BlobTags as BlobTagsMapper,\n BlockLookupList as BlockLookupListMapper\n} from \"../models/mappers\";\n\nexport const contentType: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobServiceProperties: OperationParameter = {\n parameterPath: \"blobServiceProperties\",\n mapper: BlobServicePropertiesMapper\n};\n\nexport const accept: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const url: OperationURLParameter = {\n parameterPath: \"url\",\n mapper: {\n serializedName: \"url\",\n required: true,\n xmlName: \"url\",\n type: {\n name: \"String\"\n }\n },\n skipEncoding: true\n};\n\nexport const restype: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"service\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"properties\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const timeoutInSeconds: OperationQueryParameter = {\n parameterPath: [\"options\", \"timeoutInSeconds\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"timeout\",\n xmlName: \"timeout\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const version: OperationParameter = {\n parameterPath: \"version\",\n mapper: {\n defaultValue: \"2023-11-03\",\n isConstant: true,\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const requestId: OperationParameter = {\n parameterPath: [\"options\", \"requestId\"],\n mapper: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const accept1: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp1: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"stats\",\n isConstant: true,\n serializedName: 
\"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp2: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"list\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prefix: OperationQueryParameter = {\n parameterPath: [\"options\", \"prefix\"],\n mapper: {\n serializedName: \"prefix\",\n xmlName: \"prefix\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const marker: OperationQueryParameter = {\n parameterPath: [\"options\", \"marker\"],\n mapper: {\n serializedName: \"marker\",\n xmlName: \"marker\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxPageSize: OperationQueryParameter = {\n parameterPath: [\"options\", \"maxPageSize\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"maxresults\",\n xmlName: \"maxresults\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const include: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListContainersIncludeType\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\"metadata\", \"deleted\", \"system\"]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const keyInfo: OperationParameter = {\n parameterPath: \"keyInfo\",\n mapper: KeyInfoMapper\n};\n\nexport const comp3: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"userdelegationkey\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype1: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"account\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const comp4: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"batch\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const contentLength: OperationParameter = {\n parameterPath: \"contentLength\",\n mapper: {\n serializedName: \"Content-Length\",\n required: true,\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const multipartContentType: OperationParameter = {\n parameterPath: \"multipartContentType\",\n mapper: {\n serializedName: \"Content-Type\",\n required: true,\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp5: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blobs\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const where: OperationQueryParameter = {\n parameterPath: [\"options\", \"where\"],\n mapper: {\n serializedName: \"where\",\n xmlName: \"where\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype2: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"container\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const metadata: OperationParameter = {\n parameterPath: [\"options\", \"metadata\"],\n mapper: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: 
\"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n }\n};\n\nexport const access: OperationParameter = {\n parameterPath: [\"options\", \"access\"],\n mapper: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n }\n};\n\nexport const defaultEncryptionScope: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"defaultEncryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const preventEncryptionScopeOverride: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"preventEncryptionScopeOverride\"\n ],\n mapper: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const leaseId: OperationParameter = {\n parameterPath: [\"options\", \"leaseAccessConditions\", \"leaseId\"],\n mapper: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifModifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifModifiedSince\"],\n mapper: {\n serializedName: \"If-Modified-Since\",\n xmlName: \"If-Modified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const ifUnmodifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifUnmodifiedSince\"],\n mapper: {\n serializedName: \"If-Unmodified-Since\",\n xmlName: \"If-Unmodified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const comp6: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"metadata\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp7: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"acl\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const containerAcl: OperationParameter = {\n parameterPath: [\"options\", \"containerAcl\"],\n mapper: {\n serializedName: \"containerAcl\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n }\n};\n\nexport const comp8: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"undelete\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerName: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerName\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-name\",\n xmlName: \"x-ms-deleted-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerVersion: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerVersion\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-version\",\n xmlName: \"x-ms-deleted-container-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp9: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"rename\",\n isConstant: true,\n serializedName: 
\"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContainerName: OperationParameter = {\n parameterPath: \"sourceContainerName\",\n mapper: {\n serializedName: \"x-ms-source-container-name\",\n required: true,\n xmlName: \"x-ms-source-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"sourceLeaseId\"],\n mapper: {\n serializedName: \"x-ms-source-lease-id\",\n xmlName: \"x-ms-source-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp10: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"lease\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"acquire\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const duration: OperationParameter = {\n parameterPath: [\"options\", \"duration\"],\n mapper: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const proposedLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"proposedLeaseId\"],\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action1: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"release\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const leaseId1: OperationParameter = {\n parameterPath: \"leaseId\",\n mapper: {\n serializedName: \"x-ms-lease-id\",\n required: true,\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action2: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"renew\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action3: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"break\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const breakPeriod: OperationParameter = {\n parameterPath: [\"options\", \"breakPeriod\"],\n mapper: {\n serializedName: \"x-ms-lease-break-period\",\n xmlName: \"x-ms-lease-break-period\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const action4: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"change\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const proposedLeaseId1: OperationParameter = {\n parameterPath: \"proposedLeaseId\",\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n required: true,\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const include1: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListBlobsIncludeItem\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"copy\",\n \"deleted\",\n \"metadata\",\n \"snapshots\",\n \"uncommittedblobs\",\n \"versions\",\n \"tags\",\n \"immutabilitypolicy\",\n \"legalhold\",\n \"deletedwithversions\"\n ]\n }\n }\n }\n },\n 
collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const delimiter: OperationQueryParameter = {\n parameterPath: \"delimiter\",\n mapper: {\n serializedName: \"delimiter\",\n required: true,\n xmlName: \"delimiter\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const snapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"snapshot\"],\n mapper: {\n serializedName: \"snapshot\",\n xmlName: \"snapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const versionId: OperationQueryParameter = {\n parameterPath: [\"options\", \"versionId\"],\n mapper: {\n serializedName: \"versionid\",\n xmlName: \"versionid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const range: OperationParameter = {\n parameterPath: [\"options\", \"range\"],\n mapper: {\n serializedName: \"x-ms-range\",\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const rangeGetContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentMD5\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-md5\",\n xmlName: \"x-ms-range-get-content-md5\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const rangeGetContentCRC64: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentCRC64\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-crc64\",\n xmlName: \"x-ms-range-get-content-crc64\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionKey: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKey\"],\n mapper: {\n serializedName: \"x-ms-encryption-key\",\n xmlName: \"x-ms-encryption-key\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionKeySha256: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKeySha256\"],\n mapper: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionAlgorithm: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionAlgorithm\"],\n mapper: {\n serializedName: \"x-ms-encryption-algorithm\",\n xmlName: \"x-ms-encryption-algorithm\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifMatch\"],\n mapper: {\n serializedName: \"If-Match\",\n xmlName: \"If-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifNoneMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifNoneMatch\"],\n mapper: {\n serializedName: \"If-None-Match\",\n xmlName: \"If-None-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifTags: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifTags\"],\n mapper: {\n serializedName: \"x-ms-if-tags\",\n xmlName: \"x-ms-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deleteSnapshots: OperationParameter = {\n parameterPath: [\"options\", \"deleteSnapshots\"],\n mapper: {\n serializedName: \"x-ms-delete-snapshots\",\n xmlName: \"x-ms-delete-snapshots\",\n type: {\n name: \"Enum\",\n allowedValues: [\"include\", \"only\"]\n }\n }\n};\n\nexport const blobDeleteType: OperationQueryParameter = {\n parameterPath: [\"options\", \"blobDeleteType\"],\n mapper: {\n serializedName: \"deletetype\",\n xmlName: \"deletetype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp11: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n 
defaultValue: \"expiry\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiryOptions: OperationParameter = {\n parameterPath: \"expiryOptions\",\n mapper: {\n serializedName: \"x-ms-expiry-option\",\n required: true,\n xmlName: \"x-ms-expiry-option\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiresOn: OperationParameter = {\n parameterPath: [\"options\", \"expiresOn\"],\n mapper: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobCacheControl: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobCacheControl\"],\n mapper: {\n serializedName: \"x-ms-blob-cache-control\",\n xmlName: \"x-ms-blob-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentType: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentType\"],\n mapper: {\n serializedName: \"x-ms-blob-content-type\",\n xmlName: \"x-ms-blob-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentMD5\"],\n mapper: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobContentEncoding: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentEncoding\"],\n mapper: {\n serializedName: \"x-ms-blob-content-encoding\",\n xmlName: \"x-ms-blob-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLanguage: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentLanguage\"],\n mapper: {\n serializedName: \"x-ms-blob-content-language\",\n xmlName: \"x-ms-blob-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentDisposition: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentDisposition\"],\n mapper: {\n serializedName: \"x-ms-blob-content-disposition\",\n xmlName: \"x-ms-blob-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp12: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"immutabilityPolicies\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const immutabilityPolicyExpiry: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyExpiry\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const immutabilityPolicyMode: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyMode\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n};\n\nexport const comp13: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"legalhold\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const legalHold: OperationParameter = {\n parameterPath: \"legalHold\",\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n required: true,\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const 
encryptionScope: OperationParameter = {\n parameterPath: [\"options\", \"encryptionScope\"],\n mapper: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp14: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"snapshot\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier: OperationParameter = {\n parameterPath: [\"options\", \"tier\"],\n mapper: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\",\n \"Cold\"\n ]\n }\n }\n};\n\nexport const rehydratePriority: OperationParameter = {\n parameterPath: [\"options\", \"rehydratePriority\"],\n mapper: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n }\n};\n\nexport const sourceIfModifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfModifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-modified-since\",\n xmlName: \"x-ms-source-if-modified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfUnmodifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-unmodified-since\",\n xmlName: \"x-ms-source-if-unmodified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfMatch: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfMatch\"],\n mapper: {\n serializedName: \"x-ms-source-if-match\",\n xmlName: \"x-ms-source-if-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfNoneMatch: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfNoneMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-none-match\",\n xmlName: \"x-ms-source-if-none-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfTags: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfTags\"],\n mapper: {\n serializedName: \"x-ms-source-if-tags\",\n xmlName: \"x-ms-source-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySource: OperationParameter = {\n parameterPath: \"copySource\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobTagsString: OperationParameter = {\n parameterPath: [\"options\", \"blobTagsString\"],\n mapper: {\n serializedName: \"x-ms-tags\",\n xmlName: \"x-ms-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sealBlob: OperationParameter = {\n parameterPath: [\"options\", \"sealBlob\"],\n mapper: {\n serializedName: \"x-ms-seal-blob\",\n xmlName: \"x-ms-seal-blob\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const legalHold1: OperationParameter = {\n parameterPath: [\"options\", \"legalHold\"],\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const 
xMsRequiresSync: OperationParameter = {\n parameterPath: \"xMsRequiresSync\",\n mapper: {\n defaultValue: \"true\",\n isConstant: true,\n serializedName: \"x-ms-requires-sync\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentMD5\"],\n mapper: {\n serializedName: \"x-ms-source-content-md5\",\n xmlName: \"x-ms-source-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const copySourceAuthorization: OperationParameter = {\n parameterPath: [\"options\", \"copySourceAuthorization\"],\n mapper: {\n serializedName: \"x-ms-copy-source-authorization\",\n xmlName: \"x-ms-copy-source-authorization\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceTags: OperationParameter = {\n parameterPath: [\"options\", \"copySourceTags\"],\n mapper: {\n serializedName: \"x-ms-copy-source-tag-option\",\n xmlName: \"x-ms-copy-source-tag-option\",\n type: {\n name: \"Enum\",\n allowedValues: [\"REPLACE\", \"COPY\"]\n }\n }\n};\n\nexport const comp15: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"copy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyActionAbortConstant: OperationParameter = {\n parameterPath: \"copyActionAbortConstant\",\n mapper: {\n defaultValue: \"abort\",\n isConstant: true,\n serializedName: \"x-ms-copy-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyId: OperationQueryParameter = {\n parameterPath: \"copyId\",\n mapper: {\n serializedName: \"copyid\",\n required: true,\n xmlName: \"copyid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp16: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tier\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier1: OperationParameter = {\n parameterPath: \"tier\",\n mapper: {\n serializedName: \"x-ms-access-tier\",\n required: true,\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\",\n \"Cold\"\n ]\n }\n }\n};\n\nexport const queryRequest: OperationParameter = {\n parameterPath: [\"options\", \"queryRequest\"],\n mapper: QueryRequestMapper\n};\n\nexport const comp17: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"query\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp18: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tags\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tags: OperationParameter = {\n parameterPath: [\"options\", \"tags\"],\n mapper: BlobTagsMapper\n};\n\nexport const transactionalContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentMD5\"],\n mapper: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const transactionalContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobType: OperationParameter = {\n 
parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"PageBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLength: OperationParameter = {\n parameterPath: \"blobContentLength\",\n mapper: {\n serializedName: \"x-ms-blob-content-length\",\n required: true,\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const blobSequenceNumber: OperationParameter = {\n parameterPath: [\"options\", \"blobSequenceNumber\"],\n mapper: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const contentType1: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/octet-stream\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body1: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const accept2: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp19: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"page\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const pageWrite: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"update\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThanOrEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThanOrEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-le\",\n xmlName: \"x-ms-if-sequence-number-le\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThan: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThan\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-lt\",\n xmlName: \"x-ms-if-sequence-number-lt\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-eq\",\n xmlName: \"x-ms-if-sequence-number-eq\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const pageWrite1: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"clear\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceUrl: OperationParameter = {\n parameterPath: \"sourceUrl\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceRange: OperationParameter = {\n parameterPath: \"sourceRange\",\n mapper: {\n serializedName: \"x-ms-source-range\",\n required: true,\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentCrc64\"],\n 
mapper: {\n serializedName: \"x-ms-source-content-crc64\",\n xmlName: \"x-ms-source-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const range1: OperationParameter = {\n parameterPath: \"range\",\n mapper: {\n serializedName: \"x-ms-range\",\n required: true,\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp20: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"pagelist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevsnapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"prevsnapshot\"],\n mapper: {\n serializedName: \"prevsnapshot\",\n xmlName: \"prevsnapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevSnapshotUrl: OperationParameter = {\n parameterPath: [\"options\", \"prevSnapshotUrl\"],\n mapper: {\n serializedName: \"x-ms-previous-snapshot-url\",\n xmlName: \"x-ms-previous-snapshot-url\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sequenceNumberAction: OperationParameter = {\n parameterPath: \"sequenceNumberAction\",\n mapper: {\n serializedName: \"x-ms-sequence-number-action\",\n required: true,\n xmlName: \"x-ms-sequence-number-action\",\n type: {\n name: \"Enum\",\n allowedValues: [\"max\", \"update\", \"increment\"]\n }\n }\n};\n\nexport const comp21: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"incrementalcopy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType1: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"AppendBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp22: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"appendblock\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxSize: OperationParameter = {\n parameterPath: [\"options\", \"appendPositionAccessConditions\", \"maxSize\"],\n mapper: {\n serializedName: \"x-ms-blob-condition-maxsize\",\n xmlName: \"x-ms-blob-condition-maxsize\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const appendPosition: OperationParameter = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n \"appendPosition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-appendpos\",\n xmlName: \"x-ms-blob-condition-appendpos\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const sourceRange1: OperationParameter = {\n parameterPath: [\"options\", \"sourceRange\"],\n mapper: {\n serializedName: \"x-ms-source-range\",\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp23: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"seal\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType2: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"BlockBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceBlobProperties: OperationParameter = {\n parameterPath: [\"options\", \"copySourceBlobProperties\"],\n mapper: {\n serializedName: \"x-ms-copy-source-blob-properties\",\n xmlName: \"x-ms-copy-source-blob-properties\",\n type: {\n name: \"Boolean\"\n }\n 
}\n};\n\nexport const comp24: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"block\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blockId: OperationQueryParameter = {\n parameterPath: \"blockId\",\n mapper: {\n serializedName: \"blockid\",\n required: true,\n xmlName: \"blockid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blocks: OperationParameter = {\n parameterPath: \"blocks\",\n mapper: BlockLookupListMapper\n};\n\nexport const comp25: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blocklist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const listType: OperationQueryParameter = {\n parameterPath: \"listType\",\n mapper: {\n defaultValue: \"committed\",\n serializedName: \"blocklisttype\",\n required: true,\n xmlName: \"blocklisttype\",\n type: {\n name: \"Enum\",\n allowedValues: [\"committed\", \"uncommitted\", \"all\"]\n }\n }\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js new file mode 100644 index 0000000..7507b15 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js @@ -0,0 +1,237 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a AppendBlob. */ +export class AppendBlob { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. 
Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + const operationArguments = { + sourceUrl, + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. + */ + seal(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType1 + ], + isXML: true, + serializer: xmlSerializer +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.maxSize, + Parameters.appendPosition + ], + mediaType: "binary", + serializer +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: 
Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.transactionalContentMD5, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.maxSize, + Parameters.appendPosition, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.AppendBlobSealHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobSealExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.appendPosition + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=appendBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map new file mode 100644 index 0000000..662f001 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"appendBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/appendBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAanD,uCAAuC;AACvC,MAAM,OAAO,UAAU;IAGrB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,aAAqB,EACrB,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACiB,CAAC;IACzC,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACiB,CAAC;IAC9C,CAAC;IAED;;;;;;;;OAQG;IACH,kBAAkB,CAChB,SAAiB,EACjB,aAAqB,EACrB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACiB,CAAC;IACrD,CAAC;IAED;;;;OAIG;IACH,IAAI,CACF,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iBAAiB,CACiB,CAAC;IACvC,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,cAAc;KAC1B;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,+BAA+B,GAA2B;IAC9D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAA
E,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,YAAY;KACxB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iBAAiB,GAA2B;IAChD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,cAAc;KAC1B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n AppendBlobCreateOptionalParams,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockOptionalParams,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlOptionalParams,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobSealOptionalParams,\n AppendBlobSealResponse\n} from \"../models\";\n\n/** Class representing a AppendBlob. */\nexport class AppendBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class AppendBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create Append Blob operation creates a new append blob.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n options?: AppendBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob. The\n * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to\n * AppendBlob. 
Append Block is supported only on version 2015-02-21 version or later.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n appendBlock(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: AppendBlobAppendBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob where\n * the contents are read from a source url. The Append Block operation is permitted only if the blob\n * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version\n * 2015-02-21 version or later.\n * @param sourceUrl Specify a URL to the copy source.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n appendBlockFromUrl(\n sourceUrl: string,\n contentLength: number,\n options?: AppendBlobAppendBlockFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version\n * 2019-12-12 version or later.\n * @param options The options parameters.\n */\n seal(\n options?: AppendBlobSealOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n sealOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst appendBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n 
responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.maxSize,\n Parameters.appendPosition\n ],\n mediaType: \"binary\",\n serializer\n};\nconst appendBlockFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.transactionalContentMD5,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.maxSize,\n Parameters.appendPosition,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst sealOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.AppendBlobSealHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobSealExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.appendPosition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js new file mode 100644 index 0000000..6810389 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js @@ -0,0 +1,1079 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Blob. */ +export class Blob { + /** + * Initialize a new instance of the class Blob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. 
+ */ + setExpiry(expiryOptions, options) { + const operationArguments = { + expiryOptions, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + const operationArguments = { + legalHold, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. 
+ * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + const operationArguments = { + copyId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + const operationArguments = { + tier, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.rangeGetContentMD5, + Parameters.rangeGetContentCRC64, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: Mappers.BlobGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.BlobDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.blobDeleteType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobUndeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobUndeleteExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; 
+const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetExpiryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.expiryOptions, + Parameters.expiresOn + ], + isXML: true, + serializer: xmlSerializer +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifUnmodifiedSince, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.legalHold + ], + isXML: true, + serializer: xmlSerializer +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetMetadataExceptionHeaders + } + }, + 
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobRenewLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobChangeLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + 
headersMapper: Mappers.BlobBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.tier, + Parameters.rehydratePriority, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sealBlob, + Parameters.legalHold1 + ], + isXML: true, + serializer: xmlSerializer +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCopyFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.xMsRequiresSync, + Parameters.sourceContentMD5, + 
Parameters.copySourceAuthorization, + Parameters.copySourceTags + ], + isXML: true, + serializer: xmlSerializer +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp15, + Parameters.copyId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetTierHeaders + }, + 202: { + headersMapper: Mappers.BlobSetTierHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTierExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp16 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags, + Parameters.rehydratePriority, + Parameters.tier1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobQueryExceptionHeaders + } + }, + requestBody: Parameters.queryRequest, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp17 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobTags, + headersMapper: Mappers.BlobGetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetTagsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp18 + ], + 
urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobSetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTagsExceptionHeaders + } + }, + requestBody: Parameters.tags, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifTags, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +//# sourceMappingURL=blob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map new file mode 100644 index 0000000..71e3c61 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"blob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/blob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAsDnD,iCAAiC;AACjC,MAAM,OAAO,IAAI;IAGf;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,QAAQ,CACN,OAAoC;QAEpC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAAyC;QAEzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACW,CAAC;IAC1C,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,MAAM,CAAC,OAAkC;QACvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACW,CAAC;IACnC,CAAC;IAED;;;OAGG;IACH,QAAQ,CACN,OAAoC;QAEpC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,SAAS,CACP,aAAgC,EAChC,OAAqC;QAErC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,sBAAsB,CACW,CAAC;IACtC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;OAGG;IACH,qBAAqB,CACnB,OAAiD;QAEjD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACW,CAAC;IAClD,CAAC;IAED;;;OAGG;IACH,wBAAwB,CACtB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACW,CAAC;IACrD,CAAC;
IAED;;;;OAIG;IACH,YAAY,CACV,SAAkB,EAClB,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,WAAW,CACT,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,YAAY,CACV,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,YAAY,CACV,OAAe,EACf,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,UAAU,CACR,OAAe,EACf,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;IACvC,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;IACvC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;;;;;OAOG;IACH,gBAAgB,CACd,UAAkB,EAClB,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;IAC7C,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,UAAkB,EAClB,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;;;OAMG;IACH,gBAAgB,CACd,MAAc,EACd,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;IAC7C,CAAC;IAED;;;;;;;;OAQG;IACH,OAAO,CACL,IAAgB,EAChB,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;IACpC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,OAAiC;QACrC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kBAAkB,CACW,CAAC;IAClC,CAAC;IAED;;;OAGG;IACH,OAAO,CAAC,OAAmC;QACzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;IACpC,CAAC;IAED;;;OAGG;IACH,OAAO,CAAC,OAAmC;QACzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EACl
B,oBAAoB,CACW,CAAC;IACpC,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,qBAAqB,GAA2B;IACpD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,oBAAoB;QAC/B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,wBAAwB;SAChD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iBAAiB;SACzC;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,cAAc;KAC1B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qBAAqB,GAA2B;IACpD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,sBAAsB,GAA2B;IACrD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,oBAAoB;SAC5C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,6BAA6B;SACrD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;
QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;KAClC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;IACjE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yCAAyC;SACjE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;KAClC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;IACpE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;
QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;IAC5D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAA
U,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,cAAc;KAC1B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;IAC5D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,uBAAuB;KACnC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;KACjB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kBAAkB,GAA2B;IACjD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,gBAAgB;SACxC;QACD,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,gBAAgB;SACxC;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,YAAY;IACpC,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CA
AC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;KACrC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobDownloadOptionalParams,\n BlobDownloadResponse,\n BlobGetPropertiesOptionalParams,\n BlobGetPropertiesResponse,\n BlobDeleteOptionalParams,\n BlobDeleteResponse,\n BlobUndeleteOptionalParams,\n BlobUndeleteResponse,\n BlobExpiryOptions,\n BlobSetExpiryOptionalParams,\n BlobSetExpiryResponse,\n BlobSetHttpHeadersOptionalParams,\n BlobSetHttpHeadersResponse,\n BlobSetImmutabilityPolicyOptionalParams,\n BlobSetImmutabilityPolicyResponse,\n BlobDeleteImmutabilityPolicyOptionalParams,\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetLegalHoldOptionalParams,\n BlobSetLegalHoldResponse,\n BlobSetMetadataOptionalParams,\n BlobSetMetadataResponse,\n BlobAcquireLeaseOptionalParams,\n BlobAcquireLeaseResponse,\n BlobReleaseLeaseOptionalParams,\n BlobReleaseLeaseResponse,\n BlobRenewLeaseOptionalParams,\n BlobRenewLeaseResponse,\n BlobChangeLeaseOptionalParams,\n BlobChangeLeaseResponse,\n BlobBreakLeaseOptionalParams,\n BlobBreakLeaseResponse,\n BlobCreateSnapshotOptionalParams,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLOptionalParams,\n BlobStartCopyFromURLResponse,\n BlobCopyFromURLOptionalParams,\n BlobCopyFromURLResponse,\n BlobAbortCopyFromURLOptionalParams,\n BlobAbortCopyFromURLResponse,\n AccessTier,\n BlobSetTierOptionalParams,\n BlobSetTierResponse,\n BlobGetAccountInfoResponse,\n BlobQueryOptionalParams,\n BlobQueryResponse,\n BlobGetTagsOptionalParams,\n BlobGetTagsResponse,\n BlobSetTagsOptionalParams,\n 
BlobSetTagsResponse\n} from \"../models\";\n\n/** Class representing a Blob. */\nexport class Blob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Blob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Download operation reads or downloads a blob from the system, including its metadata and\n * properties. You can also call Download to read a snapshot.\n * @param options The options parameters.\n */\n download(\n options?: BlobDownloadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n downloadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system\n * properties for the blob. It does not return the content of the blob.\n * @param options The options parameters.\n */\n getProperties(\n options?: BlobGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is\n * permanently removed from the storage account. If the storage account's soft delete feature is\n * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible\n * immediately. However, the blob service retains the blob or snapshot for the number of days specified\n * by the DeleteRetentionPolicy section of [Storage service properties]\n * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is\n * permanently removed from the storage account. Note that you continue to be charged for the\n * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the\n * \"include=deleted\" query parameter to discover which blobs and snapshots have been soft deleted. You\n * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a\n * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404\n * (ResourceNotFound).\n * @param options The options parameters.\n */\n delete(options?: BlobDeleteOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Undelete a blob that was previously soft deleted\n * @param options The options parameters.\n */\n undelete(\n options?: BlobUndeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n undeleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Sets the time a blob will expire and be deleted.\n * @param expiryOptions Required. 
Indicates mode of the expiry time\n * @param options The options parameters.\n */\n setExpiry(\n expiryOptions: BlobExpiryOptions,\n options?: BlobSetExpiryOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n expiryOptions,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setExpiryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set HTTP Headers operation sets system properties on the blob\n * @param options The options parameters.\n */\n setHttpHeaders(\n options?: BlobSetHttpHeadersOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setHttpHeadersOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Immutability Policy operation sets the immutability policy on the blob\n * @param options The options parameters.\n */\n setImmutabilityPolicy(\n options?: BlobSetImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Delete Immutability Policy operation deletes the immutability policy on the blob\n * @param options The options parameters.\n */\n deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Legal Hold operation sets a legal hold on the blob.\n * @param legalHold Specified if a legal hold should be set on the blob.\n * @param options The options parameters.\n */\n setLegalHold(\n legalHold: boolean,\n options?: BlobSetLegalHoldOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n legalHold,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setLegalHoldOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more\n * name-value pairs\n * @param options The options parameters.\n */\n setMetadata(\n options?: BlobSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n acquireLease(\n options?: BlobAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and 
delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: BlobReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: BlobRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: BlobChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n breakLease(\n options?: BlobBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * The Create Snapshot operation creates a read-only snapshot of a blob\n * @param options The options parameters.\n */\n createSnapshot(\n options?: BlobCreateSnapshotOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createSnapshotOperationSpec\n ) as Promise;\n }\n\n /**\n * The Start Copy From URL operation copies a blob or an internet resource to a new blob.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. 
The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n startCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return\n * a response until the copy is complete.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyFromURL(\n copySource: string,\n options?: BlobCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination\n * blob with zero length and full metadata.\n * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob\n * operation.\n * @param options The options parameters.\n */\n abortCopyFromURL(\n copyId: string,\n options?: BlobAbortCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copyId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n abortCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant storage only). A\n * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block\n * blob's tier determines Hot/Cool/Archive storage type. 
This operation does not update the blob's\n * ETag.\n * @param tier Indicates the tier to be set on the blob.\n * @param options The options parameters.\n */\n setTier(\n tier: AccessTier,\n options?: BlobSetTierOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n tier,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTierOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Query operation enables users to select/project on blob data by providing simple query\n * expressions.\n * @param options The options parameters.\n */\n query(options?: BlobQueryOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n queryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Tags operation enables users to get the tags associated with a blob.\n * @param options The options parameters.\n */\n getTags(options?: BlobGetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getTagsOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tags operation enables users to set tags on a blob.\n * @param options The options parameters.\n */\n setTags(options?: BlobSetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTagsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst downloadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDownloadExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.rangeGetContentMD5,\n Parameters.rangeGetContentCRC64,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: 
coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"HEAD\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.blobDeleteType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.deleteSnapshots\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst undeleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobUndeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobUndeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setExpiryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetExpiryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetExpiryExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.expiryOptions,\n Parameters.expiresOn\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setHttpHeadersOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetHttpHeadersHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n 
Parameters.blobContentLanguage,\n Parameters.blobContentDisposition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifUnmodifiedSince,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setLegalHoldOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetLegalHoldHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.legalHold\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetMetadataExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n 
Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst createSnapshotOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobCreateSnapshotHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n 
Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst startCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobStartCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.tier,\n Parameters.rehydratePriority,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sealBlob,\n Parameters.legalHold1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.xMsRequiresSync,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst abortCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobAbortCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp15,\n Parameters.copyId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.copyActionAbortConstant\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTierOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetTierHeaders\n },\n 202: {\n headersMapper: Mappers.BlobSetTierHeaders\n 
},\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTierExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp16\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags,\n Parameters.rehydratePriority,\n Parameters.tier1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst queryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobQueryExceptionHeaders\n }\n },\n requestBody: Parameters.queryRequest,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp17\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobTags,\n headersMapper: Mappers.BlobGetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetTagsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobSetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTagsExceptionHeaders\n }\n },\n requestBody: Parameters.tags,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifTags,\n 
Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js new file mode 100644 index 0000000..266219b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js @@ -0,0 +1,392 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a BlockBlob. */ +export class BlockBlob { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + const operationArguments = { + contentLength, + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + const operationArguments = { + blockId, + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. + */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + const operationArguments = { + blockId, + contentLength, + sourceUrl, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + const operationArguments = { + blocks, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. 
+ */ + getBlockList(listType, options) { + const operationArguments = { + listType, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobUploadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobUploadExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.blobType2 + ], + mediaType: "binary", + serializer +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.transactionalContentMD5, + Parameters.blobType2, + Parameters.copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders + } + }, + 
requestBody: Parameters.body1, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2 + ], + mediaType: "binary", + serializer +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders + } + }, + requestBody: Parameters.blocks, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlockList, + headersMapper: Mappers.BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp25, + Parameters.listType + ], + urlParameters: [Parameters.url], + headerParameters: [ + 
Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=blockBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map new file mode 100644 index 0000000..f9b2325 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"blockBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/blockBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAmBnD,sCAAsC;AACtC,MAAM,OAAO,SAAS;IAGpB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;;;;OAQG;IACH,MAAM,CACJ,aAAqB,EACrB,IAA8B,EAC9B,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;;;;;;;;;OAYG;IACH,cAAc,CACZ,aAAqB,EACrB,UAAkB,EAClB,OAA+C;QAE/C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;IAChD,CAAC;IAED;;;;;;;;OAQG;IACH,UAAU,CACR,OAAe,EACf,aAAqB,EACrB,IAA8B,EAC9B,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;;;;;;OASG;IACH,iBAAiB,CACf,OAAe,EACf,aAAqB,EACrB,SAAiB,EACjB,OAAkD;QAElD,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACgB,CAAC;IACnD,CAAC;IAED;;;;;;;;;;OAUG;IACH,eAAe,CACb,MAAuB,EACvB,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;;;;OAMG;IACH,YAAY,CACV,QAAuB,EACvB,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,QAAQ;YACR,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UA
AU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,wBAAwB;KACpC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;KACnB;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,8BAA8B,GAA2B;IAC7D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0CAA0C;SAClE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,YAAY;KACxB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,MAAM;IAC9B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,O
AAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;KACrC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,SAAS;YAC7B,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlockBlobUploadOptionalParams,\n BlockBlobUploadResponse,\n BlockBlobPutBlobFromUrlOptionalParams,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobStageBlockOptionalParams,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLOptionalParams,\n BlockBlobStageBlockFromURLResponse,\n BlockLookupList,\n BlockBlobCommitBlockListOptionalParams,\n BlockBlobCommitBlockListResponse,\n BlockListType,\n BlockBlobGetBlockListOptionalParams,\n BlockBlobGetBlockListResponse\n} from \"../models\";\n\n/** Class representing a BlockBlob. */\nexport class BlockBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class BlockBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing\n * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put\n * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a\n * partial update of the content of a block blob, use the Put Block List operation.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n upload(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobUploadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read\n * from a given URL. This API is supported beginning with the 2020-04-08 version. 
Partial updates are\n * not supported with Put Blob from URL; the content of an existing blob is overwritten with the\n * content of the new blob. To perform partial updates to a block blob’s contents using a source URL,\n * use the Put Block from URL API in conjunction with Put Block List.\n * @param contentLength The length of the request.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n putBlobFromUrl(\n contentLength: number,\n copySource: string,\n options?: BlockBlobPutBlobFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n putBlobFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n stageBlock(\n blockId: string,\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobStageBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob where the contents\n * are read from a URL.\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param sourceUrl Specify a URL to the copy source.\n * @param options The options parameters.\n */\n stageBlockFromURL(\n blockId: string,\n contentLength: number,\n sourceUrl: string,\n options?: BlockBlobStageBlockFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n sourceUrl,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the\n * blob. In order to be written as part of a blob, a block must have been successfully written to the\n * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading\n * only those blocks that have changed, then committing the new and existing blocks together. 
You can\n * do this by specifying whether to commit a block from the committed block list or from the\n * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list\n * it may belong to.\n * @param blocks Blob Blocks.\n * @param options The options parameters.\n */\n commitBlockList(\n blocks: BlockLookupList,\n options?: BlockBlobCommitBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blocks,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n commitBlockListOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block\n * blob\n * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted\n * blocks, or both lists together.\n * @param options The options parameters.\n */\n getBlockList(\n listType: BlockListType,\n options?: BlockBlobGetBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n listType,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getBlockListOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst uploadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobUploadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobUploadExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.blobType2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst putBlobFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n 
Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags,\n Parameters.transactionalContentMD5,\n Parameters.blobType2,\n Parameters.copySourceBlobProperties\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst stageBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst stageBlockFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst commitBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobCommitBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders\n }\n },\n requestBody: Parameters.blocks,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n 
Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlockList,\n headersMapper: Mappers.BlockBlobGetBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp25,\n Parameters.listType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js new file mode 100644 index 0000000..17f5d4c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js @@ -0,0 +1,769 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Container. */ +export class Container { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. 
+ */ + delete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + const operationArguments = { + sourceContainerName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. 
+ */ + listBlobHierarchySegment(delimiter, options) { + const operationArguments = { + delimiter, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const createOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.access, + Parameters.defaultEncryptionScope, + Parameters.preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteOperationSpec = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.ContainerDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerDeleteExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer +}; +const setMetadataOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetMetadataExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp6 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + 
headersMapper: Mappers.ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders + } + }, + requestBody: Parameters.containerAcl, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.access, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerRestoreHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRestoreExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp8 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.deletedContainerName, + Parameters.deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenameHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenameExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp9 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.sourceContainerName, + Parameters.sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders + } + }, + requestBody: Parameters.body, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp4, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: 
Mappers.ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1 + ], + isXML: true, + serializer: xmlSerializer +}; +const renewLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2 + ], + isXML: true, + serializer: xmlSerializer +}; +const breakLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod + ], + isXML: true, + serializer: xmlSerializer +}; +const changeLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, 
+ Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsFlatSegmentResponse, + headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsHierarchySegmentResponse, + headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1, + Parameters.delimiter + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=container.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map new file mode 100644 index 0000000..1e99010 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"container.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/container.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAwCnD,sCAAsC;AACtC,MAAM,OAAO,SAAS;IAGpB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA8C;QAE9C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACgB,CAAC;IAC/C,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;OAGG;IACH,WAAW,CACT,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,eAAe,CACb,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;;OAIG;IACH,eAAe,CACb,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;OAGG;IACH,OAAO,CACL,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACgB,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,mBAA2B,EAC3B,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,mBAAmB;YACnB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,WAAW,CACT,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,YAAY,CACV,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;IAED;;;;;OAKG;IACH,YAAY,CACV,OAAe,EACf,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;IAED;;;;;OAKG;IACH,UAAU,CACR,OAAe,EACf,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAA4C;
QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;OAGG;IACH,mBAAmB,CACjB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,gCAAgC,CACgB,CAAC;IACrD,CAAC;IAED;;;;;;;OAOG;IACH,wBAAwB,CACtB,SAAiB,EACjB,OAAyD;QAEzD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACgB,CAAC;IAC1D,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;IAChD,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,8BAA8B;KAC1C;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,6BAA6B;SACrD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,sCAAsC;SAC9D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,SAAS,EAAE,kBAAkB,EAAE;qBAC3D;iBACF;gBACD,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,kBAAkB;aACnC;YACD,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;I
ACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,YAAY;IACpC,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,oBAAoB;QAC/B,UAAU,CAAC,uBAAuB;KACnC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,aAAa;KACzB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAA
E;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;KACpB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;KACvB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;KAC5B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,gCAAgC,GAA2B;IAC/D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,4BAA4B;YAChD,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;IACpE,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iCAAiC;YACrD,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,iDAAiD;SACzE;KACF;IACD,eAAe,EAAE;Q
ACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n ContainerCreateOptionalParams,\n ContainerCreateResponse,\n ContainerGetPropertiesOptionalParams,\n ContainerGetPropertiesResponse,\n ContainerDeleteOptionalParams,\n ContainerDeleteResponse,\n ContainerSetMetadataOptionalParams,\n ContainerSetMetadataResponse,\n ContainerGetAccessPolicyOptionalParams,\n ContainerGetAccessPolicyResponse,\n ContainerSetAccessPolicyOptionalParams,\n ContainerSetAccessPolicyResponse,\n ContainerRestoreOptionalParams,\n ContainerRestoreResponse,\n ContainerRenameOptionalParams,\n ContainerRenameResponse,\n ContainerSubmitBatchOptionalParams,\n ContainerSubmitBatchResponse,\n ContainerFilterBlobsOptionalParams,\n ContainerFilterBlobsResponse,\n ContainerAcquireLeaseOptionalParams,\n ContainerAcquireLeaseResponse,\n ContainerReleaseLeaseOptionalParams,\n ContainerReleaseLeaseResponse,\n ContainerRenewLeaseOptionalParams,\n ContainerRenewLeaseResponse,\n ContainerBreakLeaseOptionalParams,\n ContainerBreakLeaseResponse,\n ContainerChangeLeaseOptionalParams,\n ContainerChangeLeaseResponse,\n ContainerListBlobFlatSegmentOptionalParams,\n ContainerListBlobFlatSegmentResponse,\n ContainerListBlobHierarchySegmentOptionalParams,\n ContainerListBlobHierarchySegmentResponse,\n ContainerGetAccountInfoResponse\n} from \"../models\";\n\n/** Class representing a Container. */\nexport class Container {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Container class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * creates a new container under the specified account. If the container with the same name already\n * exists, the operation fails\n * @param options The options parameters.\n */\n create(\n options?: ContainerCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * returns all user-defined metadata and system properties for the specified container. 
The data\n * returned does not include the container's list of blobs\n * @param options The options parameters.\n */\n getProperties(\n options?: ContainerGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * operation marks the specified container for deletion. The container and any blobs contained within\n * it are later deleted during garbage collection\n * @param options The options parameters.\n */\n delete(\n options?: ContainerDeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * operation sets one or more user-defined name-value pairs for the specified container.\n * @param options The options parameters.\n */\n setMetadata(\n options?: ContainerSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the permissions for the specified container. The permissions indicate whether container data\n * may be accessed publicly.\n * @param options The options parameters.\n */\n getAccessPolicy(\n options?: ContainerGetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * sets the permissions for the specified container. The permissions indicate whether blobs in a\n * container may be accessed publicly.\n * @param options The options parameters.\n */\n setAccessPolicy(\n options?: ContainerSetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * Restores a previously-deleted container.\n * @param options The options parameters.\n */\n restore(\n options?: ContainerRestoreOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n restoreOperationSpec\n ) as Promise;\n }\n\n /**\n * Renames an existing container.\n * @param sourceContainerName Required. 
Specifies the name of the container to rename.\n * @param options The options parameters.\n */\n rename(\n sourceContainerName: string,\n options?: ContainerRenameOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceContainerName,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renameOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ContainerSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given\n * search expression. Filter blobs searches within the given container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ContainerFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n acquireLease(\n options?: ContainerAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: ContainerReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: ContainerRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n breakLease(\n options?: ContainerBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: ContainerChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param options The options parameters.\n */\n listBlobFlatSegment(\n options?: ContainerListBlobFlatSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobFlatSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix\n * element in the response body that acts as a placeholder for all blobs whose names begin with the\n * same substring up to the appearance of the delimiter character. 
The delimiter may be a single\n * character or a string.\n * @param options The options parameters.\n */\n listBlobHierarchySegment(\n delimiter: string,\n options?: ContainerListBlobHierarchySegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n delimiter,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobHierarchySegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.access,\n Parameters.defaultEncryptionScope,\n Parameters.preventEncryptionScopeOverride\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerDeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetMetadataExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp6\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince\n ],\n 
isXML: true,\n serializer: xmlSerializer\n};\nconst getAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: {\n name: \"Sequence\",\n element: {\n type: { name: \"Composite\", className: \"SignedIdentifier\" }\n }\n },\n serializedName: \"SignedIdentifiers\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\"\n },\n headersMapper: Mappers.ContainerGetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders\n }\n },\n requestBody: Parameters.containerAcl,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.access,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst restoreOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerRestoreHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRestoreExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp8\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.deletedContainerName,\n Parameters.deletedContainerVersion\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renameOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenameHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenameExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp9\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.sourceContainerName,\n Parameters.sourceLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ContainerSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [\n 
Parameters.timeoutInSeconds,\n Parameters.comp4,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ContainerFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders\n }\n },\n 
queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobFlatSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsFlatSegmentResponse,\n headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobHierarchySegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsHierarchySegmentResponse,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1,\n Parameters.delimiter\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js new file mode 100644 index 0000000..eca2687 --- /dev/null +++ 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js @@ -0,0 +1,14 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export * from "./service"; +export * from "./container"; +export * from "./blob"; +export * from "./pageBlob"; +export * from "./appendBlob"; +export * from "./blockBlob"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map new file mode 100644 index 0000000..6377015 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,WAAW,CAAC;AAC1B,cAAc,aAAa,CAAC;AAC5B,cAAc,QAAQ,CAAC;AACvB,cAAc,YAAY,CAAC;AAC3B,cAAc,cAAc,CAAC;AAC7B,cAAc,aAAa,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./service\";\nexport * from \"./container\";\nexport * from \"./blob\";\nexport * from \"./pageBlob\";\nexport * from \"./appendBlob\";\nexport * from \"./blockBlob\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js new file mode 100644 index 0000000..a014a5a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js @@ -0,0 +1,494 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a PageBlob. */ +export class PageBlob { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + const operationArguments = { + contentLength, + blobContentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. 
+ */ + uploadPages(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. + */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + const operationArguments = { + sourceUrl, + sourceRange, + contentLength, + range, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. + */ + getPageRanges(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. + */ + getPageRangesDiff(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + const operationArguments = { + blobContentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. 
+ */ + updateSequenceNumber(sequenceNumberAction, options) { + const operationArguments = { + sequenceNumberAction, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyIncremental(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType, + Parameters.blobContentLength, + Parameters.blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + 
Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo + ], + mediaType: "binary", + serializer +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobClearPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.pageWrite1 + ], + isXML: true, + serializer: xmlSerializer +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.sourceUrl, + Parameters.sourceRange, + Parameters.sourceContentCrc64, + Parameters.range1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; 
+const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20, + Parameters.prevsnapshot + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobResizeHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobResizeExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.blobContentLength + ], + isXML: true, + serializer: xmlSerializer +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobSequenceNumber, + Parameters.sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.copySource + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=pageBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map new file mode 100644 index 0000000..8877c19 --- /dev/null 
+++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pageBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/pageBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAwBnD,qCAAqC;AACrC,MAAM,OAAO,QAAQ;IAGnB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;;OAMG;IACH,MAAM,CACJ,aAAqB,EACrB,iBAAyB,EACzB,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,iBAAiB;YACjB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACe,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,aAAqB,EACrB,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACe,CAAC;IAC3C,CAAC;IAED;;;;;;;;;;OAUG;IACH,kBAAkB,CAChB,SAAiB,EACjB,WAAmB,EACnB,aAAqB,EACrB,KAAa,EACb,OAAkD;QAElD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,WAAW;YACX,aAAa;YACb,KAAK;YACL,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACe,CAAC;IACnD,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACe,CAAC;IAC9C,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CACf,OAAiD;QAEjD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACe,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,MAAM,CACJ,iBAAyB,EACzB,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,iBAAiB;YACjB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;IACvC,CAAC;IAED;;;;;;OAMG;IACH,oBAAoB,CAClB,oBAA8C,EAC9C,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,oBAAoB;YACpB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACe,CAAC;IACrD,CAAC;IAED;;;;;;;;;;;OAWG;IACH,eAAe,CACb,UAAkB,EAClB,OAA+C;QAE/C,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACe,CAAC;IAChD,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe
;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,kBAAkB;KAC9B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;KACnC;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,+BAA+B,GAA2B;IAC9D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0CAA0C;SAClE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SA
AS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,8BAA8B,GAA2B;IAC7D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yCAAyC;SACjE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,YAAY;KACxB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;IAChE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n PageBlobCreateOptionalParams,\n PageBlobCreateResponse,\n PageBlobUploadPagesOptionalParams,\n PageBlobUploadPagesResponse,\n 
PageBlobClearPagesOptionalParams,\n PageBlobClearPagesResponse,\n PageBlobUploadPagesFromURLOptionalParams,\n PageBlobUploadPagesFromURLResponse,\n PageBlobGetPageRangesOptionalParams,\n PageBlobGetPageRangesResponse,\n PageBlobGetPageRangesDiffOptionalParams,\n PageBlobGetPageRangesDiffResponse,\n PageBlobResizeOptionalParams,\n PageBlobResizeResponse,\n SequenceNumberActionType,\n PageBlobUpdateSequenceNumberOptionalParams,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobCopyIncrementalOptionalParams,\n PageBlobCopyIncrementalResponse\n} from \"../models\";\n\n/** Class representing a PageBlob. */\nexport class PageBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class PageBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create operation creates a new page blob.\n * @param contentLength The length of the request.\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n blobContentLength: number,\n options?: PageBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n uploadPages(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: PageBlobUploadPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Clear Pages operation clears a set of pages from a page blob\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n clearPages(\n contentLength: number,\n options?: PageBlobClearPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n clearPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a\n * URL\n * @param sourceUrl Specify a URL to the copy source.\n * @param sourceRange Bytes of source data in the specified range. The length of this range should\n * match the ContentLength header and x-ms-range/Range destination range header.\n * @param contentLength The length of the request.\n * @param range The range of bytes to which the source range would be written. 
The range should be 512\n * aligned and range-end is required.\n * @param options The options parameters.\n */\n uploadPagesFromURL(\n sourceUrl: string,\n sourceRange: string,\n contentLength: number,\n range: string,\n options?: PageBlobUploadPagesFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n sourceRange,\n contentLength,\n range,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a\n * page blob\n * @param options The options parameters.\n */\n getPageRanges(\n options?: PageBlobGetPageRangesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were\n * changed between target blob and previous snapshot.\n * @param options The options parameters.\n */\n getPageRangesDiff(\n options?: PageBlobGetPageRangesDiffOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesDiffOperationSpec\n ) as Promise;\n }\n\n /**\n * Resize the Blob\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n resize(\n blobContentLength: number,\n options?: PageBlobResizeOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n resizeOperationSpec\n ) as Promise;\n }\n\n /**\n * Update the sequence number of the blob\n * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request.\n * This property applies to page blobs only. This property indicates how the service should modify the\n * blob's sequence number\n * @param options The options parameters.\n */\n updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n options?: PageBlobUpdateSequenceNumberOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sequenceNumberAction,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n updateSequenceNumberOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.\n * The snapshot is copied such that only the differential changes between the previously copied\n * snapshot are transferred to the destination. The copied snapshots are complete copies of the\n * original snapshot and can be read or copied from as usual. This API is supported since REST version\n * 2016-05-31.\n * @param copySource Specifies the name of the source page blob snapshot. 
This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyIncremental(\n copySource: string,\n options?: PageBlobCopyIncrementalOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyIncrementalOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType,\n Parameters.blobContentLength,\n Parameters.blobSequenceNumber\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo\n ],\n mediaType: \"binary\",\n serializer\n};\nconst clearPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobClearPagesHeaders\n },\n default: {\n bodyMapper: 
Mappers.StorageError,\n headersMapper: Mappers.PageBlobClearPagesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.pageWrite1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.sourceUrl,\n Parameters.sourceRange,\n Parameters.sourceContentCrc64,\n Parameters.range1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesDiffOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20,\n 
Parameters.prevsnapshot\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.prevSnapshotUrl\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst resizeOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobResizeHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobResizeExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.blobContentLength\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst updateSequenceNumberOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobSequenceNumber,\n Parameters.sequenceNumberAction\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyIncrementalOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.PageBlobCopyIncrementalHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.copySource\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js new file mode 100644 index 0000000..106bca8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js @@ -0,0 +1,345 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Service. */ +export class Service { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + const operationArguments = { + blobServiceProperties, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + const operationArguments = { + keyInfo, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. 
+ */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: Parameters.blobServiceProperties, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceProperties, + headersMapper: Mappers.ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceStatistics, + headersMapper: Mappers.ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListContainersSegmentResponse, + headersMapper: Mappers.ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.include + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const 
getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.UserDelegationKey, + headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders + } + }, + requestBody: Parameters.keyInfo, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp3 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +const submitBatchOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: Parameters.body, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const filterBlobsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=service.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map new file mode 100644 index 0000000..2826430 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"service.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/service.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAsBnD,oCAAoC;AACpC,MAAM,OAAO,OAAO;IAGlB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;OAKG;IACH,aAAa,CACX,qBAA4C,EAC5C,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,qBAAqB;YACrB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,aAAa,CACX,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;OAGG;IACH,qBAAqB,CACnB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACc,CAAC;IACrD,CAAC;IAED;;;;;OAKG;IACH,oBAAoB,CAClB,OAAgB,EAChB,OAAmD;QAEnD,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACc,CAAC;IACpD,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACc,CAAC;IAC9C,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACc,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,WAAW,CACT,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACc,CAAC;IAC3C,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,qBAAqB;IAC7C,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,gBAAgB;KAC5B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,qBAAqB;YACzC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,gBAAgB;KAC5B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,qBAAqB;YACzC,aAAa,EAAE,OAAO,CAAC,2BA
A2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;IACjE,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,6BAA6B;YACjD,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;IAChE,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2CAA2C;SACnE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,OAAO;IAC/B,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from 
\"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobServiceProperties,\n ServiceSetPropertiesOptionalParams,\n ServiceSetPropertiesResponse,\n ServiceGetPropertiesOptionalParams,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsOptionalParams,\n ServiceGetStatisticsResponse,\n ServiceListContainersSegmentOptionalParams,\n ServiceListContainersSegmentResponse,\n KeyInfo,\n ServiceGetUserDelegationKeyOptionalParams,\n ServiceGetUserDelegationKeyResponse,\n ServiceGetAccountInfoResponse,\n ServiceSubmitBatchOptionalParams,\n ServiceSubmitBatchResponse,\n ServiceFilterBlobsOptionalParams,\n ServiceFilterBlobsResponse\n} from \"../models\";\n\n/** Class representing a Service. */\nexport class Service {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Service class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * Sets properties for a storage account's Blob service endpoint, including properties for Storage\n * Analytics and CORS (Cross-Origin Resource Sharing) rules\n * @param blobServiceProperties The StorageService properties.\n * @param options The options parameters.\n */\n setProperties(\n blobServiceProperties: BlobServiceProperties,\n options?: ServiceSetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobServiceProperties,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the properties of a storage account's Blob service, including properties for Storage Analytics\n * and CORS (Cross-Origin Resource Sharing) rules.\n * @param options The options parameters.\n */\n getProperties(\n options?: ServiceGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only available on the\n * secondary location endpoint when read-access geo-redundant replication is enabled for the storage\n * account.\n * @param options The options parameters.\n */\n getStatistics(\n options?: ServiceGetStatisticsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getStatisticsOperationSpec\n ) as Promise;\n }\n\n /**\n * The List Containers Segment operation returns a list of the containers under the specified account\n * @param options The options parameters.\n */\n listContainersSegment(\n options?: ServiceListContainersSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listContainersSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using\n * bearer token authentication.\n * @param keyInfo Key information\n * @param options The options parameters.\n */\n getUserDelegationKey(\n keyInfo: KeyInfo,\n options?: ServiceGetUserDelegationKeyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n keyInfo,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getUserDelegationKeyOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ServiceSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a\n * given search expression. 
Filter blobs searches across all containers within a storage account but\n * can be scoped within the expression to a single container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ServiceFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst setPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ServiceSetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders\n }\n },\n requestBody: Parameters.blobServiceProperties,\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceProperties,\n headersMapper: Mappers.ServiceGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getStatisticsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceStatistics,\n headersMapper: Mappers.ServiceGetStatisticsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listContainersSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListContainersSegmentResponse,\n headersMapper: Mappers.ServiceListContainersSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.include\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getUserDelegationKeyOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.UserDelegationKey,\n headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders\n }\n },\n requestBody: Parameters.keyInfo,\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp3\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ServiceGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ServiceSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ServiceFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js new file mode 100644 index 0000000..dfcaa04 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js @@ -0,0 +1,27 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import { Service, Container, Blob, PageBlob, AppendBlob, BlockBlob } from "./operations"; +import { StorageClientContext } from "./storageClientContext"; +export class StorageClient extends StorageClientContext { + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. 
+ * @param options The parameter options + */ + constructor(url, options) { + super(url, options); + this.service = new Service(this); + this.container = new Container(this); + this.blob = new Blob(this); + this.pageBlob = new PageBlob(this); + this.appendBlob = new AppendBlob(this); + this.blockBlob = new BlockBlob(this); + } +} +//# sourceMappingURL=storageClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map new file mode 100644 index 0000000..1163d8e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"storageClient.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClient.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EACL,OAAO,EACP,SAAS,EACT,IAAI,EACJ,QAAQ,EACR,UAAU,EACV,SAAS,EACV,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAG9D,MAAM,OAAO,aAAc,SAAQ,oBAAoB;IACrD;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;QAC5D,KAAK,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACpB,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,IAAI,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC;QAC3B,IAAI,CAAC,QAAQ,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnC,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;IACvC,CAAC;CAQF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n Service,\n Container,\n Blob,\n PageBlob,\n AppendBlob,\n BlockBlob\n} from \"./operations\";\nimport { StorageClientContext } from \"./storageClientContext\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nexport class StorageClient extends StorageClientContext {\n /**\n * Initializes a new instance of the StorageClient class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n super(url, options);\n this.service = new Service(this);\n this.container = new Container(this);\n this.blob = new Blob(this);\n this.pageBlob = new PageBlob(this);\n this.appendBlob = new AppendBlob(this);\n this.blockBlob = new BlockBlob(this);\n }\n\n service: Service;\n container: Container;\n blob: Blob;\n pageBlob: PageBlob;\n appendBlob: AppendBlob;\n blockBlob: BlockBlob;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js new file mode 100644 index 0000000..d9c7793 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js @@ -0,0 +1,39 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +import * as coreHttp from "@azure/core-http"; +const packageName = "azure-storage-blob"; +const packageVersion = "12.17.0"; +export class StorageClientContext extends coreHttp.ServiceClient { + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url, options) { + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + super(undefined, options); + this.requestContentType = "application/json; charset=utf-8"; + this.baseUri = options.endpoint || "{url}"; + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2023-11-03"; + } +} +//# sourceMappingURL=storageClientContext.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map new file mode 100644 index 0000000..3145f45 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map @@ -0,0 +1 @@ +{"version":3,"file":"storageClientContext.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClientContext.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAG7C,MAAM,WAAW,GAAG,oBAAoB,CAAC;AACzC,MAAM,cAAc,GAAG,SAAS,CAAC;AAEjC,MAAM,OAAO,oBAAqB,SAAQ,QAAQ,CAAC,aAAa;IAI9D;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;QAC5D,IAAI,GAAG,KAAK,SAAS,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;SACzC;QAED,0CAA0C;QAC1C,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;YACtB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,wBAAwB,EAAE,CAAC;YAC7D,OAAO,CAAC,SAAS,GAAG,GAAG,WAAW,IAAI,cAAc,IAAI,gBAAgB,EAAE,CAAC;SAC5E;QAED,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QAE1B,IAAI,CAAC,kBAAkB,GAAG,iCAAiC,CAAC;QAE5D,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC;QAE3C,wBAAwB;QACxB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QAEf,0CAA0C;QAC1C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,YAAY,CAAC;IACjD,CAAC;CACF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nconst packageName = \"azure-storage-blob\";\nconst packageVersion = \"12.17.0\";\n\nexport class StorageClientContext extends coreHttp.ServiceClient {\n url: string;\n version: string;\n\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n if (url === undefined) {\n throw new Error(\"'url' cannot be null\");\n }\n\n // Initializing default values for options\n if (!options) {\n options = {};\n }\n\n if (!options.userAgent) {\n const 
defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;\n }\n\n super(undefined, options);\n\n this.requestContentType = \"application/json; charset=utf-8\";\n\n this.baseUri = options.endpoint || \"{url}\";\n\n // Parameter assignments\n this.url = url;\n\n // Assigning values to Constant parameters\n this.version = options.version || \"2023-11-03\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js new file mode 100644 index 0000000..e561b8e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */ +export var KnownEncryptionAlgorithmType; +(function (KnownEncryptionAlgorithmType) { + KnownEncryptionAlgorithmType["AES256"] = "AES256"; +})(KnownEncryptionAlgorithmType || (KnownEncryptionAlgorithmType = {})); +//# sourceMappingURL=generatedModels.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map new file mode 100644 index 0000000..e3603c9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generatedModels.js","sourceRoot":"","sources":["../../../src/generatedModels.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkQlC,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAEX;AAFD,WAAY,4BAA4B;IACtC,iDAAiB,CAAA;AACnB,CAAC,EAFW,4BAA4B,KAA5B,4BAA4B,QAEvC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Tags } from \".\";\nimport { BlobTags, BlobPropertiesInternal as BlobProperties } from \"./generated/src/models\";\n\nexport {\n AccessPolicy,\n AccessTier,\n AccountKind,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlHeaders,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockHeaders,\n AppendBlobCreateHeaders,\n ArchiveStatus,\n BlobDeleteImmutabilityPolicyHeaders,\n BlobDeleteImmutabilityPolicyResponse,\n BlobImmutabilityPolicyMode,\n BlobAbortCopyFromURLHeaders,\n BlobCopyFromURLHeaders,\n BlobCopySourceTags,\n BlobCreateSnapshotHeaders,\n BlobDeleteHeaders,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobGetPropertiesHeaders,\n BlobGetPropertiesResponse as BlobGetPropertiesResponseModel,\n BlobPropertiesInternal as BlobProperties,\n BlobUndeleteResponse,\n BlobHttpHeaders as BlobHTTPHeaders,\n BlobSetHttpHeadersHeaders as BlobSetHTTPHeadersHeaders,\n BlobSetHttpHeadersResponse as BlobSetHTTPHeadersResponse,\n BlobSetImmutabilityPolicyHeaders,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldHeaders,\n BlobSetLegalHoldResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLHeaders,\n BlobStartCopyFromURLResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobSetMetadataHeaders,\n BlobSetTierHeaders,\n BlobSetTierResponse,\n BlobSetTagsHeaders,\n BlobDownloadHeaders,\n BlobDownloadResponse as BlobDownloadResponseModel,\n BlobType,\n BlobTags,\n BlobUndeleteHeaders,\n Block,\n BlockBlobCommitBlockListHeaders,\n 
BlockBlobUploadHeaders,\n BlockBlobUploadResponse,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListHeaders,\n BlockBlobStageBlockFromURLHeaders,\n BlockBlobStageBlockHeaders,\n BlockList,\n BlockListType,\n BlockBlobGetBlockListResponse,\n BlobServiceProperties,\n BlobServiceStatistics,\n BlobGetTagsHeaders,\n BlobTag,\n ContainerCreateHeaders,\n ContainerCreateResponse,\n ContainerDeleteHeaders,\n ContainerDeleteResponse,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesHeaders,\n ContainerBreakLeaseOptionalParams,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerGetPropertiesResponse,\n ContainerProperties,\n ContainerSetMetadataResponse,\n ContainerSetAccessPolicyHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataHeaders,\n CopyStatusType,\n CorsRule,\n CpkInfo,\n DeleteSnapshotsOptionType,\n EncryptionAlgorithmType,\n GeoReplication,\n GeoReplicationStatusType,\n LeaseAccessConditions,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n ListContainersSegmentResponse,\n FilterBlobItem as FilterBlobItemModel,\n FilterBlobSegment as FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n Logging,\n Metrics,\n ModifiedAccessConditions as ModifiedAccessConditionsModel,\n PublicAccessType,\n PageBlobCreateResponse,\n PageBlobUploadPagesResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobClearPagesHeaders,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalHeaders,\n PageBlobCreateHeaders,\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesResponse as PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesDiffResponse as PageBlobGetPageRangesDiffResponseModel,\n PageBlobResizeHeaders,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberHeaders,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLHeaders,\n PageBlobUploadPagesHeaders,\n PageBlobCopyIncrementalResponse,\n SequenceNumberActionType,\n RehydratePriority,\n RetentionPolicy,\n AppendPositionAccessConditions,\n ServiceGetUserDelegationKeyHeaders,\n ServiceSubmitBatchHeaders,\n ServiceGetAccountInfoHeaders,\n ServiceGetPropertiesHeaders,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsHeaders,\n SequenceNumberAccessConditions,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentHeaders,\n ServiceListContainersSegmentResponse,\n ServiceSetPropertiesHeaders,\n SkuName,\n StaticWebsite,\n ContainerItem,\n ServiceSubmitBatchResponse as ServiceSubmitBatchResponseModel,\n ServiceSubmitBatchOptionalParams as ServiceSubmitBatchOptionalParamsModel,\n SignedIdentifier as SignedIdentifierModel,\n SyncCopyStatusType,\n UserDelegationKey as UserDelegationKeyModel,\n ContainerEncryptionScope,\n BlobQueryHeaders,\n BlobQueryResponse as BlobQueryResponseModel,\n ContainerRestoreResponse as ContainerUndeleteResponse,\n ContainerRestoreHeaders as ContainerUndeleteHeaders,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobPutBlobFromUrlHeaders,\n ContainerRenameResponse,\n ContainerRenameHeaders,\n} from \"./generated/src/models\";\n\n// Following definitions are to avoid breaking change.\nexport interface BlobPrefix {\n name: string;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsFlatSegmentResponseModel {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n 
maxPageSize?: number;\n segment: BlobFlatListSegmentModel;\n continuationToken?: string;\n}\n\nexport interface BlobFlatListSegmentModel {\n blobItems: BlobItemInternal[];\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsHierarchySegmentResponseModel {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegmentModel;\n continuationToken?: string;\n}\n\nexport interface BlobHierarchyListSegmentModel {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItemInternal[];\n}\n\n/** An Azure Storage blob */\nexport interface BlobItemInternal {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n /** Properties of a blob */\n properties: BlobProperties;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n /** Blob tags */\n blobTags?: BlobTags;\n /** Dictionary of */\n objectReplicationMetadata?: { [propertyName: string]: string };\n /** Inactive root blobs which have any versions would have such tag with value true. */\n hasVersionsOnly?: boolean;\n}\n\n/**\n * Blob info from a {@link BlobServiceClient.findBlobsByTags}\n */\nexport interface FilterBlobItem {\n /**\n * Blob Name.\n */\n name: string;\n\n /**\n * Container Name.\n */\n containerName: string;\n\n /**\n * Blob Tags.\n */\n tags?: Tags;\n\n /**\n * Tag value.\n *\n * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags.\n */\n tagValue: string;\n}\n\n/**\n * Segment response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface FilterBlobSegment {\n serviceEndpoint: string;\n where: string;\n blobs: FilterBlobItem[];\n continuationToken?: string;\n}\n\nexport interface PageRangeInfo {\n start: number;\n end: number;\n isClear: boolean;\n}\n\n/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */\nexport enum KnownEncryptionAlgorithmType {\n AES256 = \"AES256\",\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js new file mode 100644 index 0000000..93dc89c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { RestError } from "@azure/core-http"; +export * from "./BlobServiceClient"; +export * from "./Clients"; +export * from "./ContainerClient"; +export * from "./BlobLeaseClient"; +export * from "./BlobBatch"; +export * from "./BlobBatchClient"; +export * from "./BatchResponse"; +export * from "./StorageBrowserPolicyFactory"; +export * from "./credentials/AnonymousCredential"; +export * from "./credentials/Credential"; +export { BlockBlobTier, PremiumPageBlobTier, } from "./models"; +export * from "./Pipeline"; +export * from "./policies/AnonymousCredentialPolicy"; +export * from "./policies/CredentialPolicy"; +export * from "./StorageRetryPolicyFactory"; +export * from "./generatedModels"; +export { RestError }; +export { logger } from "./log"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map new file mode 100644 index 0000000..c9629eb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAE7C,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,+BAA+B,CAAC;AAC9C,cAAc,mCAAmC,CAAC;AAClD,cAAc,0BAA0B,CAAC;AAGzC,OAAO,EACL,aAAa,EAEb,mBAAmB,GAOpB,MAAM,UAAU,CAAC;AAClB,cAAc,YAAY,CAAC;AAC3B,cAAc,sCAAsC,CAAC;AACrD,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6BAA6B,CAAC;AAE5C,cAAc,mBAAmB,CAAC;AAClC,OAAO,EAAE,SAAS,EAAE,CAAC;AAMrB,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RestError } from \"@azure/core-http\";\n\nexport * from \"./BlobServiceClient\";\nexport * from \"./Clients\";\nexport * from \"./ContainerClient\";\nexport * from \"./BlobLeaseClient\";\nexport * from \"./BlobBatch\";\nexport * from \"./BlobBatchClient\";\nexport * from \"./BatchResponse\";\nexport * from \"./StorageBrowserPolicyFactory\";\nexport * from \"./credentials/AnonymousCredential\";\nexport * from \"./credentials/Credential\";\nexport { SasIPRange } from \"./sas/SasIPRange\";\nexport { Range } from \"./Range\";\nexport {\n BlockBlobTier,\n BlobImmutabilityPolicy,\n PremiumPageBlobTier,\n Tags,\n TagConditions,\n ContainerRequestConditions,\n HttpAuthorization,\n ModificationConditions,\n MatchConditions,\n} from \"./models\";\nexport * from \"./Pipeline\";\nexport * from \"./policies/AnonymousCredentialPolicy\";\nexport * from \"./policies/CredentialPolicy\";\nexport * from \"./StorageRetryPolicyFactory\";\nexport { CommonOptions } from \"./StorageClient\";\nexport * from \"./generatedModels\";\nexport { RestError };\nexport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n PageList,\n} from \"./PageBlobRangeResponse\";\nexport { logger } from \"./log\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js new file mode 100644 index 0000000..36c63bb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { RestError } from "@azure/core-http"; +export * from "./BlobServiceClient"; +export * from "./Clients"; +export * from "./ContainerClient"; +export * from "./BlobLeaseClient"; +export * from "./sas/AccountSASPermissions"; +export * from "./sas/AccountSASResourceTypes"; +export * from "./sas/AccountSASServices"; +export * from "./sas/AccountSASSignatureValues"; +export * from "./BlobBatch"; +export * from "./BlobBatchClient"; +export * from "./BatchResponse"; +export * from "./sas/BlobSASPermissions"; +export * from "./sas/BlobSASSignatureValues"; +export * from "./StorageBrowserPolicyFactory"; +export * from "./sas/ContainerSASPermissions"; +export * from "./credentials/AnonymousCredential"; +export * from "./credentials/Credential"; +export * from "./credentials/StorageSharedKeyCredential"; +export { BlockBlobTier, PremiumPageBlobTier, StorageBlobAudience, getBlobServiceAccountAudience, } from "./models"; +export * from "./Pipeline"; +export * from "./policies/AnonymousCredentialPolicy"; +export * from "./policies/CredentialPolicy"; +export * from "./StorageRetryPolicyFactory"; +export * from "./policies/StorageSharedKeyCredentialPolicy"; +export * from "./sas/SASQueryParameters"; +export * from "./generatedModels"; +export { RestError }; +export { logger } from "./log"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map new file mode 100644 index 0000000..0f32ad9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAG7C,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,0BAA0B,CAAC;AACzC,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,8BAA8B,CAAC;AAC7C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,mCAAmC,CAAC;AAClD,cAAc,0BAA0B,CAAC;AACzC,cAAc,0CAA0C,CAAC;AAGzD,OAAO,EACL,aAAa,EACb,mBAAmB,EAUnB,mBAAmB,EAEnB,6BAA6B,GAC9B,MAAM,UAAU,CAAC;AAClB,cAAc,YAAY,CAAC;AAC3B,cAAc,sCAAsC,CAAC;AACrD,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6CAA6C,CAAC;AAC5D,cAAc,0BAA0B,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAYlC,OAAO,EAAE,SAAS,EAAE,CAAC;AAMrB,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RestError } from \"@azure/core-http\";\n\nexport { PollOperationState, PollerLike } from \"@azure/core-lro\";\nexport * from \"./BlobServiceClient\";\nexport * from \"./Clients\";\nexport * from \"./ContainerClient\";\nexport * from \"./BlobLeaseClient\";\nexport * from \"./sas/AccountSASPermissions\";\nexport * from \"./sas/AccountSASResourceTypes\";\nexport * from \"./sas/AccountSASServices\";\nexport * from \"./sas/AccountSASSignatureValues\";\nexport * from \"./BlobBatch\";\nexport * from \"./BlobBatchClient\";\nexport * from \"./BatchResponse\";\nexport * from \"./sas/BlobSASPermissions\";\nexport * from \"./sas/BlobSASSignatureValues\";\nexport * from \"./StorageBrowserPolicyFactory\";\nexport * from \"./sas/ContainerSASPermissions\";\nexport * from \"./credentials/AnonymousCredential\";\nexport * from \"./credentials/Credential\";\nexport * from 
\"./credentials/StorageSharedKeyCredential\";\nexport { SasIPRange } from \"./sas/SasIPRange\";\nexport { Range } from \"./Range\";\nexport {\n BlockBlobTier,\n PremiumPageBlobTier,\n Tags,\n BlobDownloadResponseParsed,\n BlobImmutabilityPolicy,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n BlobQueryArrowField,\n BlobQueryArrowFieldType,\n HttpAuthorization,\n StorageBlobAudience,\n PollerLikeWithCancellation,\n getBlobServiceAccountAudience,\n} from \"./models\";\nexport * from \"./Pipeline\";\nexport * from \"./policies/AnonymousCredentialPolicy\";\nexport * from \"./policies/CredentialPolicy\";\nexport * from \"./StorageRetryPolicyFactory\";\nexport * from \"./policies/StorageSharedKeyCredentialPolicy\";\nexport * from \"./sas/SASQueryParameters\";\nexport { CommonOptions } from \"./StorageClient\";\nexport * from \"./generatedModels\";\nexport {\n AppendBlobRequestConditions,\n BlobRequestConditions,\n Metadata,\n PageBlobRequestConditions,\n TagConditions,\n ContainerRequestConditions,\n ModificationConditions,\n MatchConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nexport { RestError };\nexport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n PageList,\n} from \"./PageBlobRangeResponse\";\nexport { logger } from \"./log\";\nexport {\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js new file mode 100644 index 0000000..a746f21 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +/** + * The `@azure/logger` configuration for this package. + */ +export const logger = createClientLogger("storage-blob"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map new file mode 100644 index 0000000..839d2f0 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,cAAc,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n */\nexport const logger = createClientLogger(\"storage-blob\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js new file mode 100644 index 0000000..f4d4017 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { EncryptionAlgorithmAES25 } from "./utils/constants"; +/** + * Represents the access tier on a blob. 
+ * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export var BlockBlobTier; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed. + */ + BlockBlobTier["Cold"] = "Cold"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + BlockBlobTier["Archive"] = "Archive"; +})(BlockBlobTier || (BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export var PremiumPageBlobTier; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. + */ + PremiumPageBlobTier["P80"] = "P80"; +})(PremiumPageBlobTier || (PremiumPageBlobTier = {})); +export function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +export function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +export var StorageBlobAudience; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. 
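// [Editor's illustration — not part of the vendored file] The tier and audience values defined
// in this module are consumed through the package's public client API. A minimal, hypothetical
// sketch (client construction omitted; "myaccount" is a placeholder account name):
//
//   await blockBlobClient.setAccessTier(BlockBlobTier.Cool); // move an existing blob to the Cool tier
//   getBlobServiceAccountAudience("myaccount");
//   // => "https://myaccount.blob.core.windows.net/.default"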
+ */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(StorageBlobAudience || (StorageBlobAudience = {})); +export function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; +} +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map new file mode 100644 index 0000000..f48f9a0 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAalC,OAAO,EAAE,wBAAwB,EAAE,MAAM,mBAAmB,CAAC;AAyF7D;;;GAGG;AACH,MAAM,CAAN,IAAY,aAkBX;AAlBD,WAAY,aAAa;IACvB;;OAEG;IACH,4BAAW,CAAA;IACX;;OAEG;IACH,8BAAa,CAAA;IACb;;OAEG;IACH,8BAAa,CAAA;IACb;;;OAGG;IACH,oCAAmB,CAAA;AACrB,CAAC,EAlBW,aAAa,KAAb,aAAa,QAkBxB;AAED;;;;GAIG;AACH,MAAM,CAAN,IAAY,mBA6CX;AA7CD,WAAY,mBAAmB;IAC7B;;OAEG;IACH,gCAAS,CAAA;IACT;;OAEG;IACH,gCAAS,CAAA;IACT;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;AACb,CAAC,EA7CW,mBAAmB,KAAnB,mBAAmB,QA6C9B;AAED,MAAM,UAAU,YAAY,CAC1B,IAA8D;IAE9D,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,OAAO,IAAkB,CAAC,CAAC,qGAAqG;AAClI,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,GAAwB,EAAE,OAAgB;IAC7E,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE;QACnB,MAAM,IAAI,UAAU,CAAC,2DAA2D,CAAC,CAAC;KACnF;IAED,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,mBAAmB,EAAE;QACnC,GAAG,CAAC,mBAAmB,GAAG,wBAAwB,CAAC;KACpD;AACH,CAAC;AA2HD;;GAEG;AACH,MAAM,CAAN,IAAY,mBASX;AATD,WAAY,mBAAmB;IAC7B;;OAEG;IACH,gFAAyD,CAAA;IACzD;;OAEG;IACH,yFAAkE,CAAA;AACpE,CAAC,EATW,mBAAmB,KAAnB,mBAAmB,QAS9B;AAED,MAAM,UAAU,6BAA6B,CAAC,kBAA0B;IACtE,OAAO,WAAW,kBAAkB,iCAAiC,CAAC;AACxE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress, PollOperationState } from \"@azure/core-lro\";\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\nimport {\n LeaseAccessConditions,\n SequenceNumberAccessConditions,\n AppendPositionAccessConditions,\n AccessTier,\n CpkInfo,\n BlobDownloadResponseModel,\n} from \"./generatedModels\";\nimport { EncryptionAlgorithmAES25 } from \"./utils/constants\";\n\n/**\n * Blob tags.\n */\nexport type Tags = Record;\n\n/**\n * A map of name-value pairs to associate with the resource.\n */\nexport interface Metadata {\n /**\n * A name-value pair.\n */\n [propertyName: string]: string;\n}\n\n/**\n * standard HTTP conditional headers and tags condition.\n */\nexport interface ModifiedAccessConditions\n extends MatchConditions,\n ModificationConditions,\n TagConditions {}\n\n/**\n * standard HTTP conditional headers, tags condition and lease condition\n */\nexport interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions {}\n\n/**\n * Conditions to add to the creation of this page blob.\n */\nexport interface PageBlobRequestConditions\n extends BlobRequestConditions,\n SequenceNumberAccessConditions {}\n\n/**\n * Conditions to add to the creation of this append blob.\n */\nexport interface AppendBlobRequestConditions\n extends BlobRequestConditions,\n 
AppendPositionAccessConditions {}\n\n/**\n * Specifies HTTP options for conditional requests based on modification time.\n */\nexport interface ModificationConditions {\n /**\n * Specify this header value to operate only on a blob if it has been modified since the\n * specified date/time.\n */\n ifModifiedSince?: Date;\n /**\n * Specify this header value to operate only on a blob if it has not been modified since the\n * specified date/time.\n */\n ifUnmodifiedSince?: Date;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on ETag matching.\n */\nexport interface MatchConditions {\n /**\n * Specify an ETag value to operate only on blobs with a matching value.\n */\n ifMatch?: string;\n /**\n * Specify an ETag value to operate only on blobs without a matching value.\n */\n ifNoneMatch?: string;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on blob tags.\n */\nexport interface TagConditions {\n /**\n * Optional SQL statement to apply to the tags of the blob.\n */\n tagConditions?: string;\n}\n\n/**\n * Conditions to meet for the container.\n */\nexport interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions {}\n\n/**\n * Represents the access tier on a blob.\n * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.}\n */\nexport enum BlockBlobTier {\n /**\n * Optimized for storing data that is accessed frequently.\n */\n Hot = \"Hot\",\n /**\n * Optimized for storing data that is infrequently accessed and stored for at least 30 days.\n */\n Cool = \"Cool\",\n /**\n * Optimized for storing data that is rarely accessed.\n */\n Cold = \"Cold\",\n /**\n * Optimized for storing data that is rarely accessed and stored for at least 180 days\n * with flexible latency requirements (on the order of hours).\n */\n Archive = \"Archive\",\n}\n\n/**\n * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts.\n * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here}\n * for detailed information on the corresponding IOPS and throughput per PageBlobTier.\n */\nexport enum PremiumPageBlobTier {\n /**\n * P4 Tier.\n */\n P4 = \"P4\",\n /**\n * P6 Tier.\n */\n P6 = \"P6\",\n /**\n * P10 Tier.\n */\n P10 = \"P10\",\n /**\n * P15 Tier.\n */\n P15 = \"P15\",\n /**\n * P20 Tier.\n */\n P20 = \"P20\",\n /**\n * P30 Tier.\n */\n P30 = \"P30\",\n /**\n * P40 Tier.\n */\n P40 = \"P40\",\n /**\n * P50 Tier.\n */\n P50 = \"P50\",\n /**\n * P60 Tier.\n */\n P60 = \"P60\",\n /**\n * P70 Tier.\n */\n P70 = \"P70\",\n /**\n * P80 Tier.\n */\n P80 = \"P80\",\n}\n\nexport function toAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string | undefined\n): AccessTier | undefined {\n if (tier === undefined) {\n return undefined;\n }\n\n return tier as AccessTier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).\n}\n\nexport function ensureCpkIfSpecified(cpk: CpkInfo | undefined, isHttps: boolean): void {\n if (cpk && !isHttps) {\n throw new RangeError(\"Customer-provided encryption key must be used over HTTPS.\");\n }\n\n if (cpk && !cpk.encryptionAlgorithm) {\n cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;\n }\n}\n\n/**\n * Specifies the Replication Status of a blob. 
This is used when a storage account has\n * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}.\n */\nexport type ObjectReplicationStatus = \"complete\" | \"failed\";\n\n/**\n * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob.\n * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}.\n */\nexport interface ObjectReplicationRule {\n /**\n * The Object Replication Rule ID.\n */\n ruleId: string;\n\n /**\n * The Replication Status\n */\n replicationStatus: ObjectReplicationStatus;\n}\n\n/**\n * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}.\n * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the\n * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses\n * (e.g. {@link BlobProperties.ObjectReplicationDestinationPolicyId}.\n */\nexport interface ObjectReplicationPolicy {\n /**\n * The Object Replication Policy ID.\n */\n policyId: string;\n\n /**\n * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID.\n */\n rules: ObjectReplicationRule[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadResponseParsed extends BlobDownloadResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * The type of a {@link BlobQueryArrowField}.\n */\nexport type BlobQueryArrowFieldType =\n | \"int64\"\n | \"bool\"\n | \"timestamp[ms]\"\n | \"string\"\n | \"double\"\n | \"decimal\";\n\n/**\n * Describe a field in {@link BlobQueryArrowConfiguration}.\n */\nexport interface BlobQueryArrowField {\n /**\n * The type of the field.\n */\n type: BlobQueryArrowFieldType;\n\n /**\n * The name of the field.\n */\n name?: string;\n\n /**\n * The precision of the field. Required if type is \"decimal\".\n */\n precision?: number;\n\n /**\n * The scale of the field. 
Required if type is is \"decimal\".\n */\n scale?: number;\n}\n\n/**\n * Describe immutable policy for blob.\n */\nexport interface BlobImmutabilityPolicy {\n /**\n * Specifies the date time when the blobs immutability policy is set to expire.\n */\n expiriesOn?: Date;\n /**\n * Specifies the immutability policy mode to set on the blob.\n */\n policyMode?: BlobImmutabilityPolicyMode;\n}\n\n/**\n * Represents authentication information in Authorization, ProxyAuthorization,\n * WWW-Authenticate, and Proxy-Authenticate header values.\n */\nexport interface HttpAuthorization {\n /**\n * The scheme to use for authorization.\n */\n scheme: string;\n\n /**\n * the credentials containing the authentication information of the user agent for the resource being requested.\n */\n value: string;\n}\n\n/**\n * Defines the known cloud audiences for Storage.\n */\nexport enum StorageBlobAudience {\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Storage.\n */\n StorageOAuthScopes = \"https://storage.azure.com/.default\",\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Disk.\n */\n DiskComputeOAuthScopes = \"https://disk.compute.azure.com/.default\",\n}\n\nexport function getBlobServiceAccountAudience(storageAccountName: string): string {\n return `https://${storageAccountName}.blob.core.windows.net/.default`;\n}\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\nexport interface PollerLikeWithCancellation, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. 
After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js new file mode 100644 index 0000000..12270db --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CredentialPolicy } from "./CredentialPolicy"; +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +export class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} +//# sourceMappingURL=AnonymousCredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map new file mode 100644 index 0000000..ecb1efb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AnonymousCredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/AnonymousCredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;;GAGG;AACH,MAAM,OAAO,yBAA0B,SAAQ,gBAAgB;IAC7D;;;;OAIG;IACH,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources\n * or for use with Shared Access Signatures (SAS).\n */\nexport class AnonymousCredentialPolicy extends CredentialPolicy {\n /**\n * Creates an instance of AnonymousCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js new file mode 100644 index 0000000..a2f1fcc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy } from "@azure/core-http"; +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +export class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; + } +} +//# sourceMappingURL=CredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map new file mode 100644 index 0000000..1c1ba7e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"CredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/CredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAsC,MAAM,kBAAkB,CAAC;AAEzF;;;GAGG;AACH,MAAM,OAAgB,gBAAiB,SAAQ,iBAAiB;IAC9D;;;;OAIG;IACI,WAAW,CAAC,OAAoB;QACrC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;IACjE,CAAC;IAED;;;;;OAKG;IACO,WAAW,CAAC,OAAoB;QACxC,4EAA4E;QAC5E,qCAAqC;QACrC,OAAO,OAAO,CAAC;IACjB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BaseRequestPolicy, HttpOperationResponse, WebResource } from \"@azure/core-http\";\n\n/**\n * Credential policy used to sign HTTP(S) requests before sending. This is an\n * abstract class.\n */\nexport abstract class CredentialPolicy extends BaseRequestPolicy {\n /**\n * Sends out request.\n *\n * @param request -\n */\n public sendRequest(request: WebResource): Promise {\n return this._nextPolicy.sendRequest(this.signRequest(request));\n }\n\n /**\n * Child classes must implement this method with request signing. This method\n * will be executed in {@link sendRequest}.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n // Child classes must override this method with request signing. 
This method\n // will be executed in sendRequest().\n return request;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js new file mode 100644 index 0000000..a272017 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js @@ -0,0 +1,245 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { URLBuilder } from "@azure/core-http"; +import { BaseRequestPolicy, } from "@azure/core-http"; +import { delay } from "@azure/core-http"; +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. 
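// [Editor's illustration — not part of the vendored file] The cycler described above reduces to:
// cache one token, start a background refresh once inside the refresh window, and make callers
// wait only when no usable token exists. A simplified, hypothetical sketch of that idea:
function makeTokenCycler(getToken, refreshWindowInMs) {
  let token = null;      // last AccessToken produced by getToken()
  let refreshing = null; // in-flight refresh promise, if any
  const refresh = () => {
    if (refreshing === null) {
      refreshing = getToken()
        .then((newToken) => { token = newToken; return newToken; })
        .finally(() => { refreshing = null; });
    }
    return refreshing;
  };
  return async () => {
    if (token === null) return refresh();                                              // must refresh: block the caller
    if (token.expiresOnTimestamp - refreshWindowInMs < Date.now()) refresh().catch(() => {}); // should refresh: fire and forget
    return token;
  };
}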
+ * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. 
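// [Editor's illustration — not part of the vendored file] Worked example of the challenge
// handling implemented below, with hypothetical header values:
//
//   WWW-Authenticate: Bearer authorization_uri=https://login.microsoftonline.com/<tenant-id>/oauth2/authorize resource_id=https://storage.azure.com
//
//   parseChallenge(...) => { authorization_uri: "https://login.microsoftonline.com/<tenant-id>/oauth2/authorize",
//                            resource_id: "https://storage.azure.com" }
//
// The policy then requests a fresh token for the scope resource_id + "/.default"
// ("https://storage.azure.com/.default"), taking the tenant id from the first path
// segment of authorization_uri, and retries the original request with that token.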
+ * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} +//# sourceMappingURL=StorageBearerTokenChallengeAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map new file mode 100644 index 0000000..2992be8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"StorageBearerTokenChallengeAuthenticationPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageBearerTokenChallengeAuthenticationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAiD,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC7F,OAAO,EACL,iBAAiB,GAIlB,MAAM,kBAAkB,CAAC;AAG1B,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC;;GAEG;AACH,MAAM,SAAS,GAAG;IAChB,YAAY,EAAE,WAAW;IACzB;;OAEG;IACH,eAAe,EAAE;QACf;;WAEG;QACH,aAAa,EAAE,eAAe;KAC/B;CACF,CAAC;AAiCF,sDAAsD;AACtD,MAAM,sBAAsB,GAAuB;IACjD,uBAAuB,EAAE,IAAI;IAC7B,iBAAiB,EAAE,IAAI;IACvB,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC,EAAE,oCAAoC;CACvE,CAAC;AAEF;;;;;;;;;;;;GAYG;AACH,KAAK,UAAU,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB;IAEnB,4EAA4E;IAC5E,eAAe;IACf,KAAK,UAAU,iBAAiB;QAC9B,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;aAC/B;YAAC,WAAM;gBACN,OAAO,IAAI,CAAC;aACb;SACF;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;YAE1C,6CAA6C;YAC7C,IAAI,UAAU,KAAK,IAAI,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,OAAO,UAAU,CAAC;SACnB;IACH,CAAC;IAED,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;QACrB,MAAM,KAAK,CAAC,iBAAiB,CAAC,CAAC;QAE/B,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;KACnC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;IAErC,MAAM,OAAO,mCACR,sBAAsB,GACtB,kBAAkB,CACtB,CAAC;IAEF;;;OAGG;IACH,MAAM,MAAM,GAAG;QACb;;WAEG;QACH,IAAI,YAAY;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;QAChC,CAAC;QACD;;;WAGG;QACH,IAAI,aAAa;;YACf,OAAO,CACL,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,CAAC,CAAC,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1E,CAAC;QACJ,CAAC;QACD;;;WAGG;QACH,IAAI,WAAW;YACb,OAAO,CACL,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1F,CAAC;QACJ,CAAC;KACF,CAAC;IAEF;;;OAGG;IACH,SAAS,OAAO,CAAC,eAAgC;;QAC/C,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;YACxB,yDAAyD;YACzD,MAAM,iBAAiB,GAAG,GAAgC,EAAE,CAC1D,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;YAE/C,wEAAwE;YACxE,6CAA6C;YAC7C,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;YACzB,+DAA+D;YAC/D,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,IAAI,CAAC,GAAG,EAAE,CACxC;iBACE,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;gBACf,OAAO,KAAK,CAAC;YACf,CAAC,CAAC;iBACD,KAAK,CAAC,CAAC,MAAM,EAAE,EAAE;gBAChB,sEAAsE;gBACtE,qEAAqE;gBACrE,mBAAmB;gBACnB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;gBACb,MAAM,MAAM,CAAC;YACf,CAAC,CAAC,CAAC;SACN;QAED,OAAO,aAAqC,CAAC;IAC/C,CAAC;IAED,OAAO,KAAK,EAAE,YAA6B,EAAwB,EAAE;QACnE,EAAE;QACF,gBAAgB;QAChB,+DAA+D;QAC/D,6CAA6C;QAC7C,+DAA+D;QAC/D,yCAAyC;QACzC,6DAA6D;QAC7D,YAAY;QACZ,EAAE;QAEF,IAAI,MAAM,CAAC,WAAW;YAAE,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;SACvB;QAED,OAAO,KAAoB,CAAC;IAC9B,CAAC,CAAC;AACJ,CAAC;AACD;;;GAGG;AACH,SAAS,YAAY,CAAC,QAA+B;IACnD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;IAC3D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE;QACxC,OAAO,SAAS,CAAC;KAClB;IACD,OAAO;AACT,CAAC;AAUD;;;;;GAKG;AACH,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,eAAe,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC1D,MAAM,cAAc,GAAG,GAAG,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;IAChF,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CACpD,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,KA
AK,CAAC,GAAG,CAAC,CAAC,CACnE,CAAC;IACF,mCAAmC;IACnC,OAAO,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,iCAAM,CAAC,GAAK,CAAC,EAAG,EAAE,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,aAAa;AAEb;;;;;;GAMG;AAEH,MAAM,UAAU,+CAA+C,CAC7D,UAA2B,EAC3B,MAAyB;IAEzB,wFAAwF;IACxF,IAAI,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAErD,MAAM,+CAAgD,SAAQ,iBAAiB;QAC7E,YAAmB,UAAyB,EAAE,OAA6B;YACzE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC7B,CAAC;QAEM,KAAK,CAAC,WAAW,CAAC,WAA4B;YACnD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;gBACzD,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;aACH;YAED,MAAM,gBAAgB,GAAG,QAAQ,CAAC;YAClC,MAAM,KAAK,GAAG,CACZ,MAAM,gBAAgB,CAAC;gBACrB,WAAW,EAAE,WAAW,CAAC,WAAW;gBACpC,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;iBAC3C;aACF,CAAC,CACH,CAAC,KAAK,CAAC;YACR,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,UAAU,KAAK,EAAE,CAAC,CAAC;YAEpF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;YAEjE,IAAI,CAAA,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,MAAM,MAAK,GAAG,EAAE;gBAC5B,MAAM,SAAS,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;gBACzC,IAAI,SAAS,EAAE;oBACb,MAAM,aAAa,GAAc,cAAc,CAAC,SAAS,CAAC,CAAC;oBAC3D,MAAM,eAAe,GAAG,aAAa,CAAC,WAAW,GAAG,SAAS,CAAC,YAAY,CAAC;oBAC3E,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAC;oBACxE,MAAM,YAAY,GAAG,aAAa,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;oBACzD,MAAM,QAAQ,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;oBACjC,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,UAAU,EAAE,eAAe,CAAC,CAAC;oBAE5E,MAAM,iBAAiB,GAAG,CACxB,MAAM,oBAAoB,CAAC;wBACzB,WAAW,EAAE,WAAW,CAAC,WAAW;wBACpC,cAAc,EAAE;4BACd,cAAc,EAAE,WAAW,CAAC,cAAc;yBAC3C;wBACD,QAAQ,EAAE,QAAQ;qBACnB,CAAC,CACH,CAAC,KAAK,CAAC;oBAER,QAAQ,GAAG,oBAAoB,CAAC;oBAChC,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,SAAS,CAAC,eAAe,CAAC,aAAa,EACvC,UAAU,iBAAiB,EAAE,CAC9B,CAAC;oBACF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBAClD;aACF;YAED,OAAO,QAAQ,CAAC;QAClB,CAAC;KACF;IAED,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,+CAA+C,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAClF,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential, URLBuilder } from \"@azure/core-http\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"@azure/core-http\";\nimport { HttpOperationResponse } from \"@azure/core-http\";\nimport { WebResourceLike } from \"@azure/core-http\";\nimport { delay } from \"@azure/core-http\";\n\n/**\n * A set of constants used internally when processing requests.\n */\nconst Constants = {\n DefaultScope: \"/.default\",\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n },\n};\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before 
token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nconst DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 
0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n/**\n * We will retrieve the challenge only if the response status code was 401,\n * and if the response contained the header \"WWW-Authenticate\" with a non-empty value.\n */\nfunction getChallenge(response: HttpOperationResponse): string | undefined {\n const challenge = response.headers.get(\"WWW-Authenticate\");\n if (response.status === 401 && challenge) {\n return challenge;\n }\n return;\n}\n\n/**\n * Challenge structure\n */\ninterface Challenge {\n authorization_uri: string;\n resource_id: string;\n}\n\n/**\n * Converts: `Bearer a=\"b\" c=\"d\"`.\n * Into: `[ { a: 'b', c: 'd' }]`.\n *\n * @internal\n */\nfunction parseChallenge(challenge: string): any {\n const bearerChallenge = challenge.slice(\"Bearer \".length);\n const challengeParts = `${bearerChallenge.trim()} `.split(\" \").filter((x) => x);\n const keyValuePairs = challengeParts.map((keyValue) =>\n (([key, value]) => ({ [key]: value }))(keyValue.trim().split(\"=\"))\n );\n // Key-value pairs to plain object:\n return keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {});\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\n\nexport function storageBearerTokenChallengeAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n let getToken = createTokenCycler(credential, 
scopes);\n\n class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const getTokenInternal = getToken;\n const token = (\n await getTokenInternal({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n })\n ).token;\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n\n const response = await this._nextPolicy.sendRequest(webResource);\n\n if (response?.status === 401) {\n const challenge = getChallenge(response);\n if (challenge) {\n const challengeInfo: Challenge = parseChallenge(challenge);\n const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope;\n const parsedAuthUri = URLBuilder.parse(challengeInfo.authorization_uri);\n const pathSegments = parsedAuthUri.getPath()!.split(\"/\");\n const tenantId = pathSegments[1];\n const getTokenForChallenge = createTokenCycler(credential, challengeScopes);\n\n const tokenForChallenge = (\n await getTokenForChallenge({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n tenantId: tenantId,\n })\n ).token;\n\n getToken = getTokenForChallenge;\n webResource.headers.set(\n Constants.HeaderConstants.AUTHORIZATION,\n `Bearer ${tokenForChallenge}`\n );\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return response;\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js new file mode 100644 index 0000000..ad35d50 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, isNode, } from "@azure/core-http"; +import { HeaderConstants, URLConstants } from "../utils/constants"; +import { setURLParameter } from "../utils/utils.common"; +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. 
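// [Editor's illustration — not part of the vendored file] Effect of this policy in a browser,
// with a hypothetical URL and timestamp (the query parameter name comes from
// URLConstants.Parameters.FORCE_BROWSER_NO_CACHE):
//
//   GET https://myaccount.blob.core.windows.net/container/blob
//   becomes
//   GET https://myaccount.blob.core.windows.net/container/blob?<no-cache-param>=1700000000000
//
// In Node.js (isNode === true) the request is forwarded unchanged.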
+ * + * @param request - + */ + async sendRequest(request) { + if (isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=StorageBrowserPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map new file mode 100644 index 0000000..0916fcf --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageBrowserPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageBrowserPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EAEjB,MAAM,GAIP,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,eAAe,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AACnE,OAAO,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAExD;;;;;;;;;;GAUG;AACH,MAAM,OAAO,oBAAqB,SAAQ,iBAAiB;IACzD;;;;OAIG;IACH,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,MAAM,EAAE;YACV,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,MAAM,EAAE;YACrF,OAAO,CAAC,GAAG,GAAG,eAAe,CAC3B,OAAO,CAAC,GAAG,EACX,YAAY,CAAC,UAAU,CAAC,sBAAsB,EAC9C,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAChC,CAAC;SACH;QAED,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;QAE/C,oFAAoF;QACpF,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,cAAc,CAAC,CAAC;QAEvD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants, URLConstants } from \"../utils/constants\";\nimport { setURLParameter } from \"../utils/utils.common\";\n\n/**\n * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:\n *\n * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.\n * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL\n * thus avoid the browser cache.\n *\n * 2. Remove cookie header for security\n *\n * 3. Remove content-length header to avoid browsers warning\n */\nexport class StorageBrowserPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of StorageBrowserPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n return this._nextPolicy.sendRequest(request);\n }\n\n if (request.method.toUpperCase() === \"GET\" || request.method.toUpperCase() === \"HEAD\") {\n request.url = setURLParameter(\n request.url,\n URLConstants.Parameters.FORCE_BROWSER_NO_CACHE,\n new Date().getTime().toString()\n );\n }\n\n request.headers.remove(HeaderConstants.COOKIE);\n\n // According to XHR standards, content-length should be fully controlled by browsers\n request.headers.remove(HeaderConstants.CONTENT_LENGTH);\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js new file mode 100644 index 0000000..0400373 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js @@ -0,0 +1,214 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +import { BaseRequestPolicy, } from "@azure/core-http"; +import { URLConstants } from "../utils/constants"; +import { delay, setURLHost, setURLParameter } from "../utils/utils.common"; +import { logger } from "../log"; +/** + * A factory method used to generated a RetryPolicy factory. + * + * @param retryOptions - + */ +export function NewRetryPolicyFactory(retryOptions) { + return { + create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); + }, + }; +} +/** + * RetryPolicy types. + */ +export var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR = new AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export class StorageRetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? 
Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + } +} +//# sourceMappingURL=StorageRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map new file mode 100644 index 0000000..11f74f2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageRetryPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,OAAO,EAEL,iBAAiB,GAOlB,MAAM,kBAAkB,CAAC;AAG1B,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC3E,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC;;;;GAIG;AACH,MAAM,UAAU,qBAAqB,CAAC,YAAkC;IACtE,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAsB,EAAE;YACvF,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACnE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,CAAN,IAAY,sBASX;AATD,WAAY,sBAAsB;IAChC;;OAEG;IACH,iFAAW,CAAA;IACX;;OAEG;IACH,qEAAK,CAAA;AACP,CAAC,EATW,sBAAsB,KAAtB,sBAAsB,QASjC;AAED,wCAAwC;AACxC,MAAM,qBAAqB,GAAwB;IACjD,iBAAiB,EAAE,GAAG,GAAG,IAAI;IAC7B,QAAQ,EAAE,CAAC;IACX,cAAc,EAAE,CAAC,GAAG,IAAI;IACxB,eAAe,EAAE,sBAAsB,CAAC,WAAW;IACnD,aAAa,EAAE,EAAE;IACjB,cAAc,EAAE,SAAS,EAAE,2CAA2C;CACvE,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;AAEvE;;GAEG;AACH,MAAM,OAAO,kBAAmB,SAAQ,iBAAiB;IAMvD;;;;;;OAMG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,eAAoC,qBAAqB;QAEzD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAE3B,2BAA2B;QAC3B,IAAI,CAAC,YAAY,GAAG;YAClB,eAAe,EAAE,YAAY,CAAC,eAAe;gBAC3C,CAAC,CAAC,YAAY,CAAC,eAAe;gBAC9B,CAAC,CAAC,qBAAqB,CAAC,eAAe;YAEzC,QAAQ,EACN,YAAY,CAAC,QAAQ,IAAI,YAAY,CAAC,QAAQ,IAAI,CAAC;gBACjD,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,QAAQ,CAAC;gBACnC,CAAC,CAAC,qBAAqB,CAAC,QAAQ;YAEpC,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;gBAC7D,CAAC,CAAC,YAAY,CAAC,cAAc;gBAC7B,CAAC,CAAC,qBAAqB,CAAC,cAAc;YAE1C,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;gBAC7D,CAAC,CAAC,IAAI,CAAC,GAAG,CACN,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB;oBAC5B,CAAC,CAAC,YAAY,CAAC,iBAAiB;oBAChC,CAAC,CAAC,qBAAqB,CAAC,iBAAkB,CAC7C;gBACH,CAAC,CAAC,qBAAqB,CAAC,cAAc;YAE1C,iBAAiB,EACf,YAAY,CAAC,iBAAiB,IAAI,YAAY,CAAC,iBAAiB,IAAI,CAAC;gBACnE,CAAC,CAAC,YAAY,CAAC,iBAAiB;gBAChC,CAAC,CAAC,qBAAqB,CAAC,iBAAiB;YAE7C,aAAa,EAAE,YAAY,CAAC,aAAa;gBACvC,CAAC,CAAC,YAAY,CAAC,aAAa;gBAC5B,CAAC,CAAC,qBAAqB,CAAC,aAAa;SACxC,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;IACpD,CAAC;IAED;;;;;;;;;OASG;IACO,KAAK,CAAC,kBAAkB,CAChC,OAAoB,EACpB,eAAwB,EACxB,OAAe;QAEf,MAAM,UAAU,GAAgB,OAAO,CAAC,KAAK,EAAE,CAAC;QAEhD,MAAM,cAAc,GAClB,eAAe;YACf,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa
;YAChC,CAAC,CAAC,OAAO,CAAC,MAAM,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,OAAO,CAAC,MAAM,KAAK,SAAS,CAAC;YACxF,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC;QAEpB,IAAI,CAAC,cAAc,EAAE;YACnB,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,aAAc,CAAC,CAAC;SAC/E;QAED,kEAAkE;QAClE,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE;YACpC,UAAU,CAAC,GAAG,GAAG,eAAe,CAC9B,UAAU,CAAC,GAAG,EACd,YAAY,CAAC,UAAU,CAAC,OAAO,EAC/B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,cAAe,GAAG,IAAI,CAAC,CAAC,QAAQ,EAAE,CAChE,CAAC;SACH;QAED,IAAI,QAA2C,CAAC;QAChD,IAAI;YACF,MAAM,CAAC,IAAI,CAAC,2BAA2B,OAAO,IAAI,cAAc,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC;YAC9F,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC1D,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACxD,OAAO,QAAQ,CAAC;aACjB;YAED,eAAe,GAAG,eAAe,IAAI,CAAC,CAAC,cAAc,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC;SACnF;QAAC,OAAO,GAAQ,EAAE;YACjB,MAAM,CAAC,KAAK,CAAC,uCAAuC,GAAG,CAAC,OAAO,WAAW,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;YACtF,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,EAAE;gBAC7D,MAAM,GAAG,CAAC;aACX;SACF;QAED,MAAM,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;QAC/D,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,eAAe,EAAE,EAAE,OAAO,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;;OAOG;IACO,WAAW,CACnB,cAAuB,EACvB,OAAe,EACf,QAAgC,EAChC,GAAe;QAEf,IAAI,OAAO,IAAI,IAAI,CAAC,YAAY,CAAC,QAAS,EAAE;YAC1C,MAAM,CAAC,IAAI,CACT,2BAA2B,OAAO,gBAAgB,IAAI,CAAC,YAAY;iBAChE,QAAS,mBAAmB,CAChC,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QAED,iFAAiF;QACjF,uBAAuB;QACvB,MAAM,eAAe,GAAG;YACtB,WAAW;YACX,iBAAiB;YACjB,cAAc;YACd,YAAY;YACZ,QAAQ;YACR,WAAW;YACX,SAAS;YACT,OAAO;YACP,oBAAoB,EAAE,2DAA2D;SAClF,CAAC;QACF,IAAI,GAAG,EAAE;YACP,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IACE,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAC/C,GAAG,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAClD,CAAC,GAAG,CAAC,IAAI,IAAI,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,WAAW,EAAE,KAAK,cAAc,CAAC,EAClE;oBACA,MAAM,CAAC,IAAI,CAAC,8BAA8B,cAAc,qBAAqB,CAAC,CAAC;oBAC/E,OAAO,IAAI,CAAC;iBACb;aACF;SACF;QAED,kFAAkF;QAClF,gFAAgF;QAChF,gEAAgE;QAChE,IAAI,QAAQ,IAAI,GAAG,EAAE;YACnB,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;YACzE,IAAI,CAAC,cAAc,IAAI,UAAU,KAAK,GAAG,EAAE;gBACzC,MAAM,CAAC,IAAI,CAAC,qDAAqD,CAAC,CAAC;gBACnE,OAAO,IAAI,CAAC;aACb;YAED,0CAA0C;YAC1C,IAAI,UAAU,KAAK,GAAG,IAAI,UAAU,KAAK,GAAG,EAAE;gBAC5C,MAAM,CAAC,IAAI,CAAC,2CAA2C,UAAU,GAAG,CAAC,CAAC;gBACtE,OAAO,IAAI,CAAC;aACb;SACF;QAED,IAAI,CAAA,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,IAAI,MAAK,aAAa,KAAI,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,OAAO,CAAC,UAAU,CAAC,iCAAiC,CAAC,CAAA,EAAE;YAC7F,MAAM,CAAC,IAAI,CACT,iFAAiF,CAClF,CAAC;YACF,OAAO,IAAI,CAAC;SACb;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;OAMG;IACK,KAAK,CAAC,KAAK,CAAC,cAAuB,EAAE,OAAe,EAAE,WAA6B;QACzF,IAAI,aAAa,GAAW,CAAC,CAAC;QAE9B,IAAI,cAAc,EAAE;YAClB,QAAQ,IAAI,CAAC,YAAY,CAAC,eAAe,EAAE;gBACzC,KAAK,sBAAsB,CAAC,WAAW;oBACrC,aAAa,GAAG,IAAI,CAAC,GAAG,CACtB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,EAClE,IAAI,CAAC,YAAY,CAAC,iBAAkB,CACrC,CAAC;oBACF,MAAM;gBACR,KAAK,sBAAsB,CAAC,KAAK;oBAC/B,aAAa,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,CAAC;oBAClD,MAAM;aACT;SACF;aAAM;YACL,aAAa,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC;SACtC;QAED,MAAM,CAAC,IAAI,CAAC,0BAA0B,aAAa,IAAI,CAAC,CAAC;QACzD,OAAO,KAAK,CAAC,aAAa,EAAE,WAAW,EAAE,iBAAiB,CAAC,CAAC;IAC9D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from 
\"@azure/abort-controller\";\n\nimport {\n AbortSignalLike,\n BaseRequestPolicy,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RestError,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { StorageRetryOptions } from \"../StorageRetryPolicyFactory\";\nimport { URLConstants } from \"../utils/constants\";\nimport { delay, setURLHost, setURLParameter } from \"../utils/utils.common\";\nimport { logger } from \"../log\";\n\n/**\n * A factory method used to generated a RetryPolicy factory.\n *\n * @param retryOptions -\n */\nexport function NewRetryPolicyFactory(retryOptions?: StorageRetryOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy => {\n return new StorageRetryPolicy(nextPolicy, options, retryOptions);\n },\n };\n}\n\n/**\n * RetryPolicy types.\n */\nexport enum StorageRetryPolicyType {\n /**\n * Exponential retry. Retry time delay grows exponentially.\n */\n EXPONENTIAL,\n /**\n * Linear retry. Retry time delay grows linearly.\n */\n FIXED,\n}\n\n// Default values of StorageRetryOptions\nconst DEFAULT_RETRY_OPTIONS: StorageRetryOptions = {\n maxRetryDelayInMs: 120 * 1000,\n maxTries: 4,\n retryDelayInMs: 4 * 1000,\n retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,\n secondaryHost: \"\",\n tryTimeoutInMs: undefined, // Use server side default timeout strategy\n};\n\nconst RETRY_ABORT_ERROR = new AbortError(\"The operation was aborted.\");\n\n/**\n * Retry policy with exponential retry and linear retry implemented.\n */\nexport class StorageRetryPolicy extends BaseRequestPolicy {\n /**\n * RetryOptions.\n */\n private readonly retryOptions: StorageRetryOptions;\n\n /**\n * Creates an instance of RetryPolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param retryOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryOptions: StorageRetryOptions = DEFAULT_RETRY_OPTIONS\n ) {\n super(nextPolicy, options);\n\n // Initialize retry options\n this.retryOptions = {\n retryPolicyType: retryOptions.retryPolicyType\n ? retryOptions.retryPolicyType\n : DEFAULT_RETRY_OPTIONS.retryPolicyType,\n\n maxTries:\n retryOptions.maxTries && retryOptions.maxTries >= 1\n ? Math.floor(retryOptions.maxTries)\n : DEFAULT_RETRY_OPTIONS.maxTries,\n\n tryTimeoutInMs:\n retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0\n ? retryOptions.tryTimeoutInMs\n : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,\n\n retryDelayInMs:\n retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0\n ? Math.min(\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs!\n )\n : DEFAULT_RETRY_OPTIONS.retryDelayInMs,\n\n maxRetryDelayInMs:\n retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,\n\n secondaryHost: retryOptions.secondaryHost\n ? retryOptions.secondaryHost\n : DEFAULT_RETRY_OPTIONS.secondaryHost,\n };\n }\n\n /**\n * Sends request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n return this.attemptSendRequest(request, false, 1);\n }\n\n /**\n * Decide and perform next retry. Won't mutate request parameter.\n *\n * @param request -\n * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then\n * the resource was not found. This may be due to replication delay. 
So, in this\n * case, we'll never try the secondary again for this operation.\n * @param attempt - How many retries has been attempted to performed, starting from 1, which includes\n * the attempt will be performed by this method call.\n */\n protected async attemptSendRequest(\n request: WebResource,\n secondaryHas404: boolean,\n attempt: number\n ): Promise {\n const newRequest: WebResource = request.clone();\n\n const isPrimaryRetry =\n secondaryHas404 ||\n !this.retryOptions.secondaryHost ||\n !(request.method === \"GET\" || request.method === \"HEAD\" || request.method === \"OPTIONS\") ||\n attempt % 2 === 1;\n\n if (!isPrimaryRetry) {\n newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost!);\n }\n\n // Set the server-side timeout query parameter \"timeout=[seconds]\"\n if (this.retryOptions.tryTimeoutInMs) {\n newRequest.url = setURLParameter(\n newRequest.url,\n URLConstants.Parameters.TIMEOUT,\n Math.floor(this.retryOptions.tryTimeoutInMs! / 1000).toString()\n );\n }\n\n let response: HttpOperationResponse | undefined;\n try {\n logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? \"Primary\" : \"Secondary\"}`);\n response = await this._nextPolicy.sendRequest(newRequest);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {\n return response;\n }\n\n secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);\n } catch (err: any) {\n logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) {\n throw err;\n }\n }\n\n await this.delay(isPrimaryRetry, attempt, request.abortSignal);\n return this.attemptSendRequest(request, secondaryHas404, ++attempt);\n }\n\n /**\n * Decide whether to retry according to last HTTP response and retry counters.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param response -\n * @param err -\n */\n protected shouldRetry(\n isPrimaryRetry: boolean,\n attempt: number,\n response?: HttpOperationResponse,\n err?: RestError\n ): boolean {\n if (attempt >= this.retryOptions.maxTries!) {\n logger.info(\n `RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions\n .maxTries!}, no further try.`\n );\n return false;\n }\n\n // Handle network failures, you may need to customize the list when you implement\n // your own http client\n const retriableErrors = [\n \"ETIMEDOUT\",\n \"ESOCKETTIMEDOUT\",\n \"ECONNREFUSED\",\n \"ECONNRESET\",\n \"ENOENT\",\n \"ENOTFOUND\",\n \"TIMEOUT\",\n \"EPIPE\",\n \"REQUEST_SEND_ERROR\", // For default xhr based http client provided in ms-rest-js\n ];\n if (err) {\n for (const retriableError of retriableErrors) {\n if (\n err.name.toUpperCase().includes(retriableError) ||\n err.message.toUpperCase().includes(retriableError) ||\n (err.code && err.code.toString().toUpperCase() === retriableError)\n ) {\n logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`);\n return true;\n }\n }\n }\n\n // If attempt was against the secondary & it returned a StatusNotFound (404), then\n // the resource was not found. This may be due to replication delay. So, in this\n // case, we'll never try the secondary again for this operation.\n if (response || err) {\n const statusCode = response ? response.status : err ? 
err.statusCode : 0;\n if (!isPrimaryRetry && statusCode === 404) {\n logger.info(`RetryPolicy: Secondary access with 404, will retry.`);\n return true;\n }\n\n // Server internal error or server timeout\n if (statusCode === 503 || statusCode === 500) {\n logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`);\n return true;\n }\n }\n\n if (err?.code === \"PARSE_ERROR\" && err?.message.startsWith(`Error \"Error: Unclosed root tag`)) {\n logger.info(\n \"RetryPolicy: Incomplete XML response likely due to service timeout, will retry.\"\n );\n return true;\n }\n\n return false;\n }\n\n /**\n * Delay a calculated time between retries.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param abortSignal -\n */\n private async delay(isPrimaryRetry: boolean, attempt: number, abortSignal?: AbortSignalLike) {\n let delayTimeInMs: number = 0;\n\n if (isPrimaryRetry) {\n switch (this.retryOptions.retryPolicyType) {\n case StorageRetryPolicyType.EXPONENTIAL:\n delayTimeInMs = Math.min(\n (Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs!,\n this.retryOptions.maxRetryDelayInMs!\n );\n break;\n case StorageRetryPolicyType.FIXED:\n delayTimeInMs = this.retryOptions.retryDelayInMs!;\n break;\n }\n } else {\n delayTimeInMs = Math.random() * 1000;\n }\n\n logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`);\n return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js new file mode 100644 index 0000000..ac9b2ce --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HeaderConstants } from "../utils/constants"; +import { getURLPath, getURLQueries } from "../utils/utils.common"; +import { CredentialPolicy } from "./CredentialPolicy"; +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. 
+ * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } +} +//# sourceMappingURL=StorageSharedKeyCredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map new file mode 100644 index 0000000..a421e65 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"StorageSharedKeyCredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageSharedKeyCredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAClE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;GAEG;AACH,MAAM,OAAO,gCAAiC,SAAQ,gBAAgB;IAMpE;;;;;OAKG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,OAAmC;QAEnC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAED;;;;OAIG;IACO,WAAW,CAAC,OAAoB;QACxC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;QAEzE,IACE,OAAO,CAAC,IAAI;YACZ,CAAC,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAK,OAAO,CAAC,IAAe,KAAK,SAAS,CAAC;YAC5E,OAAO,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EACvB;YACA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,cAAc,EAAE,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;SACtF;QAED,MAAM,YAAY,GAChB;YACE,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE;YAC5B,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,cAAc,CAAC;YAClE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,WAAW,CAAC;YAC/D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,YAAY,CAAC;YAChE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,CAAC;YACxD,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,iBAAiB,CAAC;YACrE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,QAAQ,CAAC;YAC5D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,aAAa,CAAC;YACjE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,mBAAmB,CAAC;YACvE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC;SAC1D,CAAC,IAAI,CAAC,IAAI,CAAC;YACZ,IAAI;YACJ,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC;YAC3C,IAAI,CAAC,8BAA8B,CAAC,OAAO,CAAC,CAAC;QAE/C,MAAM,SAAS,GAAW,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;QACvE,OAAO,CAAC,OAAO,CAAC,GAAG,CACjB,eAAe,CAAC,aAAa,EAC7B,aAAa,IAAI,CAAC,OAAO,CAAC,WAAW,IAAI,SAAS,EAAE,CACrD,CAAC;QAEF,uCAAuC;QACvC,0DAA0D;QAC1D,mEAAmE;QACnE,+EAA+E;QAC/E,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;OAMG;IACK,oBAAoB,CAAC,OAAoB,EAAE,UAAkB;QACnE,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QAE9C,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,CAAC;SACX;QAED,0EAA0E;QAC1E,sEAAsE;QACtE,yFAAyF;QACzF,IAAI,UAAU,KAAK,eAAe,CAAC,cAAc,IAAI,KAAK,KAAK,GAAG,EAAE;YAClE,OAAO,EAAE,CAAC;SACX;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;;;;;;OAYG;IACK,6BAA6B,CAAC,OAAoB;QACxD,IAAI,YAAY,GAAG,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,EAAE;YACjE,OAAO,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,eAAe,CAAC,kBAAkB,CAAC,CAAC;QACjF,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAU,EAAE;YACjC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAClE,CAAC,CAAC,CAAC;QAEH,2BAA2B;QAC3B,YAAY,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE;YACzD,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACjF,OAAO,KAAK,CAAC;aACd;YACD,OAAO,IAAI,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,IAAI,gCAAgC,GAAW,EAAE,CAAC;QAClD,YAAY,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE;YAC9B,gCAAgC,IAAI,GAAG,MAAM,CAAC,IAAI;iBAC/C,WAAW,EAAE;iBACb,SAAS,EAAE,IAAI,MAAM,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC;QAChD,CAAC,CAAC,CAAC;QAEH,OAAO,gCAAgC,CAAC;IAC1C,CAAC;IAED;;;;OAIG;IACK,8BAA8B,CAAC,OAAoB;QACzD,MAAM,IAAI,GAAG,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC;QAE5C,IAAI,2BAA2B,GAAW,EAAE,CAAC;QAC7C,2BAA2B,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,WAAW,GAAG,IAAI,EAAE,CAAC;QAErE,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC3C,MAAM,gBAAgB,GAA8B,EAAE,CAAC;Q
ACvD,IAAI,OAAO,EAAE;YACX,MAAM,SAAS,GAAa,EAAE,CAAC;YAC/B,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;gBACzB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;oBACtD,MAAM,YAAY,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC;oBACvC,gBAAgB,CAAC,YAAY,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;oBAC9C,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;iBAC9B;aACF;YAED,SAAS,CAAC,IAAI,EAAE,CAAC;YACjB,KAAK,MAAM,GAAG,IAAI,SAAS,EAAE;gBAC3B,2BAA2B,IAAI,KAAK,GAAG,IAAI,kBAAkB,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC;aACxF;SACF;QAED,OAAO,2BAA2B,CAAC;IACrC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions, WebResource } from \"@azure/core-http\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { HeaderConstants } from \"../utils/constants\";\nimport { getURLPath, getURLQueries } from \"../utils/utils.common\";\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.\n */\nexport class StorageSharedKeyCredentialPolicy extends CredentialPolicy {\n /**\n * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy\n */\n private readonly factory: StorageSharedKeyCredential;\n\n /**\n * Creates an instance of StorageSharedKeyCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n * @param factory -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n factory: StorageSharedKeyCredential\n ) {\n super(nextPolicy, options);\n this.factory = factory;\n }\n\n /**\n * Signs request.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());\n\n if (\n request.body &&\n (typeof request.body === \"string\" || (request.body as Buffer) !== undefined) &&\n request.body.length > 0\n ) {\n request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));\n }\n\n const stringToSign: string =\n [\n request.method.toUpperCase(),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),\n this.getHeaderValueToSign(request, HeaderConstants.DATE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.RANGE),\n ].join(\"\\n\") +\n \"\\n\" +\n this.getCanonicalizedHeadersString(request) +\n this.getCanonicalizedResourceString(request);\n\n const signature: string = this.factory.computeHMACSHA256(stringToSign);\n request.headers.set(\n HeaderConstants.AUTHORIZATION,\n `SharedKey ${this.factory.accountName}:${signature}`\n );\n\n // console.log(`[URL]:${request.url}`);\n // console.log(`[HEADERS]:${request.headers.toString()}`);\n // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);\n // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);\n return request;\n }\n\n /**\n * Retrieve header value according to shared key sign 
rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n *\n * @param request -\n * @param headerName -\n */\n private getHeaderValueToSign(request: WebResource, headerName: string): string {\n const value = request.headers.get(headerName);\n\n if (!value) {\n return \"\";\n }\n\n // When using version 2015-02-21 or later, if Content-Length is zero, then\n // set the Content-Length part of the StringToSign to an empty string.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n if (headerName === HeaderConstants.CONTENT_LENGTH && value === \"0\") {\n return \"\";\n }\n\n return value;\n }\n\n /**\n * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:\n * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.\n * 2. Convert each HTTP header name to lowercase.\n * 3. Sort the headers lexicographically by header name, in ascending order.\n * Each header may appear only once in the string.\n * 4. Replace any linear whitespace in the header value with a single space.\n * 5. Trim any whitespace around the colon in the header.\n * 6. Finally, append a new-line character to each canonicalized header in the resulting list.\n * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.\n *\n * @param request -\n */\n private getCanonicalizedHeadersString(request: WebResource): string {\n let headersArray = request.headers.headersArray().filter((value) => {\n return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);\n });\n\n headersArray.sort((a, b): number => {\n return a.name.toLowerCase().localeCompare(b.name.toLowerCase());\n });\n\n // Remove duplicate headers\n headersArray = headersArray.filter((value, index, array) => {\n if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {\n return false;\n }\n return true;\n });\n\n let canonicalizedHeadersStringToSign: string = \"\";\n headersArray.forEach((header) => {\n canonicalizedHeadersStringToSign += `${header.name\n .toLowerCase()\n .trimRight()}:${header.value.trimLeft()}\\n`;\n });\n\n return canonicalizedHeadersStringToSign;\n }\n\n /**\n * Retrieves the webResource canonicalized resource string.\n *\n * @param request -\n */\n private getCanonicalizedResourceString(request: WebResource): string {\n const path = getURLPath(request.url) || \"/\";\n\n let canonicalizedResourceString: string = \"\";\n canonicalizedResourceString += `/${this.factory.accountName}${path}`;\n\n const queries = getURLQueries(request.url);\n const lowercaseQueries: { [key: string]: string } = {};\n if (queries) {\n const queryKeys: string[] = [];\n for (const key in queries) {\n if (Object.prototype.hasOwnProperty.call(queries, key)) {\n const lowercaseKey = key.toLowerCase();\n lowercaseQueries[lowercaseKey] = queries[key];\n queryKeys.push(lowercaseKey);\n }\n }\n\n queryKeys.sort();\n for (const key of queryKeys) {\n canonicalizedResourceString += `\\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;\n }\n }\n\n return canonicalizedResourceString;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js new file mode 100644 index 0000000..f5b173b --- /dev/null +++ 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, HttpHeaders, isNode, } from "@azure/core-http"; +import { HeaderConstants } from "../utils/constants"; +/** + * TelemetryPolicy is a policy used to tag user-agent header for every requests. + */ +export class TelemetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of TelemetryPolicy. + * @param nextPolicy - + * @param options - + * @param telemetry - + */ + constructor(nextPolicy, options, telemetry) { + super(nextPolicy, options); + this.telemetry = telemetry; + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (isNode) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=TelemetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map new file mode 100644 index 0000000..5b4b98b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TelemetryPolicy.js","sourceRoot":"","sources":["../../../../src/policies/TelemetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,WAAW,EAEX,MAAM,GAIP,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AAErD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IAMpD;;;;;OAKG;IACH,YAAY,UAAyB,EAAE,OAA6B,EAAE,SAAiB;QACrF,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;IAC7B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,MAAM,EAAE;YACV,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACrC;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;gBACpD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;aACjE;SACF;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpHeaders,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants } from \"../utils/constants\";\n\n/**\n * TelemetryPolicy is a policy used to tag user-agent header for every requests.\n */\nexport class TelemetryPolicy extends BaseRequestPolicy {\n /**\n * Telemetry string.\n */\n public readonly telemetry: string;\n\n /**\n * Creates an instance of TelemetryPolicy.\n * @param nextPolicy -\n * @param options -\n * @param telemetry -\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, telemetry: string) {\n super(nextPolicy, options);\n this.telemetry = telemetry;\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n if (!request.headers.get(HeaderConstants.USER_AGENT)) {\n request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);\n }\n }\n\n return 
this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js new file mode 100644 index 0000000..5203424 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js @@ -0,0 +1,130 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { delay } from "@azure/core-http"; +import { Poller } from "@azure/core-lro"; +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +export class BlobBeginCopyFromUrlPoller extends Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return delay(this.intervalInMs); + } +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? 
+ options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} +//# sourceMappingURL=BlobStartCopyFromUrlPoller.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map new file mode 100644 index 0000000..b41e869 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobStartCopyFromUrlPoller.js","sourceRoot":"","sources":["../../../../src/pollers/BlobStartCopyFromUrlPoller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AACzC,OAAO,EAAqC,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAsE5E;;;;;GAKG;AACH,MAAM,OAAO,0BAA2B,SAAQ,MAG/C;IAGC,YAAY,OAA0C;QACpD,MAAM,EACJ,UAAU,EACV,UAAU,EACV,YAAY,GAAG,KAAK,EACpB,UAAU,EACV,UAAU,EACV,uBAAuB,GACxB,GAAG,OAAO,CAAC;QAEZ,IAAI,KAAgD,CAAC;QAErD,IAAI,UAAU,EAAE;YACd,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC;SACtC;QAED,MAAM,SAAS,GAAG,qCAAqC,iCAClD,KAAK,KACR,UAAU;YACV,UAAU;YACV,uBAAuB,IACvB,CAAC;QAEH,KAAK,CAAC,SAAS,CAAC,CAAC;QAEjB,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;YACpC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;SAC7B;QAED,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,KAAK;QACV,OAAO,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;IAClC,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,MAAM,GAAgD,KAAK,UAAU,MAAM,CAE/E,OAAO,GAAG,EAAE;IAEZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,MAAM,EAAE,GAAG,KAAK,CAAC;IACzB,IAAI,KAAK,CAAC,WAAW,EAAE;QACrB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;KACrD;IAED,IAAI,CAAC,MAAM,EAAE;QACX,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;QACzB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;KACrD;IAED,sFAAsF;IACtF,MAAM,KAAK,CAAC,UAAU,CAAC,gBAAgB,CAAC,MAAM,EAAE;QAC9C,WAAW,EAAE,OAAO,CAAC,WAAW;KACjC,CAAC,CAAC;IACH,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IAEzB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,MAAM,GAAgD,KAAK,UAAU,MAAM,CAE/E,OAAO,GAAG,EAAE;IAEZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,uBAAuB,EAAE,GAAG,KAAK,CAAC;IAElE,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;QACpB,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,gBAAgB,CAAC,UAAU,EAAE,uBAAuB,CAAC,CAAC;QAEtF,4BAA4B;QAC5B,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC7B,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,EAAE;YACnC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;YACtB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;SAC1B;KACF;SAAM,IAAI,CAAC,KAAK,CAAC,
WAAW,EAAE;QAC7B,IAAI;YACF,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;YAC1F,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,GAAG,MAAM,CAAC;YAC5C,MAAM,gBAAgB,GAAG,KAAK,CAAC,YAAY,CAAC;YAC5C,IAAI,YAAY,EAAE;gBAChB,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC;aACnC;YACD,IACE,UAAU,KAAK,SAAS;gBACxB,YAAY,KAAK,gBAAgB;gBACjC,OAAO,OAAO,CAAC,YAAY,KAAK,UAAU,EAC1C;gBACA,2CAA2C;gBAC3C,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;aAC7B;iBAAM,IAAI,UAAU,KAAK,SAAS,EAAE;gBACnC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;gBACtB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;aAC1B;iBAAM,IAAI,UAAU,KAAK,QAAQ,EAAE;gBAClC,KAAK,CAAC,KAAK,GAAG,IAAI,KAAK,CACrB,kCAAkC,MAAM,CAAC,qBAAqB,IAAI,SAAS,GAAG,CAC/E,CAAC;gBACF,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;aAC1B;SACF;QAAC,OAAO,GAAQ,EAAE;YACjB,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC;YAClB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;SAC1B;KACF;IAED,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,QAAQ,GAAkD,SAAS,QAAQ;IAG/E,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE;QAC1D,2FAA2F;QAC3F,IAAI,GAAG,KAAK,YAAY,EAAE;YACxB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,KAAK,CAAC;IACf,CAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;GAGG;AACH,SAAS,qCAAqC,CAC5C,KAAoC;IAEpC,OAAO;QACL,KAAK,oBAAO,KAAK,CAAE;QACnB,MAAM;QACN,QAAQ;QACR,MAAM;KACP,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { delay } from \"@azure/core-http\";\nimport { PollOperation, PollOperationState, Poller } from \"@azure/core-lro\";\nimport { BlobClient, BlobStartCopyFromURLOptions, BlobBeginCopyFromURLResponse } from \"../Clients\";\n\n/**\n * Defines the operations from a {@link BlobClient} that are needed for the poller\n * returned by {@link BlobClient.beginCopyFromURL} to work.\n */\nexport type CopyPollerBlobClient = Pick & {\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptions\n ): Promise;\n};\n\n/**\n * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}.\n *\n * This state is passed into the user-specified `onProgress` callback\n * whenever copy progress is detected.\n */\nexport interface BlobBeginCopyFromUrlPollState\n extends PollOperationState {\n /**\n * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}.\n */\n readonly blobClient: CopyPollerBlobClient;\n /**\n * The copyId that identifies the in-progress blob copy.\n */\n copyId?: string;\n /**\n * the progress of the blob copy as reported by the service.\n */\n copyProgress?: string;\n /**\n * The source URL provided in {@link BlobClient.beginCopyFromURL}.\n */\n copySource: string;\n /**\n * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call.\n * This is exposed for the poller and should not be modified directly.\n */\n readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * The PollOperation responsible for:\n * - performing the initial startCopyFromURL\n * - checking the copy status via getProperties\n * - cancellation via abortCopyFromURL\n * @hidden\n */\nexport interface BlobBeginCopyFromURLPollOperation\n extends PollOperation {}\n\n/**\n * The set of options used to configure the poller.\n * This is an internal interface populated by {@link BlobClient.beginCopyFromURL}.\n *\n * @hidden\n */\nexport interface BlobBeginCopyFromUrlPollerOptions {\n blobClient: CopyPollerBlobClient;\n copySource: string;\n intervalInMs?: number;\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n resumeFrom?: string;\n 
startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * This is the poller returned by {@link BlobClient.beginCopyFromURL}.\n * This can not be instantiated directly outside of this package.\n *\n * @hidden\n */\nexport class BlobBeginCopyFromUrlPoller extends Poller<\n BlobBeginCopyFromUrlPollState,\n BlobBeginCopyFromURLResponse\n> {\n public intervalInMs: number;\n\n constructor(options: BlobBeginCopyFromUrlPollerOptions) {\n const {\n blobClient,\n copySource,\n intervalInMs = 15000,\n onProgress,\n resumeFrom,\n startCopyFromURLOptions,\n } = options;\n\n let state: BlobBeginCopyFromUrlPollState | undefined;\n\n if (resumeFrom) {\n state = JSON.parse(resumeFrom).state;\n }\n\n const operation = makeBlobBeginCopyFromURLPollOperation({\n ...state,\n blobClient,\n copySource,\n startCopyFromURLOptions,\n });\n\n super(operation);\n\n if (typeof onProgress === \"function\") {\n this.onProgress(onProgress);\n }\n\n this.intervalInMs = intervalInMs;\n }\n\n public delay(): Promise {\n return delay(this.intervalInMs);\n }\n}\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst cancel: BlobBeginCopyFromURLPollOperation[\"cancel\"] = async function cancel(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n) {\n const state = this.state;\n const { copyId } = state;\n if (state.isCompleted) {\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n if (!copyId) {\n state.isCancelled = true;\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n await state.blobClient.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n });\n state.isCancelled = true;\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst update: BlobBeginCopyFromURLPollOperation[\"update\"] = async function update(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n): Promise {\n const state = this.state;\n const { blobClient, copySource, startCopyFromURLOptions } = state;\n\n if (!state.isStarted) {\n state.isStarted = true;\n const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions);\n\n // copyId is needed to abort\n state.copyId = result.copyId;\n if (result.copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n } else if (!state.isCompleted) {\n try {\n const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal });\n const { copyStatus, copyProgress } = result;\n const prevCopyProgress = state.copyProgress;\n if (copyProgress) {\n state.copyProgress = copyProgress;\n }\n if (\n copyStatus === \"pending\" &&\n copyProgress !== prevCopyProgress &&\n typeof options.fireProgress === \"function\"\n ) {\n // trigger in setTimeout, or swallow error?\n options.fireProgress(state);\n } else if (copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n } else if (copyStatus === \"failed\") {\n state.error = new Error(\n `Blob copy failed with reason: \"${result.copyStatusDescription || \"unknown\"}\"`\n );\n state.isCompleted = true;\n }\n } catch (err: any) {\n state.error = err;\n 
state.isCompleted = true;\n }\n }\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst toString: BlobBeginCopyFromURLPollOperation[\"toString\"] = function toString(\n this: BlobBeginCopyFromURLPollOperation\n) {\n return JSON.stringify({ state: this.state }, (key, value) => {\n // remove blobClient from serialized state since a client can't be hydrated from this info.\n if (key === \"blobClient\") {\n return undefined;\n }\n return value;\n });\n};\n\n/**\n * Creates a poll operation given the provided state.\n * @hidden\n */\nfunction makeBlobBeginCopyFromURLPollOperation(\n state: BlobBeginCopyFromUrlPollState\n): BlobBeginCopyFromURLPollOperation {\n return {\n state: { ...state },\n cancel,\n toString,\n update,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js new file mode 100644 index 0000000..1410738 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to create blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. 
+ * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=AccountSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map new file mode 100644 index 0000000..c1e0017 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,qBAAqB;IAAlC;QA4GE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;IA0D1C,CAAC;IApOC;;;;OAIG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAE1D,KAAK,MAAM,CAAC,IAAI,WAAW,EAAE;YAC3B,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC3C,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACrC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACnD,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC7C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,EAAE,CAAC,CAAC;aAC9D;SACF;QAED,OAAO,qBAAqB,CAAC;IAC/B,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAAyC;QAC1D,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAC1D,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;SACnC;Q
ACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;SAC5C;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;SACnC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;SACtC;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;YACxC,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACpD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;SAC9C;QACD,OAAO,qBAAqB,CAAC;IAC/B,CAAC;IAmED;;;;;;;;;OASG;IACI,QAAQ;QACb,iFAAiF;QACjF,wFAAwF;QACxF,iFAAiF;QACjF,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the\n * values are set, this should be serialized with toString and set as the permissions field on an\n * {@link AccountSASSignatureValues} object. 
It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class AccountSASPermissions {\n /**\n * Parse initializes the AccountSASPermissions fields from a string.\n *\n * @param permissions -\n */\n public static parse(permissions: string): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n\n for (const c of permissions) {\n switch (c) {\n case \"r\":\n accountSASPermissions.read = true;\n break;\n case \"w\":\n accountSASPermissions.write = true;\n break;\n case \"d\":\n accountSASPermissions.delete = true;\n break;\n case \"x\":\n accountSASPermissions.deleteVersion = true;\n break;\n case \"l\":\n accountSASPermissions.list = true;\n break;\n case \"a\":\n accountSASPermissions.add = true;\n break;\n case \"c\":\n accountSASPermissions.create = true;\n break;\n case \"u\":\n accountSASPermissions.update = true;\n break;\n case \"p\":\n accountSASPermissions.process = true;\n break;\n case \"t\":\n accountSASPermissions.tag = true;\n break;\n case \"f\":\n accountSASPermissions.filter = true;\n break;\n case \"i\":\n accountSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n accountSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission character: ${c}`);\n }\n }\n\n return accountSASPermissions;\n }\n\n /**\n * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n if (permissionLike.read) {\n accountSASPermissions.read = true;\n }\n if (permissionLike.write) {\n accountSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n accountSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n accountSASPermissions.deleteVersion = true;\n }\n if (permissionLike.filter) {\n accountSASPermissions.filter = true;\n }\n if (permissionLike.tag) {\n accountSASPermissions.tag = true;\n }\n if (permissionLike.list) {\n accountSASPermissions.list = true;\n }\n if (permissionLike.add) {\n accountSASPermissions.add = true;\n }\n if (permissionLike.create) {\n accountSASPermissions.create = true;\n }\n if (permissionLike.update) {\n accountSASPermissions.update = true;\n }\n if (permissionLike.process) {\n accountSASPermissions.process = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n accountSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n accountSASPermissions.permanentDelete = true;\n }\n return accountSASPermissions;\n }\n\n /**\n * Permission to read resources and list queues and tables granted.\n */\n public read: boolean = false;\n\n /**\n * Permission to write resources granted.\n */\n public write: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public delete: boolean = false;\n\n /**\n * Permission to delete versions granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n public list: boolean = false;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n public add: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public create: boolean = false;\n\n 
/**\n * Permissions to update messages and table entities granted.\n */\n public update: boolean = false;\n\n /**\n * Permission to get and delete messages granted.\n */\n public process: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Permission to filter blobs.\n */\n public filter: boolean = false;\n\n /**\n * Permission to set immutability policy.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Produces the SAS permissions string for an Azure Storage account.\n * Call this method to set AccountSASSignatureValues Permissions field.\n *\n * Using this method will guarantee the resource types are in\n * an order accepted by the service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n // The order of the characters should be as specified here to ensure correctness:\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n // Use a string array instead of string concatenating += operator for performance\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.filter) {\n permissions.push(\"f\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.update) {\n permissions.push(\"u\");\n }\n if (this.process) {\n permissions.push(\"p\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like an account SAS permission.\n * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface AccountSASPermissionsLike {\n /**\n * Permission to read resources and list queues and tables granted.\n */\n read?: boolean;\n\n /**\n * Permission to write resources granted.\n */\n write?: boolean;\n\n /**\n * Permission to delete blobs and files granted.\n */\n delete?: boolean;\n\n /**\n * Permission to delete versions granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n list?: boolean;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n add?: boolean;\n\n /**\n * Permission to create blobs and files granted.\n */\n create?: boolean;\n\n /**\n * Permissions to update messages and table entities granted.\n */\n update?: boolean;\n\n /**\n * Permission to get and delete messages granted.\n */\n process?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Permission to filter blobs.\n */\n filter?: boolean;\n\n /**\n * Permission to set immutability policy.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js new file mode 100644 index 0000000..7780df0 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} +//# sourceMappingURL=AccountSASResourceTypes.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map new file mode 100644 index 0000000..23f49a5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASResourceTypes.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASResourceTypes.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,uBAAuB;IAApC;QA6BE;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,cAAS,GAAY,KAAK,CAAC;QAElC;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;IAqBjC,CAAC;IA9DC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,aAAqB;QACvC,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAE9D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;YAC7B,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,SAAS,GAAG,IAAI,CAAC;oBACzC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,0BAA0B,CAAC,EAAE,CAAC,CAAC;aACvD;SACF;QAED,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAiBD;;;;;OAKG;IACI,QAAQ;QACb,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,OAAO,aAAa,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAChC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the\n * values are set, this should be serialized with toString and set as the resources field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but\n * the order of the resources is particular and this class guarantees correctness.\n */\nexport class AccountSASResourceTypes {\n /**\n * Creates an {@link AccountSASResourceTypes} from the specified resource types string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid resource type.\n *\n * @param resourceTypes -\n */\n public static parse(resourceTypes: string): AccountSASResourceTypes {\n const accountSASResourceTypes = new AccountSASResourceTypes();\n\n for (const c of resourceTypes) {\n switch (c) {\n case \"s\":\n accountSASResourceTypes.service = true;\n break;\n case \"c\":\n accountSASResourceTypes.container = true;\n break;\n case \"o\":\n accountSASResourceTypes.object = true;\n break;\n default:\n throw new RangeError(`Invalid resource type: ${c}`);\n }\n }\n\n return accountSASResourceTypes;\n }\n\n /**\n * Permission to access service level APIs granted.\n */\n public service: boolean = false;\n\n /**\n * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.\n */\n public container: boolean = false;\n\n /**\n * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.\n */\n public object: boolean = false;\n\n /**\n * Converts the given resource types to a string.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n const resourceTypes: string[] = [];\n if (this.service) {\n resourceTypes.push(\"s\");\n }\n if (this.container) {\n resourceTypes.push(\"c\");\n }\n if (this.object) {\n resourceTypes.push(\"o\");\n }\n return resourceTypes.join(\"\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js new file mode 100644 index 0000000..2e6f1e7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +export class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. 
+ * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} +//# sourceMappingURL=AccountSASServices.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map new file mode 100644 index 0000000..1a36f19 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASServices.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASServices.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAA/B;QAgCE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;IAsBhC,CAAC;IAvEC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,QAAgB;QAClC,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QAEpD,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE;YACxB,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,EAAE,CAAC,CAAC;aAC3D;SACF;QAED,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAsBD;;;OAGG;IACI,QAAQ;QACb,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that service. Once all the\n * values are set, this should be serialized with toString and set as the services field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but\n * the order of the services is particular and this class guarantees correctness.\n */\nexport class AccountSASServices {\n /**\n * Creates an {@link AccountSASServices} from the specified services string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid service.\n *\n * @param services -\n */\n public static parse(services: string): AccountSASServices {\n const accountSASServices = new AccountSASServices();\n\n for (const c of services) {\n switch (c) {\n case \"b\":\n accountSASServices.blob = true;\n break;\n case \"f\":\n accountSASServices.file = true;\n break;\n case \"q\":\n accountSASServices.queue = true;\n break;\n case \"t\":\n accountSASServices.table = true;\n break;\n default:\n throw new RangeError(`Invalid service character: ${c}`);\n }\n }\n\n return accountSASServices;\n }\n\n /**\n * Permission to access blob resources granted.\n */\n public blob: boolean = false;\n\n /**\n * Permission to access file resources granted.\n */\n public file: boolean = false;\n\n /**\n * Permission to access queue resources granted.\n */\n public queue: boolean = false;\n\n /**\n * Permission to access table resources granted.\n */\n public table: boolean = false;\n\n /**\n * Converts the given services to a string.\n *\n */\n public toString(): string {\n const services: string[] = [];\n if (this.blob) {\n services.push(\"b\");\n }\n if (this.table) {\n services.push(\"t\");\n }\n if (this.queue) {\n services.push(\"q\");\n }\n if (this.file) {\n services.push(\"f\");\n }\n return services.join(\"\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js new file mode 100644 index 0000000..ceaf07c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AccountSASPermissions } from "./AccountSASPermissions"; +import { AccountSASResourceTypes } from "./AccountSASResourceTypes"; +import { AccountSASServices } from "./AccountSASServices"; +import { ipRangeToString } from "./SasIPRange"; +import { SASQueryParameters } from "./SASQueryParameters"; +import { SERVICE_VERSION } from "../utils/constants"; +import { truncatedISO8061Date } from "../utils/utils.common"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} +//# sourceMappingURL=AccountSASSignatureValues.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map new file mode 100644 index 0000000..7e87510 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASSignatureValues.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASSignatureValues.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AACpE,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAE1D,OAAO,EAAc,eAAe,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAe,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAmE7D;;;;;;;;;;GAUG;AACH,MAAM,UAAU,iCAAiC,CAC/C,yBAAoD,EACpD,mBAA+C;IAE/C,MAAM,OAAO,GAAG,yBAAyB,CAAC,OAAO;QAC/C,CAAC,CAAC,yBAAyB,CAAC,OAAO;QACnC,CAAC,CAAC,eAAe,CAAC;IAEpB,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,qBAAqB;QAC3D,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,aAAa;QACnD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,eAAe;QACrD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,GAAG;QACzC,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,MAAM;QAC5C,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IAAI,yBAAyB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;QACvE,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;KAC/F;IAED,MAAM,iBAAiB,GAAG,qBAAqB,CAAC,KAAK,CACnD,yBAAyB,CAAC,WAAW,CAAC,QAAQ,EAAE,CACjD,CAAC;IACF,MAAM,cAAc,GAAG,kBAAkB,CAAC,KAAK,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;IAC/F,MAAM,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACvD,yBAAyB,CAAC,aAAa,CACxC,CAAC,QAAQ,EAAE,CAAC;IAEb,IAAI,YAAoB,CAAC;IAEzB,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,YAAY,GAAG;YACb,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;YACnB,yBAAyB,CAAC,QAAQ;gBAChC,CAAC,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBACjE,CAAC,CAAC,EAAE;YACN,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;YAChE,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC5E,OAAO;YACP,yBAAyB,CAAC,eAAe,CAAC,CAAC,CAAC,yBAAyB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;YAC1F,EAAE,EAAE,uDAAuD;SAC5D,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACd;SAAM;QACL,YAAY,GAAG;YACb,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;YACnB,yBAAyB,CAAC,QAAQ;gBAChC,CAAC,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBACjE,CAAC,CAAC,EAAE;YACN,oBAAoB,CAAC,yBAA
yB,CAAC,SAAS,EAAE,KAAK,CAAC;YAChE,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC5E,OAAO;YACP,EAAE,EAAE,uDAAuD;SAC5D,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACd;IAED,MAAM,SAAS,GAAW,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAE9E,OAAO,IAAI,kBAAkB,CAC3B,OAAO,EACP,SAAS,EACT,iBAAiB,CAAC,QAAQ,EAAE,EAC5B,cAAc,EACd,mBAAmB,EACnB,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,SAAS,EACnC,yBAAyB,CAAC,OAAO,EACjC,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,yBAAyB,CAAC,eAAe,CAC1C,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccountSASPermissions } from \"./AccountSASPermissions\";\nimport { AccountSASResourceTypes } from \"./AccountSASResourceTypes\";\nimport { AccountSASServices } from \"./AccountSASServices\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once\n * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation\n * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters}\n * exist because the former is mutable and a logical representation while the latter is immutable and used to generate\n * actual REST requests.\n *\n * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1\n * for more conceptual information on SAS\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n * for descriptions of the parameters, including which are required\n */\nexport interface AccountSASSignatureValues {\n /**\n * If not provided, this defaults to the service version targeted by this version of the library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * The time after which the SAS will no longer work.\n */\n expiresOn: Date;\n\n /**\n * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help\n * constructing the permissions string.\n */\n permissions: AccountSASPermissions;\n\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n\n /**\n * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to\n * construct this value.\n */\n services: string;\n\n /**\n * The values that indicate the resource types accessible with this SAS. Please refer\n * to {@link AccountSASResourceTypes} to construct this value.\n */\n resourceTypes: string;\n\n /**\n * Optional. 
Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual\n * REST request.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @param accountSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateAccountSASQueryParameters(\n accountSASSignatureValues: AccountSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n const version = accountSASSignatureValues.version\n ? accountSASSignatureValues.version\n : SERVICE_VERSION;\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'x' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'y' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 't' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.filter &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 'f' permission.\");\n }\n\n if (accountSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n const parsedPermissions = AccountSASPermissions.parse(\n accountSASSignatureValues.permissions.toString()\n );\n const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();\n const parsedResourceTypes = AccountSASResourceTypes.parse(\n accountSASSignatureValues.resourceTypes\n ).toString();\n\n let stringToSign: string;\n\n if (version >= \"2020-12-06\") {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : \"\",\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n } else {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? 
ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n }\n\n const signature: string = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n version,\n signature,\n parsedPermissions.toString(),\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.protocol,\n accountSASSignatureValues.startsOn,\n accountSASSignatureValues.expiresOn,\n accountSASSignatureValues.ipRange,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n accountSASSignatureValues.encryptionScope\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js new file mode 100644 index 0000000..ea435c1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
+ * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. 
+ * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=BlobSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map new file mode 100644 index 0000000..c3768ea --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/BlobSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAA/B;QAiGE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;IA6C1C,CAAC;IAlMC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QAEpD,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;YAC9B,QAAQ,IAAI,EAAE;gBACZ,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;oBACxC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBAChD,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC1C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,uBAAuB,IAAI,EAAE,CAAC,CAAC;aACvD;SACF;QAED,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAAsC;QACvD,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QACpD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;SAChC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;SAC/B;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;SACjC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;SACzC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;SAC/B;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;SAChC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;SACnC;QACD,IAAI,cAAc,CAAC,qBAAqB,E
AAE;YACxC,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACjD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;SAC3C;QACD,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAyDD;;;;;OAKG;IACI,QAAQ;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting\n * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all\n * the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class BlobSASPermissions {\n /**\n * Creates a {@link BlobSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n blobSASPermissions.read = true;\n break;\n case \"a\":\n blobSASPermissions.add = true;\n break;\n case \"c\":\n blobSASPermissions.create = true;\n break;\n case \"w\":\n blobSASPermissions.write = true;\n break;\n case \"d\":\n blobSASPermissions.delete = true;\n break;\n case \"x\":\n blobSASPermissions.deleteVersion = true;\n break;\n case \"t\":\n blobSASPermissions.tag = true;\n break;\n case \"m\":\n blobSASPermissions.move = true;\n break;\n case \"e\":\n blobSASPermissions.execute = true;\n break;\n case \"i\":\n blobSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n blobSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission: ${char}`);\n }\n }\n\n return blobSASPermissions;\n }\n\n /**\n * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n if (permissionLike.read) {\n blobSASPermissions.read = true;\n }\n if (permissionLike.add) {\n blobSASPermissions.add = true;\n }\n if (permissionLike.create) {\n blobSASPermissions.create = true;\n }\n if (permissionLike.write) {\n blobSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n blobSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n blobSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n blobSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n blobSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n blobSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n blobSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n blobSASPermissions.permanentDelete = true;\n }\n return blobSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * @returns A string which represents the BlobSASPermissions\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Blob SAS permission.\n * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface BlobSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js new file mode 100644 index 0000000..f7d47f9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js @@ -0,0 +1,555 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BlobSASPermissions } from "./BlobSASPermissions"; +import { ContainerSASPermissions } from "./ContainerSASPermissions"; +import { StorageSharedKeyCredential } from "../credentials/StorageSharedKeyCredential"; +import { UserDelegationKeyCredential } from "../credentials/UserDelegationKeyCredential"; +import { ipRangeToString } from "./SasIPRange"; +import { SASQueryParameters } from "./SASQueryParameters"; +import { SERVICE_VERSION } from "../utils/constants"; +import { truncatedISO8061Date } from "../utils/utils.common"; +export function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? 
sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; +} +//# sourceMappingURL=BlobSASSignatureValues.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map new file mode 100644 index 0000000..38f9b76 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BlobSASSignatureValues.js","sourceRoot":"","sources":["../../../../src/sas/BlobSASSignatureValues.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAE1D,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AACpE,OAAO,EAAE,0BAA0B,EAAE,MAAM,2CAA2C,CAAC;AACvF,OAAO,EAAE,2BAA2B,EAAE,MAAM,4CAA4C,CAAC;AACzF,OAAO,EAAE,eAAe,EAAc,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAe,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AA4O7D,MAAM,UAAU,8BAA8B,CAC5C,sBAA8C,EAC9C,sCAAsF,EACtF,WAAoB;IAEpB,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC;IAElG,MAAM,mBAAmB,GACvB,sCAAsC,YAAY,0BAA0B;QAC1E,CAAC,CAAC,sCAAsC;QACxC,CAAC,CAAC,SAAS,CAAC;IAChB,IAAI,2BAAoE,CAAC;IAEzE,IAAI,mBAAmB,KAAK,SAAS,IAAI,WAAW,KAAK,SAAS,EAAE;QAClE,2BAA2B,GAAG,IAAI,2BAA2B,CAC3D,WAAW,EACX,sCAA2D,CAC5D,CAAC;KACH;IAED,IAAI,mBAAmB,KAAK,SAAS,IAAI,2BAA2B,KAAK,SAAS,EAAE;QAClF,MAAM,SAAS,CAAC,gEAAgE,CAAC,CAAC;KACnF;IAED,8DAA8D;IAC9D,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;SACH;KACF;IAED,gEAAgE;IAChE,gGAAgG;IAChG,yHAAyH;IACzH,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,8HAA8H;YAC9H,IAAI,OAAO,IAAI,YAAY,EAAE;gBAC3B,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;aACH;iBAAM;gBACL,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;aACH;SACF;KACF;IAED,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,kGAAkG,CACnG,CAAC;SACH;KACF;IAED,MAAM,IAAI,UAAU,CAAC,oCAAoC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;KAChB;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI
,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IAC
vD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,SAAS,EACT,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IA
C9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,CAC9C,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;QACT,sBAAsB,CAAC,aAAa;QACpC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAC9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,CACrC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAA
E;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;QACT,sBAAsB,CAAC,aAAa;QACpC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAC9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,gBAAgB,CAAC,WAAmB,EAAE,aAAqB,EAAE,QAAiB;IACrF,2CAA2C;IAC3C,oDAAoD;IACpD,MAAM,QAAQ,GAAa,CAAC,SAAS,WAAW,IAAI,aAAa,EAAE,CAAC,CAAC;IACrE,IAAI,QAAQ,EAAE;QACZ,QAAQ,CAAC,IAAI,CAAC,IAAI,QAAQ,EAAE,CAAC,CAAC;KAC/B;IACD,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC3B,CAAC;AAED,SAAS,wCAAwC,CAC/C,sBAA8C;IAE9C,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC;IAClG,IAAI,sBAAsB,CAAC,YAAY,IAAI,OAAO,GAAG,YAAY,EAAE;QACjE,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,YAAY,EAAE;QACxF,MAAM,UAAU,CAAC,wDAAwD,CAAC,CAAC;KAC5E;IAED,IAAI,sBAAsB,CAAC,SAAS,IAAI,OAAO,GAAG,YAAY,EAAE;QAC9D,MAAM,UAAU,CAAC,+DAA+D,CAAC,CAAC;KACnF;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,SAAS,EAAE;QACrF,MAAM,UAAU,CAAC,qDAAqD,CAAC,CAAC;KACzE;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,qBAAqB;QACxD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,aAAa;QAChD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,eAAe;QAClD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,GAAG;QACtC,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,OAAO,GAAG,YAAY;QACtB,sBAAsB,CAAC,WAAW;QAClC,CAAC,sBAAsB,CAAC,WAAW,CAAC,IAAI,IAAI,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,EACvF;QACA,MAAM,UAAU,CAAC,6EAA6E,CAAC,CAAC;KACjG;IAED,IACE,OAAO,GAAG,YAAY;QACtB,sBAAsB,CAAC,WAAW;QACjC,sBAAsB,CAAC,WAAuC,CAAC,YAAY,EAC5E;QACA,MAAM,UAAU,CAAC,sEAAsE,
CAAC,CAAC;KAC1F;IAED,IACE,OAAO,GAAG,YAAY;QACtB,CAAC,sBAAsB,CAAC,0BAA0B,IAAI,sBAAsB,CAAC,aAAa,CAAC,EAC3F;QACA,MAAM,UAAU,CACd,mGAAmG,CACpG,CAAC;KACH;IAED,IAAI,sBAAsB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;QACpE,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;KAC/F;IAED,sBAAsB,CAAC,OAAO,GAAG,OAAO,CAAC;IACzC,OAAO,sBAAsB,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { BlobSASPermissions } from \"./BlobSASPermissions\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\nimport { ContainerSASPermissions } from \"./ContainerSASPermissions\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { UserDelegationKeyCredential } from \"../credentials/UserDelegationKeyCredential\";\nimport { ipRangeToString, SasIPRange } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs.\n */\nexport interface BlobSASSignatureValues {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource\n * being accessed for help constructing the permissions string.\n */\n permissions?: BlobSASPermissions | ContainerSASPermissions;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * The name of the container the SAS user may access.\n */\n containerName: string;\n\n /**\n * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided.\n */\n blobName?: string;\n\n /**\n * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09.\n */\n snapshotTime?: string;\n\n /**\n * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10.\n */\n versionId?: string;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n\n /**\n * Optional. 
Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user\n * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will\n * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission\n * check for the user specified in this value will be performed. This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to\n * correlate SAS generation with storage resource access. This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * Fill in the required details before running the following snippets.\n *\n * Example usage:\n *\n * ```js\n * // Generate service level SAS for a container\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using an identifier:\n *\n * ```js\n * // Generate service level SAS for a container with identifier\n * // startsOn & permissions are optional when identifier is provided\n * const identifier = \"unique-id\";\n * await containerClient.setAccessPolicy(undefined, [\n * {\n * accessPolicy: {\n * expiresOn: new Date(new Date().valueOf() + 86400), // Date type\n * permissions: ContainerSASPermissions.parse(\"racwdl\").toString(),\n * startsOn: new Date() // Date type\n * },\n * id: identifier\n * }\n * ]);\n *\n * const containerSAS = generateBlobSASQueryParameters(\n * {\n * containerName, // Required\n * identifier // Required\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using a blob name:\n *\n * ```js\n * // Generate service level SAS for a blob\n * const blobSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * blobName, // Required\n * permissions: BlobSASPermissions.parse(\"racwd\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type\n * cacheControl: \"cache-control-override\", // Optional\n * contentDisposition: \"content-disposition-override\", // Optional\n * contentEncoding: \"content-encoding-override\", // Optional\n * contentLanguage: \"content-language-override\", // Optional\n * contentType: \"content-type-override\", // Optional\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters;\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required.\n *\n * Example usage:\n *\n * ```js\n * // Generate user delegation SAS for a container\n * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn, // Optional. Date type\n * expiresOn, // Required. Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2018-11-09\" // Must greater than or equal to 2018-11-09 to generate user delegation SAS\n * },\n * userDelegationKey, // UserDelegationKey\n * accountName\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`\n * @param accountName -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKey: UserDelegationKey,\n accountName: string\n): SASQueryParameters;\n\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredentialOrUserDelegationKey: StorageSharedKeyCredential | UserDelegationKey,\n accountName?: string\n): SASQueryParameters {\n const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n\n const sharedKeyCredential =\n sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential\n ? 
sharedKeyCredentialOrUserDelegationKey\n : undefined;\n let userDelegationKeyCredential: UserDelegationKeyCredential | undefined;\n\n if (sharedKeyCredential === undefined && accountName !== undefined) {\n userDelegationKeyCredential = new UserDelegationKeyCredential(\n accountName,\n sharedKeyCredentialOrUserDelegationKey as UserDelegationKey\n );\n }\n\n if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {\n throw TypeError(\"Invalid sharedKeyCredential, userDelegationKey or accountName.\");\n }\n\n // Version 2020-12-06 adds support for encryptionscope in SAS.\n if (version >= \"2020-12-06\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential);\n } else {\n return generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n\n // Version 2019-12-12 adds support for the blob tags permission.\n // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string\n if (version >= \"2018-11-09\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);\n } else {\n // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.\n if (version >= \"2020-02-10\") {\n return generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n } else {\n return generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n }\n\n if (version >= \"2015-04-05\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);\n } else {\n throw new RangeError(\n \"'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.\"\n );\n }\n }\n\n throw new RangeError(\"'version' must be >= '2015-04-05'.\");\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20150405(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl ? 
blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n undefined,\n undefined,\n undefined,\n blobSASSignatureValues.encryptionScope\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.encryptionScope\n );\n}\n\nfunction getCanonicalName(accountName: string, containerName: string, blobName?: string): string {\n // Container: \"/blob/account/containerName\"\n // Blob: \"/blob/account/containerName/blobName\"\n const elements: string[] = [`/blob/${accountName}/${containerName}`];\n if (blobName) {\n elements.push(`/${blobName}`);\n }\n return elements.join(\"\");\n}\n\nfunction SASSignatureValuesSanityCheckAndAutofill(\n blobSASSignatureValues: BlobSASSignatureValues\n): BlobSASSignatureValues {\n const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION;\n if (blobSASSignatureValues.snapshotTime && version < \"2018-11-09\") {\n throw RangeError(\"'version' must be >= '2018-11-09' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {\n throw RangeError(\"Must provide 'blobName' when providing 'snapshotTime'.\");\n }\n\n if (blobSASSignatureValues.versionId && version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {\n throw RangeError(\"Must provide 'blobName' when providing 'versionId'.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'x' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'y' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when providing 't' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)\n ) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.\");\n }\n\n if (\n version < \"2021-04-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions as ContainerSASPermissions).filterByTags\n ) {\n throw RangeError(\"'version' must be >= '2021-04-10' when providing the 'f' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)\n ) {\n throw RangeError(\n \"'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.\"\n );\n }\n\n if (blobSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n blobSASSignatureValues.version = version;\n return blobSASSignatureValues;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js new file mode 100644 index 0000000..d25bfd4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
+ * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=ContainerSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map new file mode 100644 index 0000000..068b7ab --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"ContainerSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/ContainerSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;GAMG;AACH,MAAM,OAAO,uBAAuB;IAApC;QA6GE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;QAExC;;WAEG;QACI,iBAAY,GAAY,KAAK,CAAC;IAqDvC,CAAC;IAhOC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAE9D,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;YAC9B,QAAQ,IAAI,EAAE;gBACZ,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACrC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC7C,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACrD,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC/C,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;oBAC5C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,sBAAsB,IAAI,EAAE,CAAC,CAAC;aACtD;SACF;QAED,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAA2C;QAC5D,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAC9D,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;SACvC;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;SACtC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;SACvC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;SAC9C;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;SACxC;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;YACxC,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACtD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;SAChD;QACD,IAAI,cAAc,CAAC,YAAY,EAAE;YAC/B,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;SAC7C;QACD,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAmED;;;;;;;OAOG;IACI,QAAQ;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,I
AAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.\n * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.\n * Once all the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class ContainerSASPermissions {\n /**\n * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n containerSASPermissions.read = true;\n break;\n case \"a\":\n containerSASPermissions.add = true;\n break;\n case \"c\":\n containerSASPermissions.create = true;\n break;\n case \"w\":\n containerSASPermissions.write = true;\n break;\n case \"d\":\n containerSASPermissions.delete = true;\n break;\n case \"l\":\n containerSASPermissions.list = true;\n break;\n case \"t\":\n containerSASPermissions.tag = true;\n break;\n case \"x\":\n containerSASPermissions.deleteVersion = true;\n break;\n case \"m\":\n containerSASPermissions.move = true;\n break;\n case \"e\":\n containerSASPermissions.execute = true;\n break;\n case \"i\":\n containerSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n containerSASPermissions.permanentDelete = true;\n break;\n case \"f\":\n containerSASPermissions.filterByTags = true;\n break;\n default:\n throw new RangeError(`Invalid permission ${char}`);\n }\n }\n\n return containerSASPermissions;\n }\n\n /**\n * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n if (permissionLike.read) {\n containerSASPermissions.read = true;\n }\n if (permissionLike.add) {\n containerSASPermissions.add = true;\n }\n if (permissionLike.create) {\n containerSASPermissions.create = true;\n }\n if (permissionLike.write) {\n containerSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n containerSASPermissions.delete = true;\n }\n if (permissionLike.list) {\n containerSASPermissions.list = true;\n }\n if (permissionLike.deleteVersion) {\n containerSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n containerSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n 
containerSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n containerSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n containerSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n containerSASPermissions.permanentDelete = true;\n }\n if (permissionLike.filterByTags) {\n containerSASPermissions.filterByTags = true;\n }\n return containerSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specifies List access granted.\n */\n public list: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n public filterByTags: boolean = false;\n\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * The order of the characters should be as specified here to ensure correctness.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n if (this.filterByTags) {\n permissions.push(\"f\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Container SAS permission.\n * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface ContainerSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specifies List access granted.\n */\n list?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n 
move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n filterByTags?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js new file mode 100644 index 0000000..c3869d6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { ipRangeToString } from "./SasIPRange"; +import { truncatedISO8061Date } from "../utils/utils.common"; +/** + * Protocols for generated SAS. + */ +export var SASProtocol; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(SASProtocol || (SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. 
+ */ +export class SASQueryParameters { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + "sktid", + "skt", + "ske", + "sks", + "skv", + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. 
+ * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } +} +//# sourceMappingURL=SASQueryParameters.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map new file mode 100644 index 0000000..7b597a2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SASQueryParameters.js","sourceRoot":"","sources":["../../../../src/sas/SASQueryParameters.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAc,eAAe,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAG7D;;GAEG;AACH,MAAM,CAAN,IAAY,WAUX;AAVD,WAAY,WAAW;IACrB;;OAEG;IACH,8BAAe,CAAA;IAEf;;OAEG;IACH,0CAA2B,CAAA;AAC7B,CAAC,EAVW,WAAW,KAAX,WAAW,QAUtB;AA4FD;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAsN7B,YACE,OAAe,EACf,SAAiB,EACjB,oBAAyD,EACzD,QAAiB,EACjB,aAAsB,EACtB,QAAsB,EACtB,QAAe,EACf,SAAgB,EAChB,OAAoB,EACpB,UAAmB,EACnB,QAAiB,EACjB,YAAqB,EACrB,kBAA2B,EAC3B,eAAwB,EACxB,eAAwB,EACxB,WAAoB,EACpB,iBAAqC,EACrC,0BAAmC,EACnC,aAAsB,EACtB,eAAwB;QAExB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,oBAAoB,KAAK,SAAS,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;YAClF,4BAA4B;YAC5B,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YACpD,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;YACxD,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;YAChD,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,OAAO,CAAC;YACjD,IAAI,CAAC,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;YAClD,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,YAAY,CAAC;YACtD,IAAI,CAAC,kBAAkB,GAAG,oBAAoB,CAAC,kBAAkB,CAAC;YAClE,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YAEpD,IAAI,oBAAoB,CAAC,iBAAiB,EAAE;gBAC1C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBACvE,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,eAAe,CAAC;gBAC9E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAC1E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAE1E,IAAI,CAAC,0BAA0B,GAAG,oBAAoB,CAAC,0BAA0B,CAAC;gBAClF,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;aACzD;SACF;aAAM;YACL,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;YAC3B,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC;YACxC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC;YAC5B,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;YAC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;YACjC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;YAE/B,IAAI,iBAAiB,EAAE;gBACrB,IAAI,CAAC,SAAS,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBAClD,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBACvD,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBACvD,IAAI,CAAC,eAAe,GAAG,iB
AAiB,CAAC,eAAe,CAAC;gBACzD,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;gBACrD,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;gBAErD,IAAI,CAAC,0BAA0B,GAAG,0BAA0B,CAAC;gBAC7D,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;aACpC;SACF;IACH,CAAC;IA1JD;;;;OAIG;IACH,IAAW,OAAO;QAChB,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,OAAO;gBACL,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,GAAG;gBAC1B,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,KAAK;aAC/B,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IA+ID;;;OAGG;IACI,QAAQ;QACb,MAAM,MAAM,GAAa;YACvB,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,IAAI;YACJ,KAAK;YACL,OAAO;YACP,OAAO;YACP,KAAK;YACL,KAAK;YACL,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,OAAO;YACP,MAAM;SACP,CAAC;QACF,MAAM,OAAO,GAAa,EAAE,CAAC;QAE7B,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;YAC1B,QAAQ,KAAK,EAAE;gBACb,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;oBAC3D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACvE,CAAC;oBACF,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACzE,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,SAAS,CACzD,CAAC;oBACF,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;oBAC9D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,OAAO,EAAE,mBAAmB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,OAAO,EAAE,mBAAmB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;oBAClE,MAAM;gBACR,KAAK,KAAK,EAAE,wBAAwB;oBAClC,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,cAAc,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACnF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK,EAAE,yBAAyB;oBACnC,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACrF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK,EAAE,qBAAqB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK,EAAE,qBAAqB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;oBAChE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,kBAAkB,CAAC,CAAC;oBACtE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IA
AI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,0BAA0B,CAAC,CAAC;oBAC9E,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;aACT;SACF;QACD,OAAO,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;;OAMG;IACK,uBAAuB,CAAC,OAAiB,EAAE,GAAW,EAAE,KAAc;QAC5E,IAAI,CAAC,KAAK,EAAE;YACV,OAAO;SACR;QAED,GAAG,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC;QAC9B,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;QAClC,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,KAAK,EAAE,CAAC,CAAC;SACjC;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * Protocols for generated SAS.\n */\nexport enum SASProtocol {\n /**\n * Protocol that allows HTTPS only\n */\n Https = \"https\",\n\n /**\n * Protocol that allows both HTTPS and HTTP\n */\n HttpsAndHttp = \"https,http\",\n}\n\n/**\n * Options to construct {@link SASQueryParameters}.\n */\nexport interface SASQueryParametersOptions {\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n permissions?: string;\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n services?: string;\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n resourceTypes?: string;\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n protocol?: SASProtocol;\n /**\n * Optional. The start time for this SAS token.\n */\n startsOn?: Date;\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n expiresOn?: Date;\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n resource?: string;\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n cacheControl?: string;\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n contentDisposition?: string;\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n contentEncoding?: string;\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n contentLanguage?: string;\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n contentType?: string;\n /**\n * User delegation key properties.\n */\n userDelegationKey?: UserDelegationKey;\n /**\n * Authorized AAD Object ID in GUID format. 
The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}.\n * This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly\n * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}\n * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should\n * be taken here in case there are existing query parameters, which might affect the appropriate means of appending\n * these query parameters).\n *\n * NOTE: Instances of this class are immutable.\n */\nexport class SASQueryParameters {\n /**\n * The storage API version.\n */\n public readonly version: string;\n\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n public readonly protocol?: SASProtocol;\n\n /**\n * Optional. The start time for this SAS token.\n */\n public readonly startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n public readonly expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n public readonly permissions?: string;\n\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n public readonly services?: string;\n\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n public readonly resourceTypes?: string;\n\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n public readonly identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n public readonly encryptionScope?: string;\n\n /**\n * Optional. 
Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n public readonly resource?: string;\n\n /**\n * The signature for the SAS token.\n */\n public readonly signature: string;\n\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n public readonly cacheControl?: string;\n\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n public readonly contentDisposition?: string;\n\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n public readonly contentEncoding?: string;\n\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n public readonly contentLanguage?: string;\n\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n public readonly contentType?: string;\n\n /**\n * Inner value of getter ipRange.\n */\n private readonly ipRangeInner?: SasIPRange;\n\n /**\n * The Azure Active Directory object ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedOid?: string;\n\n /**\n * The Azure Active Directory tenant ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedTenantId?: string;\n\n /**\n * The date-time the key is active.\n * Property of user delegation key.\n */\n private readonly signedStartsOn?: Date;\n\n /**\n * The date-time the key expires.\n * Property of user delegation key.\n */\n private readonly signedExpiresOn?: Date;\n\n /**\n * Abbreviation of the Azure Storage service that accepts the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedService?: string;\n\n /**\n * The service version that created the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedVersion?: string;\n\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This is only used for User Delegation SAS.\n */\n public readonly preauthorizedAgentObjectId?: string;\n\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n public readonly correlationId?: string;\n\n /**\n * Optional. 
IP range allowed for this SAS.\n *\n * @readonly\n */\n public get ipRange(): SasIPRange | undefined {\n if (this.ipRangeInner) {\n return {\n end: this.ipRangeInner.end,\n start: this.ipRangeInner.start,\n };\n }\n return undefined;\n }\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param permissions - Representing the storage permissions\n * @param services - Representing the storage services being accessed (only for Account SAS)\n * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS)\n * @param protocol - Representing the allowed HTTP protocol(s)\n * @param startsOn - Representing the start time for this SAS token\n * @param expiresOn - Representing the expiry time for this SAS token\n * @param ipRange - Representing the range of valid IP addresses for this SAS token\n * @param identifier - Representing the signed identifier (only for Service SAS)\n * @param resource - Representing the storage container or blob (only for Service SAS)\n * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS)\n * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS)\n * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS)\n * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS)\n * @param contentType - Representing the content-type header (only for Blob/File Service SAS)\n * @param userDelegationKey - Representing the user delegation key properties\n * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS)\n * @param correlationId - Representing the correlation ID (only for User Delegation SAS)\n * @param encryptionScope -\n */\n constructor(\n version: string,\n signature: string,\n permissions?: string,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n );\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param options - Optional. 
Options to construct the SASQueryParameters.\n */\n constructor(version: string, signature: string, options?: SASQueryParametersOptions);\n\n constructor(\n version: string,\n signature: string,\n permissionsOrOptions?: string | SASQueryParametersOptions,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n ) {\n this.version = version;\n this.signature = signature;\n\n if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== \"string\") {\n // SASQueryParametersOptions\n this.permissions = permissionsOrOptions.permissions;\n this.services = permissionsOrOptions.services;\n this.resourceTypes = permissionsOrOptions.resourceTypes;\n this.protocol = permissionsOrOptions.protocol;\n this.startsOn = permissionsOrOptions.startsOn;\n this.expiresOn = permissionsOrOptions.expiresOn;\n this.ipRangeInner = permissionsOrOptions.ipRange;\n this.identifier = permissionsOrOptions.identifier;\n this.encryptionScope = permissionsOrOptions.encryptionScope;\n this.resource = permissionsOrOptions.resource;\n this.cacheControl = permissionsOrOptions.cacheControl;\n this.contentDisposition = permissionsOrOptions.contentDisposition;\n this.contentEncoding = permissionsOrOptions.contentEncoding;\n this.contentLanguage = permissionsOrOptions.contentLanguage;\n this.contentType = permissionsOrOptions.contentType;\n\n if (permissionsOrOptions.userDelegationKey) {\n this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;\n this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;\n this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;\n this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;\n this.signedService = permissionsOrOptions.userDelegationKey.signedService;\n this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;\n this.correlationId = permissionsOrOptions.correlationId;\n }\n } else {\n this.services = services;\n this.resourceTypes = resourceTypes;\n this.expiresOn = expiresOn;\n this.permissions = permissionsOrOptions;\n this.protocol = protocol;\n this.startsOn = startsOn;\n this.ipRangeInner = ipRange;\n this.encryptionScope = encryptionScope;\n this.identifier = identifier;\n this.resource = resource;\n this.cacheControl = cacheControl;\n this.contentDisposition = contentDisposition;\n this.contentEncoding = contentEncoding;\n this.contentLanguage = contentLanguage;\n this.contentType = contentType;\n\n if (userDelegationKey) {\n this.signedOid = userDelegationKey.signedObjectId;\n this.signedTenantId = userDelegationKey.signedTenantId;\n this.signedStartsOn = userDelegationKey.signedStartsOn;\n this.signedExpiresOn = userDelegationKey.signedExpiresOn;\n this.signedService = userDelegationKey.signedService;\n this.signedVersion = userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;\n this.correlationId = correlationId;\n }\n }\n }\n\n /**\n * Encodes all SAS query parameters into a string that can be appended to a URL.\n *\n */\n public toString(): string {\n 
const params: string[] = [\n \"sv\",\n \"ss\",\n \"srt\",\n \"spr\",\n \"st\",\n \"se\",\n \"sip\",\n \"si\",\n \"ses\",\n \"skoid\", // Signed object ID\n \"sktid\", // Signed tenant ID\n \"skt\", // Signed key start time\n \"ske\", // Signed key expiry time\n \"sks\", // Signed key service\n \"skv\", // Signed key version\n \"sr\",\n \"sp\",\n \"sig\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"saoid\",\n \"scid\",\n ];\n const queries: string[] = [];\n\n for (const param of params) {\n switch (param) {\n case \"sv\":\n this.tryAppendQueryParameter(queries, param, this.version);\n break;\n case \"ss\":\n this.tryAppendQueryParameter(queries, param, this.services);\n break;\n case \"srt\":\n this.tryAppendQueryParameter(queries, param, this.resourceTypes);\n break;\n case \"spr\":\n this.tryAppendQueryParameter(queries, param, this.protocol);\n break;\n case \"st\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined\n );\n break;\n case \"se\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined\n );\n break;\n case \"sip\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.ipRange ? ipRangeToString(this.ipRange) : undefined\n );\n break;\n case \"si\":\n this.tryAppendQueryParameter(queries, param, this.identifier);\n break;\n case \"ses\":\n this.tryAppendQueryParameter(queries, param, this.encryptionScope);\n break;\n case \"skoid\": // Signed object ID\n this.tryAppendQueryParameter(queries, param, this.signedOid);\n break;\n case \"sktid\": // Signed tenant ID\n this.tryAppendQueryParameter(queries, param, this.signedTenantId);\n break;\n case \"skt\": // Signed key start time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined\n );\n break;\n case \"ske\": // Signed key expiry time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined\n );\n break;\n case \"sks\": // Signed key service\n this.tryAppendQueryParameter(queries, param, this.signedService);\n break;\n case \"skv\": // Signed key version\n this.tryAppendQueryParameter(queries, param, this.signedVersion);\n break;\n case \"sr\":\n this.tryAppendQueryParameter(queries, param, this.resource);\n break;\n case \"sp\":\n this.tryAppendQueryParameter(queries, param, this.permissions);\n break;\n case \"sig\":\n this.tryAppendQueryParameter(queries, param, this.signature);\n break;\n case \"rscc\":\n this.tryAppendQueryParameter(queries, param, this.cacheControl);\n break;\n case \"rscd\":\n this.tryAppendQueryParameter(queries, param, this.contentDisposition);\n break;\n case \"rsce\":\n this.tryAppendQueryParameter(queries, param, this.contentEncoding);\n break;\n case \"rscl\":\n this.tryAppendQueryParameter(queries, param, this.contentLanguage);\n break;\n case \"rsct\":\n this.tryAppendQueryParameter(queries, param, this.contentType);\n break;\n case \"saoid\":\n this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);\n break;\n case \"scid\":\n this.tryAppendQueryParameter(queries, param, this.correlationId);\n break;\n }\n }\n return queries.join(\"&\");\n }\n\n /**\n * A private helper method used to filter and append query key/value pairs into an array.\n *\n * @param queries -\n * @param key -\n * @param value -\n */\n private tryAppendQueryParameter(queries: string[], key: string, value?: string): void {\n if (!value) {\n return;\n }\n\n key = encodeURIComponent(key);\n value = encodeURIComponent(value);\n if (key.length > 0 && value.length > 0) {\n queries.push(`${key}=${value}`);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js new file mode 100644 index 0000000..9f386b6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +export function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} +//# sourceMappingURL=SasIPRange.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map new file mode 100644 index 0000000..0a67a11 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SasIPRange.js","sourceRoot":"","sources":["../../../../src/sas/SasIPRange.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkBlC;;;;;;GAMG;AACH,MAAM,UAAU,eAAe,CAAC,OAAmB;IACjD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;AACzE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allowed IP range for a SAS.\n */\nexport interface SasIPRange {\n /**\n * Starting IP address in the IP range.\n * If end IP doesn't provide, start IP will the only IP allowed.\n */\n start: string;\n /**\n * Optional. 
IP address that ends the IP range.\n * If not provided, start IP will the only IP allowed.\n */\n end?: string;\n}\n\n/**\n * Generate SasIPRange format string. For example:\n *\n * \"8.8.8.8\" or \"1.1.1.1-255.255.255.255\"\n *\n * @param ipRange -\n */\nexport function ipRangeToString(ipRange: SasIPRange): string {\n return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js new file mode 100644 index 0000000..d268208 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// In browser, during webpack or browserify bundling, this module will be replaced by 'events' +// https://github.com/Gozala/events +import { EventEmitter } from "events"; +/** + * States for Batch. + */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +export class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new EventEmitter(); + } + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. 
+ * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } + else { + return; + } + } + } +} +//# sourceMappingURL=Batch.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map new file mode 100644 index 0000000..62abe3e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Batch.js","sourceRoot":"","sources":["../../../../src/utils/Batch.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,8FAA8F;AAC9F,mCAAmC;AACnC,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAOtC;;GAEG;AACH,IAAK,WAGJ;AAHD,WAAK,WAAW;IACd,6CAAI,CAAA;IACJ,+CAAK,CAAA;AACP,CAAC,EAHI,WAAW,KAAX,WAAW,QAGf;AAED;;;;GAIG;AACH,MAAM,OAAO,KAAK;IAqChB;;;OAGG;IACH,YAAmB,cAAsB,CAAC;QAnC1C;;WAEG;QACK,YAAO,GAAW,CAAC,CAAC;QAE5B;;WAEG;QACK,cAAS,GAAW,CAAC,CAAC;QAE9B;;WAEG;QACK,WAAM,GAAW,CAAC,CAAC;QAE3B;;WAEG;QACK,eAAU,GAAgB,EAAE,CAAC;QAErC;;;WAGG;QACK,UAAK,GAAgB,WAAW,CAAC,IAAI,CAAC;QAY5C,IAAI,WAAW,GAAG,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,mCAAmC,CAAC,CAAC;SAC3D;QACD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,YAAY,EAAE,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACI,YAAY,CAAC,SAAoB;QACtC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE;YAC9B,IAAI;gBACF,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,MAAM,SAAS,EAAE,CAAC;gBAClB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,IAAI,CAAC,SAAS,EAAE,CAAC;gBACjB,IAAI,CAAC,eAAe,EAAE,CAAC;aACxB;YAAC,OAAO,KAAU,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;aACnC;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,EAAE;QACb,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAChC,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;SAC1B;QAED,IAAI,CAAC,eAAe,EAAE,CAAC;QAEvB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC3C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAEnC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;gBACjC,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;gBAC/B,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACK,aAAa;QACnB,IAAI,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YACxC,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;SACvC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;OAIG;IACK,eAAe;QACrB,IAAI,IAAI,CAAC,KAAK,KAAK,WAAW,CAAC,KAAK,EAAE;YACpC,OAAO;SACR;QAED,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YAC5B,OAAO;SACR;QAED,OAAO,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;YACtC,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;YACvC,IAAI,SAAS,EAAE;gBACb,SAAS,EAAE,CAAC;aACb;iBAAM;gBACL,OAAO;aACR;SACF;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// In browser, during webpack or browserify bundling, this module will be replaced by 'events'\n// https://github.com/Gozala/events\nimport { EventEmitter } from \"events\";\n\n/**\n * Operation is an async function to be executed and managed by Batch.\n */\nexport declare type Operation = () => Promise;\n\n/**\n * States for Batch.\n */\nenum BatchStates {\n Good,\n Error,\n}\n\n/**\n * Batch provides basic parallel execution with concurrency limits.\n * Will stop execute left operations when one of the executed operation throws an error.\n 
* But Batch cannot cancel ongoing operations, you need to cancel them by yourself.\n */\nexport class Batch {\n /**\n * Concurrency. Must be lager than 0.\n */\n private concurrency: number;\n\n /**\n * Number of active operations under execution.\n */\n private actives: number = 0;\n\n /**\n * Number of completed operations under execution.\n */\n private completed: number = 0;\n\n /**\n * Offset of next operation to be executed.\n */\n private offset: number = 0;\n\n /**\n * Operation array to be executed.\n */\n private operations: Operation[] = [];\n\n /**\n * States of Batch. When an error happens, state will turn into error.\n * Batch will stop execute left operations.\n */\n private state: BatchStates = BatchStates.Good;\n\n /**\n * A private emitter used to pass events inside this class.\n */\n private emitter: EventEmitter;\n\n /**\n * Creates an instance of Batch.\n * @param concurrency -\n */\n public constructor(concurrency: number = 5) {\n if (concurrency < 1) {\n throw new RangeError(\"concurrency must be larger than 0\");\n }\n this.concurrency = concurrency;\n this.emitter = new EventEmitter();\n }\n\n /**\n * Add a operation into queue.\n *\n * @param operation -\n */\n public addOperation(operation: Operation): void {\n this.operations.push(async () => {\n try {\n this.actives++;\n await operation();\n this.actives--;\n this.completed++;\n this.parallelExecute();\n } catch (error: any) {\n this.emitter.emit(\"error\", error);\n }\n });\n }\n\n /**\n * Start execute operations in the queue.\n *\n */\n public async do(): Promise {\n if (this.operations.length === 0) {\n return Promise.resolve();\n }\n\n this.parallelExecute();\n\n return new Promise((resolve, reject) => {\n this.emitter.on(\"finish\", resolve);\n\n this.emitter.on(\"error\", (error) => {\n this.state = BatchStates.Error;\n reject(error);\n });\n });\n }\n\n /**\n * Get next operation to be executed. Return null when reaching ends.\n *\n */\n private nextOperation(): Operation | null {\n if (this.offset < this.operations.length) {\n return this.operations[this.offset++];\n }\n return null;\n }\n\n /**\n * Start execute operations. One one the most important difference between\n * this method with do() is that do() wraps as an sync method.\n *\n */\n private parallelExecute(): void {\n if (this.state === BatchStates.Error) {\n return;\n }\n\n if (this.completed >= this.operations.length) {\n this.emitter.emit(\"finish\");\n return;\n }\n\n while (this.actives < this.concurrency) {\n const operation = this.nextOperation();\n if (operation) {\n operation();\n } else {\n return;\n }\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js new file mode 100644 index 0000000..cfe60cc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +import { AvroReadableFromStream, AvroReader } from "../../../storage-internal-avro/src"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +export class BlobQuickQueryStream extends Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } +} +//# sourceMappingURL=BlobQuickQueryStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map new file mode 100644 index 0000000..db25b58 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BlobQuickQueryStream.js","sourceRoot":"","sources":["../../../../src/utils/BlobQuickQueryStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAKlC,OAAO,EAAE,sBAAsB,EAAE,UAAU,EAAE,MAAM,oCAAoC,CAAC;AAqBxF;;;;GAIG;AACH,MAAM,OAAO,oBAAqB,SAAQ,QAAQ;IAQhD;;;;;OAKG;IACH,YAAmB,MAA6B,EAAE,UAAuC,EAAE;QACzF,KAAK,EAAE,CAAC;QAXF,eAAU,GAAY,IAAI,CAAC;QAYjC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACrC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,sBAAsB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;QAC1E,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;IACrF,CAAC;IAEM,KAAK;QACV,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;gBAChC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAC1B,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;IAEO,KAAK,CAAC,YAAY;QACxB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;QACxB,IAAI,QAAQ,CAAC;QACb,GAAG;YACD,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACtC,IAAI,QAAQ,CAAC,IAAI,EAAE;gBACjB,MAAM;aACP;YACD,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC;YAC3B,MAAM,MAAM,GAAI,GAAW,CAAC,OAAO,CAAC;YACpC,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;gBAC9B,MAAM,KAAK,CAAC,gCAAgC,CAAC,CAAC;aAC/C;YAED,QAAQ,MAAM,EAAE;gBACd,KAAK,0DAA0D;oBAC7D;wBACE,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;wBAC/B,IAAI,IAAI,YAAY,UAAU,KAAK,KAAK,EAAE;4BACxC,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;yBACpD;wBACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE;4BACjC,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;yBACxB;qBACF;oBACD,MAAM;gBACR,KAAK,wDAAwD;oBAC3D;wBACE,MAAM,YAAY,GAAI,GAAW,CAAC,YAAY,CAAC;wBAC/C,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;4BACpC,MAAM,KAAK,CAAC,+CAA+C,CAAC,CAAC;yBAC9D;wBACD,IAAI,IAAI,CAAC,UAAU,EAAE;4BACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC,CAAC;yBAChD;qBACF;oBACD,MAAM;gBACR,KAAK,mDAAmD;oBACtD,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnB,MAAM,UAAU,GAAI,GAAW,CAAC,UAAU,CAAC;wBAC3C,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BAClC,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;yBACvD;wBACD,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC,CAAC;qBAC9C;oBACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBACR,KAAK,qDAAqD;oBACxD,IAAI,IAAI,CAAC,OAAO,EAAE;wBAChB,MAAM,KAAK,GAAI,GAAW,CAAC,KAAK,CAAC;wBACjC,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;4BAC9B,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;yBACpD;wBACD,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;wBAC/B,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;4BAC5B,MAAM,KAAK,CAAC,oCAAoC,CAAC,CAAC;yBACnD;wBACD,MAAM,WAAW,GAAI,GAAW,CAAC,WAAW,CAAC;wBAC7C,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnC,MAAM,KAAK,CAAC,2CAA2C,CAAC,CAAC;yBAC1D;wBACD,MAAM,QAAQ,GAAI,GAAW,CAAC,QAAQ,CAAC;wBACvC,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;4BAChC,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;yBACvD;wBACD,IAAI,CAAC,OAAO,CAAC;4BACX,QAAQ;4BACR,IAAI;4BACJ,OAAO,EAAE,KAAK;4BACd,WAAW;yBACZ,CAAC,CAAC;qBACJ;oBACD,MAAM;gBACR;oBACE,MAAM,KAAK,CAAC,kBAAkB,MAAM,2BAA2B,CAAC,CAAC;aACpE;SACF,QAAQ,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable } from \"stream\";\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TransferProgressEvent } from \"@azure/core-http\";\n\nimport { AvroReadableFromStream, AvroReader } from \"../../../storage-internal-avro/src\";\nimport { BlobQueryError } from \"../Clients\";\n\nexport interface BlobQuickQueryStreamOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the 
@azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.\n */\nexport class BlobQuickQueryStream extends Readable {\n private source: NodeJS.ReadableStream;\n private avroReader: AvroReader;\n private avroIter: AsyncIterableIterator;\n private avroPaused: boolean = true;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private onError?: (error: BlobQueryError) => void;\n\n /**\n * Creates an instance of BlobQuickQueryStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param options -\n */\n public constructor(source: NodeJS.ReadableStream, options: BlobQuickQueryStreamOptions = {}) {\n super();\n this.source = source;\n this.onProgress = options.onProgress;\n this.onError = options.onError;\n this.avroReader = new AvroReader(new AvroReadableFromStream(this.source));\n this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal });\n }\n\n public _read(): void {\n if (this.avroPaused) {\n this.readInternal().catch((err) => {\n this.emit(\"error\", err);\n });\n }\n }\n\n private async readInternal() {\n this.avroPaused = false;\n let avroNext;\n do {\n avroNext = await this.avroIter.next();\n if (avroNext.done) {\n break;\n }\n const obj = avroNext.value;\n const schema = (obj as any).$schema;\n if (typeof schema !== \"string\") {\n throw Error(\"Missing schema in avro record.\");\n }\n\n switch (schema) {\n case \"com.microsoft.azure.storage.queryBlobContents.resultData\":\n {\n const data = (obj as any).data;\n if (data instanceof Uint8Array === false) {\n throw Error(\"Invalid data in avro result record.\");\n }\n if (!this.push(Buffer.from(data))) {\n this.avroPaused = true;\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.progress\":\n {\n const bytesScanned = (obj as any).bytesScanned;\n if (typeof bytesScanned !== \"number\") {\n throw Error(\"Invalid bytesScanned in avro progress record.\");\n }\n if (this.onProgress) {\n this.onProgress({ loadedBytes: bytesScanned });\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.end\":\n if (this.onProgress) {\n const totalBytes = (obj as any).totalBytes;\n if (typeof totalBytes !== \"number\") {\n throw Error(\"Invalid totalBytes in avro end record.\");\n }\n this.onProgress({ loadedBytes: totalBytes });\n }\n this.push(null);\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.error\":\n if (this.onError) {\n const fatal = (obj as any).fatal;\n if (typeof fatal !== \"boolean\") {\n throw Error(\"Invalid fatal in avro error record.\");\n }\n const name = (obj as any).name;\n if (typeof name !== \"string\") {\n throw Error(\"Invalid name in avro error record.\");\n }\n const description = (obj as any).description;\n if (typeof description !== \"string\") {\n throw Error(\"Invalid description in avro error record.\");\n }\n const position = (obj as any).position;\n if (typeof position !== \"number\") {\n throw Error(\"Invalid position in avro error record.\");\n }\n this.onError({\n position,\n name,\n isFatal: fatal,\n description,\n });\n }\n break;\n default:\n throw Error(`Unknown schema ${schema} in avro progress 
record.`);\n }\n } while (!avroNext.done && !this.avroPaused);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js new file mode 100644 index 0000000..d2e1dc6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +export class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } +} +Mutex.keys = {}; +Mutex.listeners = {}; +//# sourceMappingURL=Mutex.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map new file mode 100644 index 0000000..f437159 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"Mutex.js","sourceRoot":"","sources":["../../../../src/utils/Mutex.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,IAAK,eAGJ;AAHD,WAAK,eAAe;IAClB,yDAAM,CAAA;IACN,6DAAQ,CAAA;AACV,CAAC,EAHI,eAAe,KAAf,eAAe,QAGnB;AAID;;GAEG;AACH,MAAM,OAAO,KAAK;IAChB;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAW;QAClC,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,EAAE;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,QAAQ,EAAE;gBAC/E,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;gBACxC,OAAO,EAAE,CAAC;aACX;iBAAM;gBACL,IAAI,CAAC,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE;oBAC3B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;oBACxC,OAAO,EAAE,CAAC;gBACZ,CAAC,CAAC,CAAC;aACJ;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACI,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,GAAW;QACpC,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,EAAE;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,MAAM,EAAE;gBAC7C,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;aAC3B;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACtB,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;IACL,CAAC;IAKO,MAAM,CAAC,aAAa,CAAC,GAAW,EAAE,OAAiB;QACzD,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;YACrC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;SACjC;aAAM;YACL,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;SACnC;IACH,CAAC;IAEO,MAAM,CAAC,eAAe,CAAC,GAAW;QACxC,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YACvE,MAAM,OAAO,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;YAC5C,YAAY,CAAC,GAAG,EAAE;gBAChB,OAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACtB,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;;AAlBc,UAAI,GAAuC,EAAE,CAAC;AAC9C,eAAS,GAAkC,EAAE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nenum MutexLockStatus {\n LOCKED,\n UNLOCKED,\n}\n\ntype Callback = (...args: any[]) => any;\n\n/**\n * An async mutex lock.\n */\nexport class Mutex {\n /**\n * Lock for a specific key. 
If the lock has been acquired by another customer, then\n * will wait until getting the lock.\n *\n * @param key - lock key\n */\n public static async lock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n } else {\n this.onUnlockEvent(key, () => {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n });\n }\n });\n }\n\n /**\n * Unlock a key.\n *\n * @param key -\n */\n public static async unlock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === MutexLockStatus.LOCKED) {\n this.emitUnlockEvent(key);\n }\n delete this.keys[key];\n resolve();\n });\n }\n\n private static keys: { [key: string]: MutexLockStatus } = {};\n private static listeners: { [key: string]: Callback[] } = {};\n\n private static onUnlockEvent(key: string, handler: Callback) {\n if (this.listeners[key] === undefined) {\n this.listeners[key] = [handler];\n } else {\n this.listeners[key].push(handler);\n }\n }\n\n private static emitUnlockEvent(key: string) {\n if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {\n const handler = this.listeners[key].shift();\n setImmediate(() => {\n handler!.call(this);\n });\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js new file mode 100644 index 0000000..2d58e9d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +export class RetriableReadableStream extends Readable { + /** + * Creates an instance of RetriableReadableStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.source.removeAllListeners("data"); + this.source.emit("end"); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? 
undefined : error); + } +} +//# sourceMappingURL=RetriableReadableStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map new file mode 100644 index 0000000..6a24ff3 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map @@ -0,0 +1 @@ +{"version":3,"file":"RetriableReadableStream.js","sourceRoot":"","sources":["../../../../src/utils/RetriableReadableStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAgClC;;;;GAIG;AACH,MAAM,OAAO,uBAAwB,SAAQ,QAAQ;IAWnD;;;;;;;;;OASG;IACH,YACE,MAA6B,EAC7B,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,UAA0C,EAAE;QAE5C,KAAK,CAAC,EAAE,aAAa,EAAE,OAAO,CAAC,aAAa,EAAE,CAAC,CAAC;QAtB1C,YAAO,GAAW,CAAC,CAAC;QAoDpB,sBAAiB,GAAG,CAAC,IAAY,EAAE,EAAE;YAC3C,IAAI,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;gBAClC,IAAI,CAAC,OAAO,CAAC,iBAAiB,GAAG,SAAS,CAAC;gBAC3C,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;gBACpB,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC,MAAM,CAAC,CAAC;gBACvC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACxB,OAAO;aACR;YAED,eAAe;YACf,2EAA2E;YAC3E,KAAK;YACL,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC;YAC3B,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;aAC5D;YACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;gBACpB,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;aACrB;QACH,CAAC,CAAC;QAEM,4BAAuB,GAAG,CAAC,GAAW,EAAE,EAAE;YAChD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE;gBACpC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBAClB,OAAO;aACR;YAED,eAAe;YACf,kDAAkD;YAClD,kBAAkB;YAClB,+BAA+B;YAC/B,KAAK;YACL,IAAI,CAAC,yBAAyB,EAAE,CAAC;YACjC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;gBAChC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aACjB;iBAAM,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,GAAG,EAAE;gBAClC,eAAe;gBACf,gEAAgE;gBAChE,KAAK;gBACL,IAAI,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,gBAAgB,EAAE;oBACxC,IAAI,CAAC,OAAO,IAAI,CAAC,CAAC;oBAClB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;yBACrB,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE;wBAClB,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;wBACxB,IAAI,CAAC,sBAAsB,EAAE,CAAC;wBAC9B,OAAO;oBACT,CAAC,CAAC;yBACD,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;wBACf,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;oBACtB,CAAC,CAAC,CAAC;iBACN;qBAAM;oBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,sHACE,IAAI,CAAC,MAAM,GAAG,CAChB,yBAAyB,IAAI,CAAC,GAAG,cAAc,IAAI,CAAC,OAAO,kBACzD,IAAI,CAAC,gBACP,EAAE,CACH,CACF,CAAC;iBACH;aACF;iBAAM;gBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,4FACE,IAAI,CAAC,GACP,sBAAsB,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CACxC,CACF,CAAC;aACH;QACH,CAAC,CAAC;QAnGA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,GAAG,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC;QAC9B,IAAI,CAAC,gBAAgB;YACnB,OAAO,CAAC,gBAAgB,IAAI,OAAO,CAAC,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3F,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACrC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QAEvB,IAAI,CAAC,sBAAsB,EAAE,CAAC;IAChC,CAAC;IAEM,KAAK;QACV,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;IACvB,CAAC;IAEO,sBAAsB;QAC5B,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;IACxD,CAAC;IAEO,yBAAyB;QAC/B,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC3D,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC
,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QAChE,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;IACpE,CAAC;IA0ED,QAAQ,CAAC,KAAmB,EAAE,QAAiC;QAC7D,iDAAiD;QACjD,IAAI,CAAC,yBAAyB,EAAE,CAAC;QAChC,IAAI,CAAC,MAAmB,CAAC,OAAO,EAAE,CAAC;QAEpC,QAAQ,CAAC,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { TransferProgressEvent } from \"@azure/core-http\";\nimport { Readable } from \"stream\";\n\nexport type ReadableStreamGetter = (offset: number) => Promise;\n\nexport interface RetriableReadableStreamOptions {\n /**\n * Max retry count (greater than or equal to 0), undefined or invalid value means no retry\n */\n maxRetryRequests?: number;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Debug purpose only. Used to inject an unexpected end to existing internal stream,\n * to test stream retry works well or not.\n *\n * When assign it to true, for next incoming \"data\" event of internal stream,\n * RetriableReadableStream will try to emit an \"end\" event to existing internal\n * stream to force it end and start retry from the breaking point.\n * The value will then update to \"undefined\", once the injection works.\n */\n doInjectErrorOnce?: boolean;\n\n /**\n * A threshold, not a limit. Dictates the amount of data that a stream buffers before it stops asking for more data.\n */\n highWaterMark?: number;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.\n */\nexport class RetriableReadableStream extends Readable {\n private start: number;\n private offset: number;\n private end: number;\n private getter: ReadableStreamGetter;\n private source: NodeJS.ReadableStream;\n private retries: number = 0;\n private maxRetryRequests: number;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private options: RetriableReadableStreamOptions;\n\n /**\n * Creates an instance of RetriableReadableStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param getter - A method calling downloading request returning\n * a new ReadableStream from specified offset\n * @param offset - Offset position in original data source to read\n * @param count - How much data in original data source to read\n * @param options -\n */\n public constructor(\n source: NodeJS.ReadableStream,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n super({ highWaterMark: options.highWaterMark });\n this.getter = getter;\n this.source = source;\n this.start = offset;\n this.offset = offset;\n this.end = offset + count - 1;\n this.maxRetryRequests =\n options.maxRetryRequests && options.maxRetryRequests >= 0 ? 
options.maxRetryRequests : 0;\n this.onProgress = options.onProgress;\n this.options = options;\n\n this.setSourceEventHandlers();\n }\n\n public _read(): void {\n this.source.resume();\n }\n\n private setSourceEventHandlers() {\n this.source.on(\"data\", this.sourceDataHandler);\n this.source.on(\"end\", this.sourceErrorOrEndHandler);\n this.source.on(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private removeSourceEventHandlers() {\n this.source.removeListener(\"data\", this.sourceDataHandler);\n this.source.removeListener(\"end\", this.sourceErrorOrEndHandler);\n this.source.removeListener(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private sourceDataHandler = (data: Buffer) => {\n if (this.options.doInjectErrorOnce) {\n this.options.doInjectErrorOnce = undefined;\n this.source.pause();\n this.source.removeAllListeners(\"data\");\n this.source.emit(\"end\");\n return;\n }\n\n // console.log(\n // `Offset: ${this.offset}, Received ${data.length} from internal stream`\n // );\n this.offset += data.length;\n if (this.onProgress) {\n this.onProgress({ loadedBytes: this.offset - this.start });\n }\n if (!this.push(data)) {\n this.source.pause();\n }\n };\n\n private sourceErrorOrEndHandler = (err?: Error) => {\n if (err && err.name === \"AbortError\") {\n this.destroy(err);\n return;\n }\n\n // console.log(\n // `Source stream emits end or error, offset: ${\n // this.offset\n // }, dest end : ${this.end}`\n // );\n this.removeSourceEventHandlers();\n if (this.offset - 1 === this.end) {\n this.push(null);\n } else if (this.offset <= this.end) {\n // console.log(\n // `retries: ${this.retries}, max retries: ${this.maxRetries}`\n // );\n if (this.retries < this.maxRetryRequests) {\n this.retries += 1;\n this.getter(this.offset)\n .then((newSource) => {\n this.source = newSource;\n this.setSourceEventHandlers();\n return;\n })\n .catch((error) => {\n this.destroy(error);\n });\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${\n this.offset - 1\n }, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${\n this.maxRetryRequests\n }`\n )\n );\n }\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: Received more data than original request, data needed offset is ${\n this.end\n }, received offset: ${this.offset - 1}`\n )\n );\n }\n };\n\n _destroy(error: Error | null, callback: (error?: Error) => void): void {\n // remove listener from source and release source\n this.removeSourceEventHandlers();\n (this.source as Readable).destroy();\n\n callback(error === null ? undefined : error);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js new file mode 100644 index 0000000..131ba87 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { DefaultHttpClient } from "@azure/core-http"; +const _defaultHttpClient = new DefaultHttpClient(); +export function getCachedDefaultHttpClient() { + return _defaultHttpClient; +} +//# sourceMappingURL=cache.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map new file mode 100644 index 0000000..8156a9f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cache.js","sourceRoot":"","sources":["../../../../src/utils/cache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AAGrD,MAAM,kBAAkB,GAAG,IAAI,iBAAiB,EAAE,CAAC;AAEnD,MAAM,UAAU,0BAA0B;IACxC,OAAO,kBAAkB,CAAC;AAC5B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"@azure/core-http\";\nimport { IHttpClient } from \"../Pipeline\";\n\nconst _defaultHttpClient = new DefaultHttpClient();\n\nexport function getCachedDefaultHttpClient(): IHttpClient {\n return _defaultHttpClient;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js new file mode 100644 index 0000000..bbb62f9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js @@ -0,0 +1,223 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const SDK_VERSION = "12.17.0"; +export const SERVICE_VERSION = "2023-11-03"; +export const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +export const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +export const BLOCK_BLOB_MAX_BLOCKS = 50000; +export const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +export const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +export const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +export const REQUEST_TIMEOUT = 100 * 1000; // In ms +/** + * The OAuth scope to use with Azure Storage. 
+ */ +export const StorageOAuthScopes = "https://storage.azure.com/.default"; +export const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +export const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +export const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", +}; +export const ETagNone = ""; +export const ETagAny = "*"; +export const SIZE_1_MB = 1 * 1024 * 1024; +export const BATCH_MAX_REQUEST = 256; +export const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +export const HTTP_LINE_ENDING = "\r\n"; +export const HTTP_VERSION_1_1 = "HTTP/1.1"; +export const EncryptionAlgorithmAES25 = "AES256"; +export const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +export const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + 
"x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +export const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +export const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +export const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +/// List of ports used for path style addressing. +/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. +export const PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104", +]; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map new file mode 100644 index 0000000..3c540e4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../../src/utils/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,WAAW,GAAW,SAAS,CAAC;AAC7C,MAAM,CAAC,MAAM,eAAe,GAAW,YAAY,CAAC;AAEpD,MAAM,CAAC,MAAM,gCAAgC,GAAW,GAAG,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,QAAQ;AACnF,MAAM,CAAC,MAAM,gCAAgC,GAAW,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,SAAS;AACrF,MAAM,CAAC,MAAM,qBAAqB,GAAW,KAAK,CAAC;AACnD,MAAM,CAAC,MAAM,+BAA+B,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,MAAM;AAC9E,MAAM,CAAC,MAAM,iCAAiC,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,MAAM;AAChF,MAAM,CAAC,MAAM,mCAAmC,GAAW,CAAC,CAAC;AAE7D,MAAM,CAAC,MAAM,eAAe,GAAW,GAAG,GAAG,IAAI,CAAC,CAAC,QAAQ;AAC3D;;GAEG;AACH,MAAM,CAAC,MAAM,kBAAkB,GAAsB,oCAAoC,CAAC;AAE1F,MAAM,CAAC,MAAM,YAAY,GAAG;IAC1B,UAAU,EAAE;QACV,sBAAsB,EAAE,GAAG;QAC3B,SAAS,EAAE,KAAK;QAChB,QAAQ,EAAE,UAAU;QACpB,SAAS,EAAE,WAAW;QACtB,OAAO,EAAE,SAAS;KACnB;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAG;IAC/B,aAAa,EAAE,GAAG;IAClB,aAAa,EAAE,GAAG;IAClB,cAAc,EAAE,GAAG;IACnB,kBAAkB,EAAE,GAAG;IACvB,0BAA0B,EAAE,GAAG;CAChC,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAG;IAC7B,aAAa,EAAE,eAAe;IAC9B,oBAAoB,EAAE,QAAQ;IAC9B,gBAAgB,EAAE,kBAAkB;IACpC,UAAU,EAAE,YAAY;IACxB,gBAAgB,EAAE,kBAAkB;IACpC,cAAc,EAAE,gBAAgB;IAChC,WAAW,EAAE,aAAa;IAC1B,yBAAyB,EAAE,2BAA2B;IACtD,YAAY,EAAE,cAAc;IAC5B,MAAM,EAAE,QAAQ;IAChB,IAAI,EAAE,MAAM;IACZ,QAAQ,EAAE,UAAU;IACpB,iBAAiB,EAAE,mBAAmB;IACtC,aAAa,EAAE,eAAe;IAC9B,mBAAmB,EAAE,qBAAqB;IAC1C,kBAAkB,EAAE,OAAO;IAC3B,KAAK,EAAE,OAAO;IACd,UAAU,EAAE,YAAY;IACxB,sBAAsB,EAAE,wBAAwB;IAChD,gBAAgB,EAAE,kBAAkB;IACpC,SAAS,EAAE,WAAW;IACtB,eAAe,EAAE,iBAAiB;IAClC,YAAY,EAAE,cAAc;CAC7B,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAG,EAAE,CAAC;AAC3B,MAAM,CAAC,MAAM,OAAO,GAAG,GAAG,CAAC;AAE3B,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AACzC,MAAM,CAAC,MAAM,iBAAiB,GAAG,GAAG,CAAC;AACrC,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,GAAG,SAAS,CAAC;AACxD,MAAM,CAAC,MAAM,gBAAgB,GAAG,MAAM,CAAC;AACvC,MAAM,CAAC,MAAM,gBAAgB,GAAG,UAAU,CAAC;AAE3C,MAAM,CAAC,MAAM,wBAAwB,GAAG,QAAQ,CAAC;AAEjD,MAAM,CAAC,MAAM,2BAA2B,GAAG,sNAAsN,CAAC;AAElQ,MAAM,CAAC,MAAM,oCAAoC,GAAG;IAClD,6BAA6B;IAC7B,eAAe;IACf,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,YAAY;IACZ,aAAa;IACb,mBAAmB;IACnB,YAAY;IACZ,wBAAwB;IACxB,WAAW;IACX,iBAAiB;IACjB,iBAAiB;IACjB,+BAA+B;IAC/B,cAAc;IACd,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,kBAAkB;IAClB,aAAa;IACb,eAAe;IACf,MAAM;IACN,eAAe;IACf,QAAQ;IACR,MAAM;IACN,oBAAoB;IACpB,kBAAkB;IAClB,2BAA2B;IAC3B,cAAc;IACd,oBAAoB;IACpB,kBAAkB;IAClB,8BAA8B;IAC9B,qBAAqB;IACrB,kBAAkB;IAClB,mBAAmB;IACnB,YAAY;IACZ,+BAA+B;IAC/B,uBAAuB;IACvB,eAAe;IACf,mBAAmB;IACnB,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,8BAA8B;IAC9B,2BAA2B;IAC3B,mBAAmB;IACnB,qBAAqB;IACrB,yBAAyB;IACzB,yBAAyB;IACzB,iCAAiC;IACjC,+BAA+B;IAC/B,6BAA6B;IAC7B,+BAA+B;IAC/B,4BAA4B;IAC5B,4BAA4B;IAC5B,0BAA0B;IAC1B,uBAAuB;IACvB,wBAAwB;IACxB,yBAAyB;IACzB,2BAA2B;IAC3B,gBAAgB;IAChB,gCAAgC;IAChC,oBAAoB;IACpB,+BAA+B;IAC/B,uBAAuB;IACvB,4BAA4B;IAC5B,qCAAqC;IACrC,2BAA2B;IAC3B,4BAA4B;IAC5B,4BAA4B;IAC5B,4BAA4B;IAC5B,uBAAuB;IACvB,mBAAmB;IACnB,yBAAyB;IACzB,qBAAqB;IACrB,eAAe;IACf,iBAAiB;IACjB,iBAAiB;IACjB,wBAAwB;IACxB,4BAA4B;IAC5B,yBAAyB;IACzB,6BAA6B;IAC7B,eAAe;IACf,yBAAyB;IACzB,sBAAsB;IACtB,+BAA+B;IAC/B,2BAA2B;IAC3B,iCAAiC;IACjC,gBAAgB;IAChB,4BAA4B;IAC5B,cAAc;IACd,qBAAqB;CACtB,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAAG;IACtD,MAAM;IACN,YAAY;IACZ,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,IAAI;IACJ,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,IAAI;IACJ,IAAI;IACJ,SAAS;IACT,QAAQ;IACR,QAAQ;IACR,QAAQ;IACR,SAAS;IACT,SAAS;IACT,eAAe;IACf,WAAW;IACX,cAAc;IACd,KAAK;IACL,OAAO;IACP,KAAK;IACL,KAAK;IACL,OA
AO;IACP,KAAK;IACL,UAAU;CACX,CAAC;AAEF,MAAM,CAAC,MAAM,sCAAsC,GAAG,qCAAqC,CAAC;AAC5F,MAAM,CAAC,MAAM,yCAAyC,GACpD,2CAA2C,CAAC;AAE9C,iDAAiD;AACjD,wGAAwG;AACxG,MAAM,CAAC,MAAM,cAAc,GAAG;IAC5B,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;CACR,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const SDK_VERSION: string = \"12.17.0\";\nexport const SERVICE_VERSION: string = \"2023-11-03\";\n\nexport const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES: number = 256 * 1024 * 1024; // 256MB\nexport const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES: number = 4000 * 1024 * 1024; // 4000MB\nexport const BLOCK_BLOB_MAX_BLOCKS: number = 50000;\nexport const DEFAULT_BLOCK_BUFFER_SIZE_BYTES: number = 8 * 1024 * 1024; // 8MB\nexport const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES: number = 4 * 1024 * 1024; // 4MB\nexport const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS: number = 5;\n\nexport const REQUEST_TIMEOUT: number = 100 * 1000; // In ms\n/**\n * The OAuth scope to use with Azure Storage.\n */\nexport const StorageOAuthScopes: string | string[] = \"https://storage.azure.com/.default\";\n\nexport const URLConstants = {\n Parameters: {\n FORCE_BROWSER_NO_CACHE: \"_\",\n SIGNATURE: \"sig\",\n SNAPSHOT: \"snapshot\",\n VERSIONID: \"versionid\",\n TIMEOUT: \"timeout\",\n },\n};\n\nexport const HTTPURLConnection = {\n HTTP_ACCEPTED: 202,\n HTTP_CONFLICT: 409,\n HTTP_NOT_FOUND: 404,\n HTTP_PRECON_FAILED: 412,\n HTTP_RANGE_NOT_SATISFIABLE: 416,\n};\n\nexport const HeaderConstants = {\n AUTHORIZATION: \"Authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n CONTENT_ENCODING: \"Content-Encoding\",\n CONTENT_ID: \"Content-ID\",\n CONTENT_LANGUAGE: \"Content-Language\",\n CONTENT_LENGTH: \"Content-Length\",\n CONTENT_MD5: \"Content-Md5\",\n CONTENT_TRANSFER_ENCODING: \"Content-Transfer-Encoding\",\n CONTENT_TYPE: \"Content-Type\",\n COOKIE: \"Cookie\",\n DATE: \"date\",\n IF_MATCH: \"if-match\",\n IF_MODIFIED_SINCE: \"if-modified-since\",\n IF_NONE_MATCH: \"if-none-match\",\n IF_UNMODIFIED_SINCE: \"if-unmodified-since\",\n PREFIX_FOR_STORAGE: \"x-ms-\",\n RANGE: \"Range\",\n USER_AGENT: \"User-Agent\",\n X_MS_CLIENT_REQUEST_ID: \"x-ms-client-request-id\",\n X_MS_COPY_SOURCE: \"x-ms-copy-source\",\n X_MS_DATE: \"x-ms-date\",\n X_MS_ERROR_CODE: \"x-ms-error-code\",\n X_MS_VERSION: \"x-ms-version\",\n};\n\nexport const ETagNone = \"\";\nexport const ETagAny = \"*\";\n\nexport const SIZE_1_MB = 1 * 1024 * 1024;\nexport const BATCH_MAX_REQUEST = 256;\nexport const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;\nexport const HTTP_LINE_ENDING = \"\\r\\n\";\nexport const HTTP_VERSION_1_1 = \"HTTP/1.1\";\n\nexport const EncryptionAlgorithmAES25 = \"AES256\";\n\nexport const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`;\n\nexport const StorageBlobLoggingAllowedHeaderNames = [\n \"Access-Control-Allow-Origin\",\n \"Cache-Control\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"Request-Id\",\n \"traceparent\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"x-ms-client-request-id\",\n \"x-ms-date\",\n \"x-ms-error-code\",\n \"x-ms-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-version\",\n \"Accept-Ranges\",\n \"Content-Disposition\",\n 
\"Content-Encoding\",\n \"Content-Language\",\n \"Content-MD5\",\n \"Content-Range\",\n \"ETag\",\n \"Last-Modified\",\n \"Server\",\n \"Vary\",\n \"x-ms-content-crc64\",\n \"x-ms-copy-action\",\n \"x-ms-copy-completion-time\",\n \"x-ms-copy-id\",\n \"x-ms-copy-progress\",\n \"x-ms-copy-status\",\n \"x-ms-has-immutability-policy\",\n \"x-ms-has-legal-hold\",\n \"x-ms-lease-state\",\n \"x-ms-lease-status\",\n \"x-ms-range\",\n \"x-ms-request-server-encrypted\",\n \"x-ms-server-encrypted\",\n \"x-ms-snapshot\",\n \"x-ms-source-range\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"x-ms-access-tier\",\n \"x-ms-access-tier-change-time\",\n \"x-ms-access-tier-inferred\",\n \"x-ms-account-kind\",\n \"x-ms-archive-status\",\n \"x-ms-blob-append-offset\",\n \"x-ms-blob-cache-control\",\n \"x-ms-blob-committed-block-count\",\n \"x-ms-blob-condition-appendpos\",\n \"x-ms-blob-condition-maxsize\",\n \"x-ms-blob-content-disposition\",\n \"x-ms-blob-content-encoding\",\n \"x-ms-blob-content-language\",\n \"x-ms-blob-content-length\",\n \"x-ms-blob-content-md5\",\n \"x-ms-blob-content-type\",\n \"x-ms-blob-public-access\",\n \"x-ms-blob-sequence-number\",\n \"x-ms-blob-type\",\n \"x-ms-copy-destination-snapshot\",\n \"x-ms-creation-time\",\n \"x-ms-default-encryption-scope\",\n \"x-ms-delete-snapshots\",\n \"x-ms-delete-type-permanent\",\n \"x-ms-deny-encryption-scope-override\",\n \"x-ms-encryption-algorithm\",\n \"x-ms-if-sequence-number-eq\",\n \"x-ms-if-sequence-number-le\",\n \"x-ms-if-sequence-number-lt\",\n \"x-ms-incremental-copy\",\n \"x-ms-lease-action\",\n \"x-ms-lease-break-period\",\n \"x-ms-lease-duration\",\n \"x-ms-lease-id\",\n \"x-ms-lease-time\",\n \"x-ms-page-write\",\n \"x-ms-proposed-lease-id\",\n \"x-ms-range-get-content-md5\",\n \"x-ms-rehydrate-priority\",\n \"x-ms-sequence-number-action\",\n \"x-ms-sku-name\",\n \"x-ms-source-content-md5\",\n \"x-ms-source-if-match\",\n \"x-ms-source-if-modified-since\",\n \"x-ms-source-if-none-match\",\n \"x-ms-source-if-unmodified-since\",\n \"x-ms-tag-count\",\n \"x-ms-encryption-key-sha256\",\n \"x-ms-if-tags\",\n \"x-ms-source-if-tags\",\n];\n\nexport const StorageBlobLoggingAllowedQueryParameters = [\n \"comp\",\n \"maxresults\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"se\",\n \"si\",\n \"sip\",\n \"sp\",\n \"spr\",\n \"sr\",\n \"srt\",\n \"ss\",\n \"st\",\n \"sv\",\n \"include\",\n \"marker\",\n \"prefix\",\n \"copyid\",\n \"restype\",\n \"blockid\",\n \"blocklisttype\",\n \"delimiter\",\n \"prevsnapshot\",\n \"ske\",\n \"skoid\",\n \"sks\",\n \"skt\",\n \"sktid\",\n \"skv\",\n \"snapshot\",\n];\n\nexport const BlobUsesCustomerSpecifiedEncryptionMsg = \"BlobUsesCustomerSpecifiedEncryption\";\nexport const BlobDoesNotUseCustomerSpecifiedEncryption =\n \"BlobDoesNotUseCustomerSpecifiedEncryption\";\n\n/// List of ports used for path style addressing.\n/// Path style addressing means that storage account is put in URI's Path segment in instead of in host.\nexport const PathStylePorts = [\n \"10000\",\n \"10001\",\n \"10002\",\n \"10003\",\n \"10004\",\n \"10100\",\n \"10101\",\n \"10102\",\n \"10103\",\n \"10104\",\n \"11000\",\n \"11001\",\n \"11002\",\n \"11003\",\n \"11004\",\n \"11100\",\n \"11101\",\n \"11102\",\n \"11103\",\n \"11104\",\n];\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js new file mode 100644 index 
0000000..66a4527 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createSpanFunction } from "@azure/core-tracing"; +/** + * Creates a span using the global tracer. + * @internal + */ +export const createSpan = createSpanFunction({ + packagePrefix: "Azure.Storage.Blob", + namespace: "Microsoft.Storage", +}); +/** + * @internal + * + * Adapt the tracing options from OperationOptions to what they need to be for + * RequestOptionsBase (when we update to later OpenTelemetry versions this is now + * two separate fields, not just one). + */ +export function convertTracingToRequestOptionsBase(options) { + var _a, _b; + return { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, + }; +} +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map new file mode 100644 index 0000000..82a88ca --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../../../src/utils/tracing.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AAEzD;;;GAGG;AACH,MAAM,CAAC,MAAM,UAAU,GAAG,kBAAkB,CAAC;IAC3C,aAAa,EAAE,oBAAoB;IACnC,SAAS,EAAE,mBAAmB;CAC/B,CAAC,CAAC;AAEH;;;;;;GAMG;AACH,MAAM,UAAU,kCAAkC,CAChD,OAA0B;;IAE1B,OAAO;QACL,+HAA+H;QAC/H,WAAW,EAAE,MAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,cAAsB,0CAAE,WAAW;QAC1D,cAAc,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,cAAc,0CAAE,cAAc;KACxD,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { OperationOptions, RequestOptionsBase } from \"@azure/core-http\";\nimport { createSpanFunction } from \"@azure/core-tracing\";\n\n/**\n * Creates a span using the global tracer.\n * @internal\n */\nexport const createSpan = createSpanFunction({\n packagePrefix: \"Azure.Storage.Blob\",\n namespace: \"Microsoft.Storage\",\n});\n\n/**\n * @internal\n *\n * Adapt the tracing options from OperationOptions to what they need to be for\n * RequestOptionsBase (when we update to later OpenTelemetry versions this is now\n * two separate fields, not just one).\n */\nexport function convertTracingToRequestOptionsBase(\n options?: OperationOptions\n): Pick {\n return {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n spanOptions: (options?.tracingOptions as any)?.spanOptions,\n tracingContext: options?.tracingOptions?.tracingContext,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js new file mode 100644 index 0000000..8b76ae8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js @@ -0,0 
+1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Convert a Browser Blob object into ArrayBuffer. + * + * @param blob - + */ +export async function blobToArrayBuffer(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsArrayBuffer(blob); + }); +} +/** + * Convert a Browser Blob object into string. + * + * @param blob - + */ +export async function blobToString(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsText(blob); + }); +} +export function streamToBuffer() { + /* empty */ +} +export function streamToBuffer2() { + /* empty */ +} +export function readStreamToLocalFile() { + /* empty */ +} +export const fsStat = function stat() { + /* empty */ +}; +export const fsCreateReadStream = function createReadStream() { + /* empty */ +}; +//# sourceMappingURL=utils.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map new file mode 100644 index 0000000..be89432 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.browser.js","sourceRoot":"","sources":["../../../../src/utils/utils.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAC,IAAU;IAChD,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;IACpC,OAAO,IAAI,OAAO,CAAc,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAClD,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;YACjC,OAAO,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC;QACF,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC;QAC5B,UAAU,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAAC,IAAU;IAC3C,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;IACpC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC7C,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;YACjC,OAAO,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC;QACF,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC;QAC5B,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,cAAc;IAC5B,WAAW;AACb,CAAC;AAED,MAAM,UAAU,eAAe;IAC7B,WAAW;AACb,CAAC;AAED,MAAM,UAAU,qBAAqB;IACnC,WAAW;AACb,CAAC;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,SAAS,IAAI;IACjC,WAAW;AACb,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAG,SAAS,gBAAgB;IACzD,WAAW;AACb,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Convert a Browser Blob object into ArrayBuffer.\n *\n * @param blob -\n */\nexport async function blobToArrayBuffer(blob: Blob): Promise {\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n fileReader.onloadend = (ev: any) => {\n resolve(ev.target!.result);\n };\n fileReader.onerror = reject;\n fileReader.readAsArrayBuffer(blob);\n });\n}\n\n/**\n * Convert a Browser Blob object into string.\n *\n * @param blob -\n */\nexport async function blobToString(blob: Blob): Promise {\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n fileReader.onloadend = (ev: any) => {\n resolve(ev.target!.result);\n };\n fileReader.onerror = reject;\n fileReader.readAsText(blob);\n });\n}\n\nexport 
function streamToBuffer(): void {\n /* empty */\n}\n\nexport function streamToBuffer2(): void {\n /* empty */\n}\n\nexport function readStreamToLocalFile(): void {\n /* empty */\n}\n\nexport const fsStat = function stat(): void {\n /* empty */\n};\n\nexport const fsCreateReadStream = function createReadStream(): void {\n /* empty */\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js new file mode 100644 index 0000000..7faa0b5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js @@ -0,0 +1,716 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders, isNode, URLBuilder } from "@azure/core-http"; +import { DevelopmentConnectionString, HeaderConstants, PathStylePorts, URLConstants, } from "./constants"; +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. 
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +export function escapeURLPath(url) { + const urlParsed = URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path || "/"; + path = escape(path); + urlParsed.setPath(path); + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +export function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. 
+ */ +export function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + // if accountName is empty, try to read it from BlobEndpoint + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". 
+ * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +export function appendToURLPath(url, name) { + const urlParsed = URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + urlParsed.setPath(path); + const normalizedUrl = new URL(urlParsed.toString()); + return normalizedUrl.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +export function setURLParameter(url, name, value) { + const urlParsed = URLBuilder.parse(url); + urlParsed.setQueryParameter(name, value); + return urlParsed.toString(); +} +/** + * Get URL parameter by name. + * + * @param url - + * @param name - + */ +export function getURLParameter(url, name) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getQueryParameterValue(name); +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +export function setURLHost(url, host) { + const urlParsed = URLBuilder.parse(url); + urlParsed.setHost(host); + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +export function getURLPath(url) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getPath(); +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +export function getURLScheme(url) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getScheme(); +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +export function getURLPathAndQuery(url) { + const urlParsed = URLBuilder.parse(url); + const pathString = urlParsed.getPath(); + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.getQuery() || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +export function getURLQueries(url) { + let queryString = URLBuilder.parse(url).getQuery(); + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. 
+ */ +export function appendToURLQuery(url, queryParts) { + const urlParsed = URLBuilder.parse(url); + let query = urlParsed.getQuery(); + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.setQuery(query); + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. + * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +export function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +export function base64encode(content) { + return !isNode ? btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Base64 decode. + * + * @param encodedString - + */ +export function base64decode(encodedString) { + return !isNode ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +export function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. 
+ * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +export async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +export function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } +} +export function sanitizeURL(url) { + let safeURL = url; + if (getURLParameter(safeURL, URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, URLConstants.Parameters.SIGNATURE, "*****"); + } + return safeURL; +} +export function sanitizeHeaders(originalHeader) { + const headers = new HttpHeaders(); + for (const header of originalHeader.headersArray()) { + if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(header.name, "*****"); + } + else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(header.name, sanitizeURL(header.value)); + } + else { + headers.set(header.name, header.value); + } + } + return headers; +} +/** + * If two strings are equal when compared case insensitive. + * + * @param str1 - + * @param str2 - + */ +export function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +export function getAccountNameFromUrl(url) { + const parsedUrl = URLBuilder.parse(url); + let accountName; + try { + if (parsedUrl.getHost().split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.getHost().split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.getPath().split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; + } + return accountName; + } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); + } +} +export function isIpEndpointStyle(parsedUrl) { + if (parsedUrl.getHost() === undefined) { + return false; + } + const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? 
"" : ":" + parsedUrl.getPort()); + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || + (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()))); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +export function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +export function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +export function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. + * + * @param textConfiguration - + */ +export function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +export function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. 
+ return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +/** + * Attach a TokenCredential to an object. + * + * @param thing - + * @param credential - + */ +export function attachCredential(thing, credential) { + thing.credential = credential; + return thing; +} +export function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +export function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +export function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +export function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +export function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; + } + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } +} +/** + * Escape the blobName but keep path separator ('/'). 
+ */ +export function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); +} +//# sourceMappingURL=utils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map new file mode 100644 index 0000000..30f8931 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.common.js","sourceRoot":"","sources":["../../../../src/utils/utils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,UAAU,EAAmB,MAAM,kBAAkB,CAAC;AAiBpF,OAAO,EACL,2BAA2B,EAC3B,eAAe,EACf,cAAc,EACd,YAAY,GACb,MAAM,aAAa,CAAC;AAiBrB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmDG;AACH,MAAM,UAAU,aAAa,CAAC,GAAW;IACvC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IAC/B,IAAI,GAAG,IAAI,IAAI,GAAG,CAAC;IAEnB,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC;IACpB,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAExB,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAWD,SAAS,4BAA4B,CAAC,gBAAwB;IAC5D,gCAAgC;IAChC,sKAAsK;IACtK,IAAI,QAAQ,GAAG,EAAE,CAAC;IAClB,IAAI,gBAAgB,CAAC,MAAM,CAAC,6BAA6B,CAAC,KAAK,CAAC,CAAC,EAAE;QACjE,4FAA4F;QAC5F,MAAM,gBAAgB,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACrD,KAAK,MAAM,OAAO,IAAI,gBAAgB,EAAE;YACtC,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,iCAAiC,CAAE,CAAC,CAAC,CAAC,CAAC;aACxE;SACF;KACF;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,MAAM,UAAU,oBAAoB,CAClC,gBAAwB,EACxB,QAM2B;IAE3B,MAAM,QAAQ,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC7C,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;YACvC,OAAO,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAE,CAAC,CAAC,CAAC,CAAC;SACrD;KACF;IACD,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,4BAA4B,CAAC,gBAAwB;IACnE,IAAI,QAAQ,GAAG,EAAE,CAAC;IAElB,IAAI,gBAAgB,CAAC,UAAU,CAAC,4BAA4B,CAAC,EAAE;QAC7D,gCAAgC;QAChC,QAAQ,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QAC1D,gBAAgB,GAAG,2BAA2B,CAAC;KAChD;IAED,yDAAyD;IACzD,IAAI,YAAY,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,cAAc,CAAC,CAAC;IAC1E,uCAAuC;IACvC,kGAAkG;IAClG,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;IAErF,IACE,gBAAgB,CAAC,MAAM,CAAC,2BAA2B,CAAC,KAAK,CAAC,CAAC;QAC3D,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,EAC7C;QACA,4BAA4B;QAE5B,IAAI,wBAAwB,GAAG,EAAE,CAAC;QAClC,IAAI,WAAW,GAAG,EAAE,CAAC;QACrB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QACrD,IAAI,cAAc,GAAG,EAAE,CAAC;QAExB,2BAA2B;QAC3B,WAAW,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;QACpE,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,gBAAgB,EAAE,YAAY,CAAC,EAAE,QAAQ,CAAC,CAAC;QAEzF,IAAI,CAAC,YAAY,EAAE;YACjB,+DAA+D;YAC/D,6FAA6F;YAE7F,wBAAwB,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,0BAA0B,CAAC,CAAC;YAC9F,MAAM,QAAQ,GAAG,wBAAyB,CAAC,WAAW,EAAE,CAAC;YACzD,IAAI,QAAQ,KAAK,OAAO,IAAI,QAAQ,KAAK,MAAM,EAAE;gBAC/C,MAAM,IAAI,KAAK,CACb,iGAAiG,CAClG,CAAC;aACH;YAED,cAAc,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;YAC1E,IAAI,CAAC,cAAc,EAAE;gBACnB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;aAC7E;YACD,YAAY,GAAG,GAAG,wBAAwB,MAAM,WAAW,SAAS,cAAc,EAAE,CAAC;SACtF;QAED,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;SAC1E;aAAM,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,
sDAAsD,CAAC,CAAC;SACzE;QAED,OAAO;YACL,IAAI,EAAE,mBAAmB;YACzB,GAAG,EAAE,YAAY;YACjB,WAAW;YACX,UAAU;YACV,QAAQ;SACT,CAAC;KACH;SAAM;QACL,wBAAwB;QAExB,MAAM,UAAU,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,uBAAuB,CAAC,CAAC;QACnF,IAAI,WAAW,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;QACxE,4DAA4D;QAC5D,IAAI,CAAC,WAAW,EAAE;YAChB,WAAW,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAAC;SACnD;QACD,IAAI,CAAC,YAAY,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;aAAM,IAAI,CAAC,UAAU,EAAE;YACtB,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;SACxF;QAED,OAAO,EAAE,IAAI,EAAE,eAAe,EAAE,GAAG,EAAE,YAAY,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC;KAC9E;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,MAAM,CAAC,IAAY;IAC1B,OAAO,kBAAkB,CAAC,IAAI,CAAC;SAC5B,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,uBAAuB;SAC5C,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,iBAAiB;SACtC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,qBAAqB;AAChD,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY;IACvD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IAC/B,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACjF,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAExB,MAAM,aAAa,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC,CAAC;IAEpD,OAAO,aAAa,CAAC,QAAQ,EAAE,CAAC;AAClC,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY,EAAE,KAAc;IACvE,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,SAAS,CAAC,iBAAiB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;IACzC,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY;IACvD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,sBAAsB,CAAC,IAAI,CAAC,CAAC;AAChD,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW,EAAE,IAAY;IAClD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACxB,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,OAAO,EAAE,CAAC;AAC7B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,GAAW;IACtC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC;AAC/B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,GAAW;IAC5C,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,MAAM,UAAU,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IACvC,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAC;KACzD;IAED,IAAI,WAAW,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC;IAC7C,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,IAAI,WAAW,KAAK,EAAE,EAAE;QACtB,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,WAAW,EAAE,CAAC,CAAC,qCAAqC;KACnH;IAED,OAAO,GAAG,UAAU,GAAG,WAAW,EAAE,CAAC;AACvC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,GAAW;IACvC,IAAI,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;IACnD,IAAI,CAAC,WAAW,EAAE;QAChB,OAAO,EAAE,CAAC;KACX;IAED,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC;IAEhF,IAAI,eAAe,GAAa,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACvD,eAAe,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC,KAAa,EAAE,EAAE;QACzD,MAAM,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACxC,MAAM,gBAAgB,GAAG,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;QAChD,OAAO,CACL,YAAY,GAAG,CAAC,IAAI,YAAY,KAAK,gBAAgB,IAAI,gBAAgB,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAC7F,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,MAAM,OAAO,GAA8B,EAAE,CAAC;IAC9C,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;QAC5C,MAAM,YAAY,GAAG,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC/
C,MAAM,GAAG,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;QACpC,MAAM,KAAK,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;QACtC,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KACtB;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,gBAAgB,CAAC,GAAW,EAAE,UAAkB;IAC9D,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,KAAK,GAAG,SAAS,CAAC,QAAQ,EAAE,CAAC;IACjC,IAAI,KAAK,EAAE;QACT,KAAK,IAAI,GAAG,GAAG,UAAU,CAAC;KAC3B;SAAM;QACL,KAAK,GAAG,UAAU,CAAC;KACpB;IAED,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC1B,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,oBAAoB,CAAC,IAAU,EAAE,mBAA4B,IAAI;IAC/E,iEAAiE;IACjE,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;IAEtC,OAAO,gBAAgB;QACrB,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,MAAM,GAAG,GAAG;QAC/D,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;AAC3D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,OAAe;IAC1C,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC3E,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,aAAqB;IAChD,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;AACzF,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,aAAqB,EAAE,UAAkB;IACvE,mEAAmE;IACnE,MAAM,qBAAqB,GAAG,EAAE,CAAC;IAEjC,4EAA4E;IAC5E,MAAM,mBAAmB,GAAG,CAAC,CAAC;IAE9B,MAAM,6BAA6B,GAAG,qBAAqB,GAAG,mBAAmB,CAAC;IAElF,IAAI,aAAa,CAAC,MAAM,GAAG,6BAA6B,EAAE;QACxD,aAAa,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC,EAAE,6BAA6B,CAAC,CAAC;KACvE;IACD,MAAM,GAAG,GACP,aAAa;QACb,QAAQ,CAAC,UAAU,CAAC,QAAQ,EAAE,EAAE,qBAAqB,GAAG,aAAa,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACrF,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,KAAK,CACzB,QAAgB,EAChB,OAAyB,EACzB,UAAkB;IAElB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,2CAA2C;QAC3C,IAAI,OAAY,CAAC;QAEjB,MAAM,YAAY,GAAG,GAAG,EAAE;YACxB,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,YAAY,CAAC,OAAO,CAAC,CAAC;aACvB;YACD,MAAM,CAAC,UAAU,CAAC,CAAC;QACrB,CAAC,CAAC;QAEF,MAAM,cAAc,GAAG,GAAG,EAAE;YAC1B,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,OAAO,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;aACpD;YACD,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC;QAEF,OAAO,GAAG,UAAU,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;QAE/C,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;SACjD;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,QAAQ,CACtB,aAAqB,EACrB,YAAoB,EACpB,YAAoB,GAAG;IAEvB,+EAA+E;IAC/E,IAAI,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE;QAC7B,OAAO,aAAa,CAAC,QAAQ,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;KACxD;IAED,SAAS,GAAG,SAAS,IAAI,GAAG,CAAC;IAC7B,IAAI,aAAa,CAAC,MAAM,GAAG,YAAY,EAAE;QACvC,OAAO,aAAa,CAAC;KACtB;SAAM;QACL,YAAY,GAAG,YAAY,GAAG,aAAa,CAAC,MAAM,CAAC;QACnD,IAAI,YAAY,GAAG,SAAS,CAAC,MAAM,EAAE;YACnC,SAAS,IAAI,SAAS,CAAC,MAAM,CAAC,YAAY,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;SAChE;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,GAAG,aAAa,CAAC;KACzD;AACH,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,GAAW;IACrC,IAAI,OAAO,GAAW,GAAG,CAAC;IAC1B,IAAI,eAAe,CAAC,OAAO,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;QAC/D,OAAO,GAAG,eAAe,CAAC,OAAO,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;KAChF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,MAAM,UAAU,eAAe,CAAC,cAA2B;IACzD,MAAM,OAAO,GAAgB,IAAI,WAAW,EAAE,CAAC;IAC/C,KAAK,MAAM,MAAM,IAAI,cAAc,CAAC,YAAY,EAAE,EAAE;QAClD,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,eAAe,CAAC,aAAa,CAAC,WAAW,EAAE,EAAE;YAC7E,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;SACnC;aAAM,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,eAAe,CAAC,gBAAgB,EAAE;YACzE,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;SACrD;aAAM;
YACL,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACxC;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AACD;;;;;GAKG;AACH,MAAM,UAAU,MAAM,CAAC,IAAY,EAAE,IAAY;IAC/C,OAAO,IAAI,CAAC,iBAAiB,EAAE,KAAK,IAAI,CAAC,iBAAiB,EAAE,CAAC;AAC/D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,qBAAqB,CAAC,GAAW;IAC/C,MAAM,SAAS,GAAe,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACpD,IAAI,WAAW,CAAC;IAChB,IAAI;QACF,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;YACjD,yEAAyE;YACzE,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;SAClD;aAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;YACvC,iFAAiF;YACjF,2GAA2G;YAC3G,mCAAmC;YACnC,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;SAClD;aAAM;YACL,qEAAqE;YACrE,WAAW,GAAG,EAAE,CAAC;SAClB;QACD,OAAO,WAAW,CAAC;KACpB;IAAC,OAAO,KAAU,EAAE;QACnB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;KAC7E;AACH,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,SAAqB;IACrD,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,EAAE;QACrC,OAAO,KAAK,CAAC;KACd;IAED,MAAM,IAAI,GACR,SAAS,CAAC,OAAO,EAAG,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC;IAE9F,sFAAsF;IACtF,wFAAwF;IACxF,wEAAwE;IACxE,wFAAwF;IACxF,OAAO,CACL,mJAAmJ,CAAC,IAAI,CACtJ,IAAI,CACL;QACD,CAAC,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAG,CAAC,CAAC,CACrF,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,gBAAgB,CAAC,IAAW;IAC1C,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,QAAQ,GAAG,EAAE,CAAC;IACpB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;QACtB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;YACxB,QAAQ,CAAC,IAAI,CAAC,GAAG,kBAAkB,CAAC,GAAG,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;SAC1E;KACF;IAED,OAAO,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,IAAW;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,GAAG,GAAa;QACpB,UAAU,EAAE,EAAE;KACf,CAAC;IAEF,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;QACtB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;YACxB,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC;gBAClB,GAAG;gBACH,KAAK;aACN,CAAC,CAAC;SACJ;KACF;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,MAAM,CAAC,IAAe;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,GAAG,GAAS,EAAE,CAAC;IACrB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,UAAU,EAAE;QACrC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC;KAClC;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAClC,iBAIiC;IAEjC,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,OAAO,SAAS,CAAC;KAClB;IAED,QAAQ,iBAAiB,CAAC,IAAI,EAAE;QAC9B,KAAK,KAAK;YACR,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,WAAW;oBACjB,0BAA0B,EAAE;wBAC1B,eAAe,EAAE,iBAAiB,CAAC,eAAe,IAAI,GAAG;wBACzD,UAAU,EAAE,iBAAiB,CAAC,UAAU,IAAI,EAAE;wBAC9C,eAAe,EAAE,iBAAiB,CAAC,eAAe;wBAClD,UAAU,EAAE,iBAAiB,CAAC,eAAe,IAAI,EAAE;wBACnD,cAAc,EAAE,iBAAiB,CAAC,UAAU,IAAI,KAAK;qBACtD;iBACF;aACF,CAAC;QACJ,KAAK,MAAM;YACT,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,MAAM;oBACZ,qBAAqB,EAAE;wBACrB,eAAe,EAAE,iBAAiB,CAAC,eAAe;qBACnD;iBACF;aACF,CAAC;QACJ,KAAK,OAAO;YACV,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,OAAO;oBACb,kBAAkB,EAAE;wBAClB,MAAM,EAAE,iBAAiB,CAAC,MAAM;qBACjC;iBACF;aACF,CAAC;QACJ,KAAK,SAAS;YACZ,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,SAAS;iBAChB;aACF,CAAC;QAEJ;YACE,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;KACtD;AACH,CAAC;AAED,MAAM,UAAU,4BAA4B,CAC1C,uBAAgD;IAEhD,IAAI,CAAC,uBAAuB,EAAE;QAC5B,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,WAAW,IAAI,uBAAuB,EAAE;QAC1C,+FAA+F;QAC/F,sFAAsF;QACtF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,
YAAY,GAA8B,EAAE,CAAC;IACnD,KAAK,MAAM,GAAG,IAAI,uBAAuB,EAAE;QACzC,MAAM,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC3B,MAAM,YAAY,GAAG,KAAK,CAAC;QAC3B,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;YACnC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;SAChD;QACD,MAAM,IAAI,GAA0B;YAClC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC;YACd,iBAAiB,EAAE,uBAAuB,CAAC,GAAG,CAA4B;SAC3E,CAAC;QACF,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,CAAC,QAAQ,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACnF,IAAI,WAAW,GAAG,CAAC,CAAC,EAAE;YACpB,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAC5C;aAAM;YACL,YAAY,CAAC,IAAI,CAAC;gBAChB,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;gBAChB,KAAK,EAAE,CAAC,IAAI,CAAC;aACd,CAAC,CAAC;SACJ;KACF;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,gBAAgB,CAAI,KAAQ,EAAE,UAA2B;IACtE,KAAa,CAAC,UAAU,GAAG,UAAU,CAAC;IACvC,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,yBAAyB,CACvC,iBAAqC;IAErC,OAAO,iBAAiB,CAAC,CAAC,CAAC,iBAAiB,CAAC,MAAM,GAAG,GAAG,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;AAClG,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,IAAc;IAC7C,IAAI,IAAI,CAAC,OAAO,EAAE;QAChB,OAAO,kBAAkB,CAAC,IAAI,CAAC,OAAQ,CAAC,CAAC;KAC1C;SAAM;QACL,OAAO,IAAI,CAAC,OAAQ,CAAC;KACtB;AACH,CAAC;AAED,MAAM,UAAU,qCAAqC,CACnD,gBAA8C;IAE9C,uCACK,gBAAgB,KACnB,OAAO,EAAE;YACP,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;gBACpE,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;gBACF,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC;SACH,IACD;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,gBAAmD;;IAEnD,uCACK,gBAAgB,KACnB,OAAO,EAAE;YACP,YAAY,EAAE,MAAA,gBAAgB,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,EAAE,EAAE;gBAC9E,MAAM,UAAU,mCACX,kBAAkB,KACrB,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC,GAChD,CAAC;gBACF,OAAO,UAAU,CAAC;YACpB,CAAC,CAAC;YACF,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;gBACpE,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;gBACF,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC;SACH,IACD;AACJ,CAAC;AAED,MAAM,SAAS,CAAC,CAAC,yBAAyB,CACxC,oBAA4D;IAE5D,IAAI,SAAS,GAAgB,EAAE,CAAC;IAChC,IAAI,UAAU,GAAiB,EAAE,CAAC;IAElC,IAAI,oBAAoB,CAAC,SAAS;QAAE,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;IAC/E,IAAI,oBAAoB,CAAC,UAAU;QAAE,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;IAElF,IAAI,cAAc,GAAG,CAAC,CAAC;IACvB,IAAI,eAAe,GAAG,CAAC,CAAC;IAExB,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,IAAI,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE;QAC/E,IAAI,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK,EAAE;YACvE,MAAM;gBACJ,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;gBACtC,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;gBAClC,OAAO,EAAE,KAAK;aACf,CAAC;YACF,EAAE,cAAc,CAAC;SAClB;aAAM;YACL,MAAM;gBACJ,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;gBACxC,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;gBACpC,OAAO,EAAE,IAAI;aACd,CAAC;YACF,EAAE,eAAe,CAAC;SACnB;KACF;IAED,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,cAAc,EAAE;QAC1D,MAAM;YACJ,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;YACtC,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;YAClC,OAAO,EAAE,KAAK;SACf,CAAC;KACH;IAED,OAAO,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,eAAe,EAAE;QAC7D,MAAM;YACJ,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;YACxC,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;YACpC,OAAO,EAAE,IAAI;SACd,CAAC;KACH;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,UAAU,CAAC,QAAgB;IACzC,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAClC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,KAAK,CAAC,CAAC,CAAC,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACzC;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,CAAC","sourcesContent":["// 
Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpHeaders, isNode, URLBuilder, TokenCredential } from \"@azure/core-http\";\n\nimport {\n BlobQueryArrowConfiguration,\n BlobQueryCsvTextConfiguration,\n BlobQueryJsonTextConfiguration,\n BlobQueryParquetConfiguration,\n} from \"../Clients\";\nimport {\n QuerySerialization,\n BlobTags,\n BlobName,\n ListBlobsFlatSegmentResponse,\n ListBlobsHierarchySegmentResponse,\n PageRange,\n ClearRange,\n} from \"../generated/src/models\";\nimport {\n DevelopmentConnectionString,\n HeaderConstants,\n PathStylePorts,\n URLConstants,\n} from \"./constants\";\nimport {\n Tags,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n HttpAuthorization,\n} from \"../models\";\nimport {\n ListBlobsFlatSegmentResponseModel,\n BlobItemInternal as BlobItemInternalModel,\n ListBlobsHierarchySegmentResponseModel,\n BlobPrefix as BlobPrefixModel,\n PageBlobGetPageRangesDiffResponseModel,\n PageRangeInfo,\n} from \"../generatedModels\";\n\n/**\n * Reserved URL characters must be properly escaped for Storage services like Blob or File.\n *\n * ## URL encode and escape strategy for JS SDKs\n *\n * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.\n * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL\n * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.\n *\n * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.\n *\n * This is what legacy V2 SDK does, simple and works for most of the cases.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%253A\" and send to server. A blob named \"b%3A\" will be created.\n *\n * But this strategy will make it not possible to create a blob with \"?\" in it's name. Because when customer URL string is\n * \"http://account.blob.core.windows.net/con/blob?name\", the \"?name\" will be treated as URL paramter instead of blob name.\n * If customer URL string is \"http://account.blob.core.windows.net/con/blob%3Fname\", a blob named \"blob%3Fname\" will be created.\n * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.\n * We cannot accept a SDK cannot create a blob name with \"?\". So we implement strategy two:\n *\n * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.\n *\n * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will escape \":\" like \"http://account.blob.core.windows.net/con/b%3A\" and send to server. 
A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%3A\" to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%253A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%253A\" to server. A blob named \"b%3A\" will be created.\n *\n * This strategy gives us flexibility to create with any special characters. But \"%\" will be treated as a special characters, if the URL string\n * is not encoded, there shouldn't a \"%\" in the URL string, otherwise the URL is not a valid URL.\n * If customer needs to create a blob with \"%\" in it's blob name, use \"%25\" instead of \"%\". Just like above 3rd sample.\n * And following URL strings are invalid:\n * - \"http://account.blob.core.windows.net/con/b%\"\n * - \"http://account.blob.core.windows.net/con/b%2\"\n * - \"http://account.blob.core.windows.net/con/b%G\"\n *\n * Another special character is \"?\", use \"%2F\" to represent a blob name with \"?\" in a URL string.\n *\n * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`\n *\n * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata\n *\n * @param url -\n */\nexport function escapeURLPath(url: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path || \"/\";\n\n path = escape(path);\n urlParsed.setPath(path);\n\n return urlParsed.toString();\n}\n\nexport interface ConnectionString {\n kind: \"AccountConnString\" | \"SASConnString\";\n url: string;\n accountName: string;\n accountKey?: any;\n accountSas?: string;\n proxyUri?: string; // Development Connection String may contain proxyUri\n}\n\nfunction getProxyUriFromDevConnString(connectionString: string): string {\n // Development Connection String\n // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key\n let proxyUri = \"\";\n if (connectionString.search(\"DevelopmentStorageProxyUri=\") !== -1) {\n // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri\n const matchCredentials = connectionString.split(\";\");\n for (const element of matchCredentials) {\n if (element.trim().startsWith(\"DevelopmentStorageProxyUri=\")) {\n proxyUri = element.trim().match(\"DevelopmentStorageProxyUri=(.*)\")![1];\n }\n }\n }\n return proxyUri;\n}\n\nexport function getValueInConnString(\n connectionString: string,\n argument:\n | \"BlobEndpoint\"\n | \"AccountName\"\n | \"AccountKey\"\n | \"DefaultEndpointsProtocol\"\n | \"EndpointSuffix\"\n | \"SharedAccessSignature\"\n): string {\n const elements = connectionString.split(\";\");\n for (const element of elements) {\n if (element.trim().startsWith(argument)) {\n return element.trim().match(argument + \"=(.*)\")![1];\n }\n }\n return \"\";\n}\n\n/**\n * Extracts the parts of an Azure Storage account 
connection string.\n *\n * @param connectionString - Connection string.\n * @returns String key value pairs of the storage account's url and credentials.\n */\nexport function extractConnectionStringParts(connectionString: string): ConnectionString {\n let proxyUri = \"\";\n\n if (connectionString.startsWith(\"UseDevelopmentStorage=true\")) {\n // Development connection string\n proxyUri = getProxyUriFromDevConnString(connectionString);\n connectionString = DevelopmentConnectionString;\n }\n\n // Matching BlobEndpoint in the Account connection string\n let blobEndpoint = getValueInConnString(connectionString, \"BlobEndpoint\");\n // Slicing off '/' at the end if exists\n // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)\n blobEndpoint = blobEndpoint.endsWith(\"/\") ? blobEndpoint.slice(0, -1) : blobEndpoint;\n\n if (\n connectionString.search(\"DefaultEndpointsProtocol=\") !== -1 &&\n connectionString.search(\"AccountKey=\") !== -1\n ) {\n // Account connection string\n\n let defaultEndpointsProtocol = \"\";\n let accountName = \"\";\n let accountKey = Buffer.from(\"accountKey\", \"base64\");\n let endpointSuffix = \"\";\n\n // Get account name and key\n accountName = getValueInConnString(connectionString, \"AccountName\");\n accountKey = Buffer.from(getValueInConnString(connectionString, \"AccountKey\"), \"base64\");\n\n if (!blobEndpoint) {\n // BlobEndpoint is not present in the Account connection string\n // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`\n\n defaultEndpointsProtocol = getValueInConnString(connectionString, \"DefaultEndpointsProtocol\");\n const protocol = defaultEndpointsProtocol!.toLowerCase();\n if (protocol !== \"https\" && protocol !== \"http\") {\n throw new Error(\n \"Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'\"\n );\n }\n\n endpointSuffix = getValueInConnString(connectionString, \"EndpointSuffix\");\n if (!endpointSuffix) {\n throw new Error(\"Invalid EndpointSuffix in the provided Connection String\");\n }\n blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n }\n\n if (!accountName) {\n throw new Error(\"Invalid AccountName in the provided Connection String\");\n } else if (accountKey.length === 0) {\n throw new Error(\"Invalid AccountKey in the provided Connection String\");\n }\n\n return {\n kind: \"AccountConnString\",\n url: blobEndpoint,\n accountName,\n accountKey,\n proxyUri,\n };\n } else {\n // SAS connection string\n\n const accountSas = getValueInConnString(connectionString, \"SharedAccessSignature\");\n let accountName = getValueInConnString(connectionString, \"AccountName\");\n // if accountName is empty, try to read it from BlobEndpoint\n if (!accountName) {\n accountName = getAccountNameFromUrl(blobEndpoint);\n }\n if (!blobEndpoint) {\n throw new Error(\"Invalid BlobEndpoint in the provided SAS Connection String\");\n } else if (!accountSas) {\n throw new Error(\"Invalid SharedAccessSignature in the provided SAS Connection String\");\n }\n\n return { kind: \"SASConnString\", url: blobEndpoint, accountName, accountSas };\n }\n}\n\n/**\n * Internal escape method implemented Strategy Two mentioned in escapeURL() description.\n *\n * @param text -\n */\nfunction escape(text: string): string {\n return encodeURIComponent(text)\n .replace(/%2F/g, \"/\") // Don't escape for \"/\"\n .replace(/'/g, \"%27\") // Escape for \"'\"\n .replace(/\\+/g, \"%20\")\n .replace(/%25/g, \"%\"); // Revert encoded \"%\"\n}\n\n/**\n * Append a string to URL path. Will remove duplicated \"/\" in front of the string\n * when URL path ends with a \"/\".\n *\n * @param url - Source URL string\n * @param name - String to be appended to URL\n * @returns An updated URL string\n */\nexport function appendToURLPath(url: string, name: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path ? (path.endsWith(\"/\") ? `${path}${name}` : `${path}/${name}`) : name;\n urlParsed.setPath(path);\n\n const normalizedUrl = new URL(urlParsed.toString());\n\n return normalizedUrl.toString();\n}\n\n/**\n * Set URL parameter name and value. If name exists in URL parameters, old value\n * will be replaced by name key. 
If not provide value, the parameter will be deleted.\n *\n * @param url - Source URL string\n * @param name - Parameter name\n * @param value - Parameter value\n * @returns An updated URL string\n */\nexport function setURLParameter(url: string, name: string, value?: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setQueryParameter(name, value);\n return urlParsed.toString();\n}\n\n/**\n * Get URL parameter by name.\n *\n * @param url -\n * @param name -\n */\nexport function getURLParameter(url: string, name: string): string | string[] | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getQueryParameterValue(name);\n}\n\n/**\n * Set URL host.\n *\n * @param url - Source URL string\n * @param host - New host string\n * @returns An updated URL string\n */\nexport function setURLHost(url: string, host: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setHost(host);\n return urlParsed.toString();\n}\n\n/**\n * Get URL path from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPath(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getPath();\n}\n\n/**\n * Get URL scheme from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLScheme(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getScheme();\n}\n\n/**\n * Get URL path and query from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPathAndQuery(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n const pathString = urlParsed.getPath();\n if (!pathString) {\n throw new RangeError(\"Invalid url without valid path.\");\n }\n\n let queryString = urlParsed.getQuery() || \"\";\n queryString = queryString.trim();\n if (queryString !== \"\") {\n queryString = queryString.startsWith(\"?\") ? queryString : `?${queryString}`; // Ensure query string start with '?'\n }\n\n return `${pathString}${queryString}`;\n}\n\n/**\n * Get URL query key value pairs from an URL string.\n *\n * @param url -\n */\nexport function getURLQueries(url: string): { [key: string]: string } {\n let queryString = URLBuilder.parse(url).getQuery();\n if (!queryString) {\n return {};\n }\n\n queryString = queryString.trim();\n queryString = queryString.startsWith(\"?\") ? 
queryString.substr(1) : queryString;\n\n let querySubStrings: string[] = queryString.split(\"&\");\n querySubStrings = querySubStrings.filter((value: string) => {\n const indexOfEqual = value.indexOf(\"=\");\n const lastIndexOfEqual = value.lastIndexOf(\"=\");\n return (\n indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1\n );\n });\n\n const queries: { [key: string]: string } = {};\n for (const querySubString of querySubStrings) {\n const splitResults = querySubString.split(\"=\");\n const key: string = splitResults[0];\n const value: string = splitResults[1];\n queries[key] = value;\n }\n\n return queries;\n}\n\n/**\n * Append a string to URL query.\n *\n * @param url - Source URL string.\n * @param queryParts - String to be appended to the URL query.\n * @returns An updated URL string.\n */\nexport function appendToURLQuery(url: string, queryParts: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let query = urlParsed.getQuery();\n if (query) {\n query += \"&\" + queryParts;\n } else {\n query = queryParts;\n }\n\n urlParsed.setQuery(query);\n return urlParsed.toString();\n}\n\n/**\n * Rounds a date off to seconds.\n *\n * @param date -\n * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;\n * If false, YYYY-MM-DDThh:mm:ssZ will be returned.\n * @returns Date string in ISO8061 format, with or without 7 milliseconds component\n */\nexport function truncatedISO8061Date(date: Date, withMilliseconds: boolean = true): string {\n // Date.toISOString() will return like \"2018-10-29T06:34:36.139Z\"\n const dateString = date.toISOString();\n\n return withMilliseconds\n ? dateString.substring(0, dateString.length - 1) + \"0000\" + \"Z\"\n : dateString.substring(0, dateString.length - 5) + \"Z\";\n}\n\n/**\n * Base64 encode.\n *\n * @param content -\n */\nexport function base64encode(content: string): string {\n return !isNode ? btoa(content) : Buffer.from(content).toString(\"base64\");\n}\n\n/**\n * Base64 decode.\n *\n * @param encodedString -\n */\nexport function base64decode(encodedString: string): string {\n return !isNode ? 
atob(encodedString) : Buffer.from(encodedString, \"base64\").toString();\n}\n\n/**\n * Generate a 64 bytes base64 block ID string.\n *\n * @param blockIndex -\n */\nexport function generateBlockID(blockIDPrefix: string, blockIndex: number): string {\n // To generate a 64 bytes base64 string, source string should be 48\n const maxSourceStringLength = 48;\n\n // A blob can have a maximum of 100,000 uncommitted blocks at any given time\n const maxBlockIndexLength = 6;\n\n const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;\n\n if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {\n blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);\n }\n const res =\n blockIDPrefix +\n padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, \"0\");\n return base64encode(res);\n}\n\n/**\n * Delay specified time interval.\n *\n * @param timeInMs -\n * @param aborter -\n * @param abortError -\n */\nexport async function delay(\n timeInMs: number,\n aborter?: AbortSignalLike,\n abortError?: Error\n): Promise {\n return new Promise((resolve, reject) => {\n /* eslint-disable-next-line prefer-const */\n let timeout: any;\n\n const abortHandler = () => {\n if (timeout !== undefined) {\n clearTimeout(timeout);\n }\n reject(abortError);\n };\n\n const resolveHandler = () => {\n if (aborter !== undefined) {\n aborter.removeEventListener(\"abort\", abortHandler);\n }\n resolve();\n };\n\n timeout = setTimeout(resolveHandler, timeInMs);\n\n if (aborter !== undefined) {\n aborter.addEventListener(\"abort\", abortHandler);\n }\n });\n}\n\n/**\n * String.prototype.padStart()\n *\n * @param currentString -\n * @param targetLength -\n * @param padString -\n */\nexport function padStart(\n currentString: string,\n targetLength: number,\n padString: string = \" \"\n): string {\n // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes\n if (String.prototype.padStart) {\n return currentString.padStart(targetLength, padString);\n }\n\n padString = padString || \" \";\n if (currentString.length > targetLength) {\n return currentString;\n } else {\n targetLength = targetLength - currentString.length;\n if (targetLength > padString.length) {\n padString += padString.repeat(targetLength / padString.length);\n }\n return padString.slice(0, targetLength) + currentString;\n }\n}\n\nexport function sanitizeURL(url: string): string {\n let safeURL: string = url;\n if (getURLParameter(safeURL, URLConstants.Parameters.SIGNATURE)) {\n safeURL = setURLParameter(safeURL, URLConstants.Parameters.SIGNATURE, \"*****\");\n }\n\n return safeURL;\n}\n\nexport function sanitizeHeaders(originalHeader: HttpHeaders): HttpHeaders {\n const headers: HttpHeaders = new HttpHeaders();\n for (const header of originalHeader.headersArray()) {\n if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) {\n headers.set(header.name, \"*****\");\n } else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) {\n headers.set(header.name, sanitizeURL(header.value));\n } else {\n headers.set(header.name, header.value);\n }\n }\n\n return headers;\n}\n/**\n * If two strings are equal when compared case insensitive.\n *\n * @param str1 -\n * @param str2 -\n */\nexport function iEqual(str1: string, str2: string): boolean {\n return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();\n}\n\n/**\n * Extracts account name from the url\n * @param url - url to extract the account name from\n * @returns with the account name\n */\nexport 
function getAccountNameFromUrl(url: string): string {\n const parsedUrl: URLBuilder = URLBuilder.parse(url);\n let accountName;\n try {\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n accountName = parsedUrl.getHost()!.split(\".\")[0];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/\n // .getPath() -> /devstoreaccount1/\n accountName = parsedUrl.getPath()!.split(\"/\")[1];\n } else {\n // Custom domain case: \"https://customdomain.com/containername/blob\".\n accountName = \"\";\n }\n return accountName;\n } catch (error: any) {\n throw new Error(\"Unable to extract accountName with provided information.\");\n }\n}\n\nexport function isIpEndpointStyle(parsedUrl: URLBuilder): boolean {\n if (parsedUrl.getHost() === undefined) {\n return false;\n }\n\n const host =\n parsedUrl.getHost()! + (parsedUrl.getPort() === undefined ? \"\" : \":\" + parsedUrl.getPort());\n\n // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.\n // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part.\n // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.\n // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.\n return (\n /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])(\\.(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])){3}(:[0-9]+)?$/.test(\n host\n ) ||\n (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()!))\n );\n}\n\n/**\n * Convert Tags to encoded string.\n *\n * @param tags -\n */\nexport function toBlobTagsString(tags?: Tags): string | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const tagPairs = [];\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);\n }\n }\n\n return tagPairs.join(\"&\");\n}\n\n/**\n * Convert Tags type to BlobTags.\n *\n * @param tags -\n */\nexport function toBlobTags(tags?: Tags): BlobTags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: BlobTags = {\n blobTagSet: [],\n };\n\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n res.blobTagSet.push({\n key,\n value,\n });\n }\n }\n return res;\n}\n\n/**\n * Covert BlobTags to Tags type.\n *\n * @param tags -\n */\nexport function toTags(tags?: BlobTags): Tags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: Tags = {};\n for (const blobTag of tags.blobTagSet) {\n res[blobTag.key] = blobTag.value;\n }\n return res;\n}\n\n/**\n * Convert BlobQueryTextConfiguration to QuerySerialization type.\n *\n * @param textConfiguration -\n */\nexport function toQuerySerialization(\n textConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration\n | BlobQueryParquetConfiguration\n): QuerySerialization | undefined {\n if (textConfiguration === undefined) {\n return undefined;\n }\n\n switch (textConfiguration.kind) {\n case \"csv\":\n return {\n format: {\n type: \"delimited\",\n delimitedTextConfiguration: {\n columnSeparator: 
textConfiguration.columnSeparator || \",\",\n fieldQuote: textConfiguration.fieldQuote || \"\",\n recordSeparator: textConfiguration.recordSeparator,\n escapeChar: textConfiguration.escapeCharacter || \"\",\n headersPresent: textConfiguration.hasHeaders || false,\n },\n },\n };\n case \"json\":\n return {\n format: {\n type: \"json\",\n jsonTextConfiguration: {\n recordSeparator: textConfiguration.recordSeparator,\n },\n },\n };\n case \"arrow\":\n return {\n format: {\n type: \"arrow\",\n arrowConfiguration: {\n schema: textConfiguration.schema,\n },\n },\n };\n case \"parquet\":\n return {\n format: {\n type: \"parquet\",\n },\n };\n\n default:\n throw Error(\"Invalid BlobQueryTextConfiguration.\");\n }\n}\n\nexport function parseObjectReplicationRecord(\n objectReplicationRecord?: Record\n): ObjectReplicationPolicy[] | undefined {\n if (!objectReplicationRecord) {\n return undefined;\n }\n\n if (\"policy-id\" in objectReplicationRecord) {\n // If the dictionary contains a key with policy id, we are not required to do any parsing since\n // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.\n return undefined;\n }\n\n const orProperties: ObjectReplicationPolicy[] = [];\n for (const key in objectReplicationRecord) {\n const ids = key.split(\"_\");\n const policyPrefix = \"or-\";\n if (ids[0].startsWith(policyPrefix)) {\n ids[0] = ids[0].substring(policyPrefix.length);\n }\n const rule: ObjectReplicationRule = {\n ruleId: ids[1],\n replicationStatus: objectReplicationRecord[key] as ObjectReplicationStatus,\n };\n const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]);\n if (policyIndex > -1) {\n orProperties[policyIndex].rules.push(rule);\n } else {\n orProperties.push({\n policyId: ids[0],\n rules: [rule],\n });\n }\n }\n return orProperties;\n}\n\n/**\n * Attach a TokenCredential to an object.\n *\n * @param thing -\n * @param credential -\n */\nexport function attachCredential(thing: T, credential: TokenCredential): T {\n (thing as any).credential = credential;\n return thing;\n}\n\nexport function httpAuthorizationToString(\n httpAuthorization?: HttpAuthorization\n): string | undefined {\n return httpAuthorization ? 
httpAuthorization.scheme + \" \" + httpAuthorization.value : undefined;\n}\n\nexport function BlobNameToString(name: BlobName): string {\n if (name.encoded) {\n return decodeURIComponent(name.content!);\n } else {\n return name.content!;\n }\n}\n\nexport function ConvertInternalResponseOfListBlobFlat(\n internalResponse: ListBlobsFlatSegmentResponse\n): ListBlobsFlatSegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nexport function ConvertInternalResponseOfListBlobHierarchy(\n internalResponse: ListBlobsHierarchySegmentResponse\n): ListBlobsHierarchySegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefixModel = {\n ...blobPrefixInternal,\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nexport function* ExtractPageRangeInfoItems(\n getPageRangesSegment: PageBlobGetPageRangesDiffResponseModel\n): IterableIterator {\n let pageRange: PageRange[] = [];\n let clearRange: ClearRange[] = [];\n\n if (getPageRangesSegment.pageRange) pageRange = getPageRangesSegment.pageRange;\n if (getPageRangesSegment.clearRange) clearRange = getPageRangesSegment.clearRange;\n\n let pageRangeIndex = 0;\n let clearRangeIndex = 0;\n\n while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) {\n if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n ++pageRangeIndex;\n } else {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: clearRange[clearRangeIndex].end,\n isClear: true,\n };\n ++clearRangeIndex;\n }\n }\n\n for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n }\n\n for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: clearRange[clearRangeIndex].end,\n isClear: true,\n };\n }\n}\n\n/**\n * Escape the blobName but keep path separator ('/').\n */\nexport function EscapePath(blobName: string): string {\n const split = blobName.split(\"/\");\n for (let i = 0; i < split.length; i++) {\n split[i] = encodeURIComponent(split[i]);\n }\n return split.join(\"/\");\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js new file mode 100644 index 0000000..849508e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as fs from "fs"; +import * as util from "util"; +import { REQUEST_TIMEOUT } from "./constants"; +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. 
+ * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +export async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); + stream.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +export async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * Reads a readable stream into a buffer. + * + * @param stream - A Node.js Readable stream + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + */ +export async function streamToBuffer3(readableStream, encoding) { + return new Promise((resolve, reject) => { + const chunks = []; + readableStream.on("data", (data) => { + chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding)); + }); + readableStream.on("end", () => { + resolve(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. 
+ */ +export async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +export const fsStat = util.promisify(fs.stat); +export const fsCreateReadStream = fs.createReadStream; +//# sourceMappingURL=utils.node.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map new file mode 100644 index 0000000..467ce63 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.node.js","sourceRoot":"","sources":["../../../../src/utils/utils.node.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,EAAE,eAAe,EAAE,MAAM,aAAa,CAAC;AAE9C;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,cAAc,CAClC,MAA6B,EAC7B,MAAc,EACd,MAAc,EACd,GAAW,EACX,QAAyB;IAEzB,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,qBAAqB;IAClC,MAAM,KAAK,GAAG,GAAG,GAAG,MAAM,CAAC,CAAC,wCAAwC;IAEpE,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,MAAM,OAAO,GAAG,UAAU,CACxB,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC,EACxE,eAAe,CAChB,CAAC;QAEF,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;YACzB,IAAI,GAAG,IAAI,KAAK,EAAE;gBAChB,YAAY,CAAC,OAAO,CAAC,CAAC;gBACtB,OAAO,EAAE,CAAC;gBACV,OAAO;aACR;YAED,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;aACR;YACD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;aACtC;YAED,qCAAqC;YACrC,MAAM,WAAW,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC;YAE5E,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,WAAW,CAAC,CAAC;YACnF,GAAG,IAAI,WAAW,CAAC;QACrB,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YACpB,YAAY,CAAC,OAAO,CAAC,CAAC;YACtB,IAAI,GAAG,GAAG,KAAK,EAAE;gBACf,MAAM,CACJ,IAAI,KAAK,CACP,+DAA+D,GAAG,gBAAgB,KAAK,EAAE,CAC1F,CACF,CAAC;aACH;YACD,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACzB,YAAY,CAAC,OAAO,CAAC,CAAC;YACtB,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,MAA6B,EAC7B,MAAc,EACd,QAAyB;IAEzB,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,qBAAqB;IAClC,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC;IAEjC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC7C,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;YACzB,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;aACR;YACD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;aACtC;YAED,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,UAAU,EAAE;gBACnC,MAAM,CAAC,IAAI,KAAK,CAAC,4CAA4C,UAAU,EAAE,CAAC,CAAC,CAAC;gBAC5E,OAAO;aACR;YAED,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,EAAE,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;YAC5C,GAAG,IAAI,KAAK,CAAC,MAAM,CAAC;QACtB,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YACpB,OAAO,CAAC,GAAG,CAAC,CAAC;QACf,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC7B,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,cAAqC,EACrC,QAAyB;IAEzB,OAAO,IAAI,
OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,MAAM,GAAa,EAAE,CAAC;QAC5B,cAAc,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAqB,EAAE,EAAE;YAClD,MAAM,CAAC,IAAI,CAAC,IAAI,YAAY,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;QAC3E,CAAC,CAAC,CAAC;QACH,cAAc,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YAC5B,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;QACjC,CAAC,CAAC,CAAC;QACH,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CACzC,EAAyB,EACzB,IAAY;IAEZ,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,MAAM,EAAE,GAAG,EAAE,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAEtC,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAExB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IACd,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC;AAE9C,MAAM,CAAC,MAAM,kBAAkB,GAAG,EAAE,CAAC,gBAAgB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as fs from \"fs\";\nimport * as util from \"util\";\nimport { REQUEST_TIMEOUT } from \"./constants\";\n\n/**\n * Reads a readable stream into buffer. Fill the buffer from offset to end.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param offset - From which position in the buffer to be filled, inclusive\n * @param end - To which position in the buffer to be filled, exclusive\n * @param encoding - Encoding of the Readable stream\n */\nexport async function streamToBuffer(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n offset: number,\n end: number,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const count = end - offset; // Total amount of data needed in stream\n\n return new Promise((resolve, reject) => {\n const timeout = setTimeout(\n () => reject(new Error(`The operation cannot be completed in timeout.`)),\n REQUEST_TIMEOUT\n );\n\n stream.on(\"readable\", () => {\n if (pos >= count) {\n clearTimeout(timeout);\n resolve();\n return;\n }\n\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n // How much data needed in this chunk\n const chunkLength = pos + chunk.length > count ? count - pos : chunk.length;\n\n buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);\n pos += chunkLength;\n });\n\n stream.on(\"end\", () => {\n clearTimeout(timeout);\n if (pos < count) {\n reject(\n new Error(\n `Stream drains before getting enough data needed. 
Data read: ${pos}, data need: ${count}`\n )\n );\n }\n resolve();\n });\n\n stream.on(\"error\", (msg) => {\n clearTimeout(timeout);\n reject(msg);\n });\n });\n}\n\n/**\n * Reads a readable stream into buffer entirely.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n * @throws `RangeError` If buffer size is not big enough.\n */\nexport async function streamToBuffer2(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const bufferSize = buffer.length;\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n if (pos + chunk.length > bufferSize) {\n reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`));\n return;\n }\n\n buffer.fill(chunk, pos, pos + chunk.length);\n pos += chunk.length;\n });\n\n stream.on(\"end\", () => {\n resolve(pos);\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into a buffer.\n *\n * @param stream - A Node.js Readable stream\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n */\nexport async function streamToBuffer3(\n readableStream: NodeJS.ReadableStream,\n encoding?: BufferEncoding\n): Promise {\n return new Promise((resolve, reject) => {\n const chunks: Buffer[] = [];\n readableStream.on(\"data\", (data: Buffer | string) => {\n chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding));\n });\n readableStream.on(\"end\", () => {\n resolve(Buffer.concat(chunks));\n });\n readableStream.on(\"error\", reject);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed.\n *\n * @param rs - The read stream.\n * @param file - Destination file path.\n */\nexport async function readStreamToLocalFile(\n rs: NodeJS.ReadableStream,\n file: string\n): Promise {\n return new Promise((resolve, reject) => {\n const ws = fs.createWriteStream(file);\n\n rs.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"close\", resolve);\n\n rs.pipe(ws);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Promisified version of fs.stat().\n */\nexport const fsStat = util.promisify(fs.stat);\n\nexport const fsCreateReadStream = fs.createReadStream;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js new file mode 100644 index 0000000..c7ae863 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// This file is used as a shim of "BufferScheduler" for some browser bundlers +// when trying to bundle "BufferScheduler" +// "BufferScheduler" class is only available in Node.js runtime +export class BufferScheduler { +} +//# sourceMappingURL=BufferScheduler.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map new file mode 100644 index 0000000..7bf7733 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BufferScheduler.browser.js","sourceRoot":"","sources":["../../../../storage-common/src/BufferScheduler.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,6EAA6E;AAC7E,0CAA0C;AAC1C,+DAA+D;AAC/D,MAAM,OAAO,eAAe;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This file is used as a shim of \"BufferScheduler\" for some browser bundlers\n// when trying to bundle \"BufferScheduler\"\n// \"BufferScheduler\" class is only available in Node.js runtime\nexport class BufferScheduler {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js new file mode 100644 index 0000000..0d92285 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js @@ -0,0 +1,252 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { EventEmitter } from "events"; +import { PooledBuffer } from "./PooledBuffer"; +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +export class BufferScheduler { + /** + * Creates an instance of BufferScheduler. 
+ * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. + */ + this.emitter = new EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. 
+ * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } +} +//# sourceMappingURL=BufferScheduler.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map new file mode 100644 index 0000000..a6b2161 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BufferScheduler.js","sourceRoot":"","sources":["../../../../storage-common/src/BufferScheduler.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAEtC,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAW9C;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,OAAO,eAAe;IAuF1B;;;;;;;;;;;OAWG;IACH,YACE,QAAkB,EAClB,UAAkB,EAClB,UAAkB,EAClB,eAAgC,EAChC,WAAmB,EACnB,QAAyB;QAlF3B;;WAEG;QACc,YAAO,GAAiB,IAAI,YAAY,EAAE,CAAC;QAO5D;;WAEG;QACK,WAAM,GAAW,CAAC,CAAC;QAE3B;;WAEG;QACK,gBAAW,GAAY,KAAK,CAAC;QAErC;;WAEG;QACK,YAAO,GAAY,KAAK,CAAC;QAEjC;;WAEG;QACK,8BAAyB,GAAW,CAAC,CAAC;QAO9C;;WAEG;QACK,eAAU,GAAW,CAAC,CAAC;QAE/B;;;;;;WAMG;QACK,wBAAmB,GAAa,EAAE,CAAC;QAE3C;;WAEG;QACK,qBAAgB,GAAW,CAAC,CAAC;QAErC;;WAEG;QACK,aAAQ,GAAmB,EAAE,CAAC;QAEtC;;WAEG;QACK,aAAQ,GAAmB,EAAE,CAAC;QAsBpC,IAAI,UAAU,IAAI,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,EAAE,CAAC,CAAC;SACpF;QAED,IAAI,UAAU,IAAI,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,EAAE,CAAC,CAAC;SACpF;QAED,IAAI,WAAW,IAAI,CAAC,EAAE;YACpB,MAAM,IAAI,UAAU,CAAC,iDAAiD,WAAW,EAAE,CAAC,CAAC;SACtF;QAED,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;QACvC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAC3B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,EAAE;QACb,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC3C,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;gBAChC,IAAI,GAAG,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;gBAC1E,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC;gBAEhC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;oBACvB,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;iBACvB;YACH,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;gBAChC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAClC,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;gBAC3B,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;gBACxB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAChC,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;gBAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;gBACpB,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;gBACtB,MAAM,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;gBAC/B,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,IAAI,CAAC,uBAAuB,EAAE,CAAC;oBAC/B,OAAO;iBACR;gBAED,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,yBAAyB,KAAK,CAAC,EAAE;oBAC5D,IAAI,IAAI,CAAC,gBAAgB,GAAG,CAAC,IAAI,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,UAAU,EAAE;wBACxE,MAAM,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;wBACzD,IAAI,CAAC,eAAe,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,iBAAiB,EAAE,EAAE,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC;6BAC7E,IAAI,CAAC,OAAO,CAAC;6BACb,KAAK,CAAC,MAAM,CAAC,CAAC;qBAClB;yBAAM,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnD,OAAO;qBACR;yBAAM;wBACL,OAAO,EAAE,CAAC;qBACX;iBACF;YACH,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACK,oBA
AoB,CAAC,IAAY;QACvC,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACpC,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,MAAM,CAAC;IACvC,CAAC;IAED;;;;OAIG;IACK,kCAAkC,CAAC,MAAqB;QAC9D,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;SAC7F;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;SAC9D;QAED,IAAI,CAAC,gBAAgB,IAAI,MAAM,CAAC,IAAI,CAAC;QACrC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;;OAQG;IACK,WAAW;QACjB,OAAO,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;YAC/C,IAAI,MAAoB,CAAC;YAEzB,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC5B,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAG,CAAC;gBAChC,IAAI,CAAC,kCAAkC,CAAC,MAAM,CAAC,CAAC;aACjD;iBAAM;gBACL,IAAI,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE;oBACrC,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;oBACnD,IAAI,CAAC,UAAU,EAAE,CAAC;iBACnB;qBAAM;oBACL,gDAAgD;oBAChD,OAAO,KAAK,CAAC;iBACd;aACF;YAED,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,uBAAuB,EAAE,CAAC;SAChC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACK,KAAK,CAAC,uBAAuB;QACnC,IAAI,MAAgC,CAAC;QACrC,GAAG;YACD,IAAI,IAAI,CAAC,yBAAyB,IAAI,IAAI,CAAC,WAAW,EAAE;gBACtD,OAAO;aACR;YAED,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;YAC/B,IAAI,MAAM,EAAE;gBACV,IAAI,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC;aACrC;SACF,QAAQ,MAAM,EAAE;IACnB,CAAC;IAED;;;;OAIG;IACK,KAAK,CAAC,sBAAsB,CAAC,MAAoB;QACvD,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC;QAEjC,IAAI,CAAC,yBAAyB,EAAE,CAAC;QACjC,IAAI,CAAC,MAAM,IAAI,YAAY,CAAC;QAE5B,IAAI;YACF,MAAM,IAAI,CAAC,eAAe,CACxB,GAAG,EAAE,CAAC,MAAM,CAAC,iBAAiB,EAAE,EAChC,YAAY,EACZ,IAAI,CAAC,MAAM,GAAG,YAAY,CAC3B,CAAC;SACH;QAAC,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAChC,OAAO;SACR;QAED,IAAI,CAAC,yBAAyB,EAAE,CAAC;QACjC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACzB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;IAChC,CAAC;IAED;;;;OAIG;IACK,WAAW,CAAC,MAAoB;QACtC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;YAC5D,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;SACxB;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EventEmitter } from \"events\";\nimport { Readable } from \"stream\";\nimport { PooledBuffer } from \"./PooledBuffer\";\n\n/**\n * OutgoingHandler is an async function triggered by BufferScheduler.\n */\nexport declare type OutgoingHandler = (\n body: () => NodeJS.ReadableStream,\n length: number,\n offset?: number\n) => Promise;\n\n/**\n * This class accepts a Node.js Readable stream as input, and keeps reading data\n * from the stream into the internal buffer structure, until it reaches maxBuffers.\n * Every available buffer will try to trigger outgoingHandler.\n *\n * The internal buffer structure includes an incoming buffer array, and a outgoing\n * buffer array. The incoming buffer array includes the \"empty\" buffers can be filled\n * with new incoming data. The outgoing array includes the filled buffers to be\n * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.\n *\n * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING\n *\n * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * 1. Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n * 2. 
concurrency should set a smaller value than maxBuffers, which is helpful to\n * reduce the possibility when a outgoing handler waits for the stream data.\n * in this situation, outgoing handlers are blocked.\n * Outgoing queue shouldn't be empty.\n */\nexport class BufferScheduler {\n /**\n * Size of buffers in incoming and outgoing queues. This class will try to align\n * data read from Readable stream into buffer chunks with bufferSize defined.\n */\n private readonly bufferSize: number;\n\n /**\n * How many buffers can be created or maintained.\n */\n private readonly maxBuffers: number;\n\n /**\n * A Node.js Readable stream.\n */\n private readonly readable: Readable;\n\n /**\n * OutgoingHandler is an async function triggered by BufferScheduler when there\n * are available buffers in outgoing array.\n */\n private readonly outgoingHandler: OutgoingHandler;\n\n /**\n * An internal event emitter.\n */\n private readonly emitter: EventEmitter = new EventEmitter();\n\n /**\n * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers)\n */\n private readonly concurrency: number;\n\n /**\n * An internal offset marker to track data offset in bytes of next outgoingHandler.\n */\n private offset: number = 0;\n\n /**\n * An internal marker to track whether stream is end.\n */\n private isStreamEnd: boolean = false;\n\n /**\n * An internal marker to track whether stream or outgoingHandler returns error.\n */\n private isError: boolean = false;\n\n /**\n * How many handlers are executing.\n */\n private executingOutgoingHandlers: number = 0;\n\n /**\n * Encoding of the input Readable stream which has string data type instead of Buffer.\n */\n private encoding?: BufferEncoding;\n\n /**\n * How many buffers have been allocated.\n */\n private numBuffers: number = 0;\n\n /**\n * Because this class doesn't know how much data every time stream pops, which\n * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache\n * data received from the stream, when data in unresolvedDataArray exceeds the\n * blockSize defined, it will try to concat a blockSize of buffer, fill into available\n * buffers from incoming and push to outgoing array.\n */\n private unresolvedDataArray: Buffer[] = [];\n\n /**\n * How much data consisted in unresolvedDataArray.\n */\n private unresolvedLength: number = 0;\n\n /**\n * The array includes all the available buffers can be used to fill data from stream.\n */\n private incoming: PooledBuffer[] = [];\n\n /**\n * The array (queue) includes all the buffers filled from stream data.\n */\n private outgoing: PooledBuffer[] = [];\n\n /**\n * Creates an instance of BufferScheduler.\n *\n * @param readable - A Node.js Readable stream\n * @param bufferSize - Buffer size of every maintained buffer\n * @param maxBuffers - How many buffers can be allocated\n * @param outgoingHandler - An async function scheduled to be\n * triggered when a buffer fully filled\n * with stream data\n * @param concurrency - Concurrency of executing outgoingHandlers (>0)\n * @param encoding - [Optional] Encoding of Readable stream when it's a string stream\n */\n constructor(\n readable: Readable,\n bufferSize: number,\n maxBuffers: number,\n outgoingHandler: OutgoingHandler,\n concurrency: number,\n encoding?: BufferEncoding\n ) {\n if (bufferSize <= 0) {\n throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);\n }\n\n if (maxBuffers <= 0) {\n throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);\n }\n\n if (concurrency <= 0) {\n throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);\n }\n\n this.bufferSize = bufferSize;\n this.maxBuffers = maxBuffers;\n this.readable = readable;\n this.outgoingHandler = outgoingHandler;\n this.concurrency = concurrency;\n this.encoding = encoding;\n }\n\n /**\n * Start the scheduler, will return error when stream of any of the outgoingHandlers\n * returns error.\n *\n */\n public async do(): Promise {\n return new Promise((resolve, reject) => {\n this.readable.on(\"data\", (data) => {\n data = typeof data === \"string\" ? Buffer.from(data, this.encoding) : data;\n this.appendUnresolvedData(data);\n\n if (!this.resolveData()) {\n this.readable.pause();\n }\n });\n\n this.readable.on(\"error\", (err) => {\n this.emitter.emit(\"error\", err);\n });\n\n this.readable.on(\"end\", () => {\n this.isStreamEnd = true;\n this.emitter.emit(\"checkEnd\");\n });\n\n this.emitter.on(\"error\", (err) => {\n this.isError = true;\n this.readable.pause();\n reject(err);\n });\n\n this.emitter.on(\"checkEnd\", () => {\n if (this.outgoing.length > 0) {\n this.triggerOutgoingHandlers();\n return;\n }\n\n if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {\n if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {\n const buffer = this.shiftBufferFromUnresolvedDataArray();\n this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)\n .then(resolve)\n .catch(reject);\n } else if (this.unresolvedLength >= this.bufferSize) {\n return;\n } else {\n resolve();\n }\n }\n });\n });\n }\n\n /**\n * Insert a new data into unresolved array.\n *\n * @param data -\n */\n private appendUnresolvedData(data: Buffer) {\n this.unresolvedDataArray.push(data);\n this.unresolvedLength += data.length;\n }\n\n /**\n * Try to shift a buffer with size in blockSize. 
The buffer returned may be less\n * than blockSize when data in unresolvedDataArray is less than bufferSize.\n *\n */\n private shiftBufferFromUnresolvedDataArray(buffer?: PooledBuffer): PooledBuffer {\n if (!buffer) {\n buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);\n } else {\n buffer.fill(this.unresolvedDataArray, this.unresolvedLength);\n }\n\n this.unresolvedLength -= buffer.size;\n return buffer;\n }\n\n /**\n * Resolve data in unresolvedDataArray. For every buffer with size in blockSize\n * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,\n * then push it into outgoing to be handled by outgoing handler.\n *\n * Return false when available buffers in incoming are not enough, else true.\n *\n * @returns Return false when buffers in incoming are not enough, else true.\n */\n private resolveData(): boolean {\n while (this.unresolvedLength >= this.bufferSize) {\n let buffer: PooledBuffer;\n\n if (this.incoming.length > 0) {\n buffer = this.incoming.shift()!;\n this.shiftBufferFromUnresolvedDataArray(buffer);\n } else {\n if (this.numBuffers < this.maxBuffers) {\n buffer = this.shiftBufferFromUnresolvedDataArray();\n this.numBuffers++;\n } else {\n // No available buffer, wait for buffer returned\n return false;\n }\n }\n\n this.outgoing.push(buffer);\n this.triggerOutgoingHandlers();\n }\n return true;\n }\n\n /**\n * Try to trigger a outgoing handler for every buffer in outgoing. Stop when\n * concurrency reaches.\n */\n private async triggerOutgoingHandlers() {\n let buffer: PooledBuffer | undefined;\n do {\n if (this.executingOutgoingHandlers >= this.concurrency) {\n return;\n }\n\n buffer = this.outgoing.shift();\n if (buffer) {\n this.triggerOutgoingHandler(buffer);\n }\n } while (buffer);\n }\n\n /**\n * Trigger a outgoing handler for a buffer shifted from outgoing.\n *\n * @param buffer -\n */\n private async triggerOutgoingHandler(buffer: PooledBuffer): Promise {\n const bufferLength = buffer.size;\n\n this.executingOutgoingHandlers++;\n this.offset += bufferLength;\n\n try {\n await this.outgoingHandler(\n () => buffer.getReadableStream(),\n bufferLength,\n this.offset - bufferLength\n );\n } catch (err: any) {\n this.emitter.emit(\"error\", err);\n return;\n }\n\n this.executingOutgoingHandlers--;\n this.reuseBuffer(buffer);\n this.emitter.emit(\"checkEnd\");\n }\n\n /**\n * Return buffer used by outgoing handler into incoming.\n *\n * @param buffer -\n */\n private reuseBuffer(buffer: PooledBuffer) {\n this.incoming.push(buffer);\n if (!this.isError && this.resolveData() && !this.isStreamEnd) {\n this.readable.resume();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js new file mode 100644 index 0000000..f035c02 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +/** + * This class generates a readable stream from the data in an array of buffers. + */ +export class BuffersStream extends Readable { + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } +} +//# sourceMappingURL=BuffersStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map new file mode 100644 index 0000000..b5b8187 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BuffersStream.js","sourceRoot":"","sources":["../../../../storage-common/src/BuffersStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAmB,MAAM,QAAQ,CAAC;AAOnD;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,QAAQ;IAgBzC;;;;;;OAMG;IACH,YACU,OAAiB,EACjB,UAAkB,EAC1B,OAA8B;QAE9B,KAAK,CAAC,OAAO,CAAC,CAAC;QAJP,YAAO,GAAP,OAAO,CAAU;QACjB,eAAU,GAAV,UAAU,CAAQ;QAI1B,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;QACnC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,CAAC,CAAC;QAE3B,4DAA4D;QAC5D,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;YAC9B,aAAa,IAAI,GAAG,CAAC,UAAU,CAAC;SACjC;QACD,IAAI,aAAa,GAAG,IAAI,CAAC,UAAU,EAAE;YACnC,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;SACpF;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,IAAa;QACxB,IAAI,IAAI,CAAC,iBAAiB,IAAI,IAAI,CAAC,UAAU,EAAE;YAC7C,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACjB;QAED,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,IAAI,CAAC,qBAAqB,CAAC;SACnC;QAED,MAAM,UAAU,GAAa,EAAE,CAAC;QAChC,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC,UAAU,EAAE;YAC3D,2DAA2D;YAC3D,MAAM,yBAAyB,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC;YAC3E,MAAM,6BAA6B,GACjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,UAAU,GAAG,IAAI,CAAC,yBAAyB,CAAC;YAC7E,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,6BAA6B,EAAE,yBAAyB,CAAC,CAAC;YACrF,IAAI,SAAS,GAAG,IAAI,GAAG,CAAC,EAAE;gBACxB,uBAAuB;gBACvB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,IAAI,GAAG,CAAC,CAAC;gBACtD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,CAAC,iBAAiB,IAAI,IAAI,GAAG,CAAC,CAAC;gBACnC,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;gBACrC,CAAC,GAAG,IAAI,CAAC;gBACT,MAAM;aACP;iBAAM;gBACL,wBAAwB;gBACxB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,SAAS,CAAC;gBACvD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,SAAS,KAAK,6BAA6B,EAAE;oBAC/C,4DAA4D;oBAC5D,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;oBACnC,IAAI,CAAC,WAAW,EAAE,CAAC;iBACpB;qBAAM;oBACL,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;iBACtC;gBACD,IAAI,CAAC,iBAAiB,IAAI,SAAS,CAAC;gBACpC,CAAC,IAAI,SAAS,CAAC;aAChB;SACF;QAED,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;SACtC;aAAM,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable, ReadableOptions } from \"stream\";\n\n/**\n * Options to configure the BuffersStream.\n */\nexport interface BuffersStreamOptions extends ReadableOptions {}\n\n/**\n * This class generates a readable stream from the data in an array of buffers.\n */\nexport class BuffersStream extends Readable {\n /**\n * The offset of data to be read in the current buffer.\n */\n private byteOffsetInCurrentBuffer: number;\n\n /**\n * The index of buffer to be read in the array of buffers.\n */\n private bufferIndex: number;\n\n /**\n * The total length of data already read.\n */\n private pushedBytesLength: number;\n\n /**\n * Creates an instance of BuffersStream that will emit the data\n * contained in the array of buffers.\n *\n * @param buffers - Array of buffers containing the data\n * @param byteLength - The total length of data contained in the buffers\n */\n constructor(\n private buffers: Buffer[],\n private byteLength: number,\n options?: BuffersStreamOptions\n ) {\n super(options);\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex = 0;\n 
this.pushedBytesLength = 0;\n\n // check byteLength is no larger than buffers[] total length\n let buffersLength = 0;\n for (const buf of this.buffers) {\n buffersLength += buf.byteLength;\n }\n if (buffersLength < this.byteLength) {\n throw new Error(\"Data size shouldn't be larger than the total length of buffers.\");\n }\n }\n\n /**\n * Internal _read() that will be called when the stream wants to pull more data in.\n *\n * @param size - Optional. The size of data to be read\n */\n public _read(size?: number) {\n if (this.pushedBytesLength >= this.byteLength) {\n this.push(null);\n }\n\n if (!size) {\n size = this.readableHighWaterMark;\n }\n\n const outBuffers: Buffer[] = [];\n let i = 0;\n while (i < size && this.pushedBytesLength < this.byteLength) {\n // The last buffer may be longer than the data it contains.\n const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;\n const remainingCapacityInThisBuffer =\n this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;\n const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);\n if (remaining > size - i) {\n // chunkSize = size - i\n const end = this.byteOffsetInCurrentBuffer + size - i;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n this.pushedBytesLength += size - i;\n this.byteOffsetInCurrentBuffer = end;\n i = size;\n break;\n } else {\n // chunkSize = remaining\n const end = this.byteOffsetInCurrentBuffer + remaining;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n if (remaining === remainingCapacityInThisBuffer) {\n // this.buffers[this.bufferIndex] used up, shift to next one\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex++;\n } else {\n this.byteOffsetInCurrentBuffer = end;\n }\n this.pushedBytesLength += remaining;\n i += remaining;\n }\n }\n\n if (outBuffers.length > 1) {\n this.push(Buffer.concat(outBuffers));\n } else if (outBuffers.length === 1) {\n this.push(outBuffers[0]);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js new file mode 100644 index 0000000..b153905 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BuffersStream } from "./BuffersStream"; +/** + * maxBufferLength is max size of each buffer in the pooled buffers. + */ +// Can't use import as Typescript doesn't recognize "buffer". +const maxBufferLength = require("buffer").constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +export class PooledBuffer { + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. 
+ */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } +} +//# sourceMappingURL=PooledBuffer.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map new file mode 100644 index 0000000..e2e6415 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"PooledBuffer.js","sourceRoot":"","sources":["../../../../storage-common/src/PooledBuffer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD;;GAEG;AACH,6DAA6D;AAC7D,MAAM,eAAe,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC;AAE/D;;;;;;;GAOG;AACH,MAAM,OAAO,YAAY;IA4CvB,YAAY,QAAgB,EAAE,OAAkB,EAAE,WAAoB;QA3CtE;;;WAGG;QACK,YAAO,GAAa,EAAE,CAAC;QAwC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;QAEf,WAAW;QACX,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC;QACxD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;YAClC,IAAI,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC;YAC7E,IAAI,GAAG,KAAK,CAAC,EAAE;gBACb,GAAG,GAAG,eAAe,CAAC;aACvB;YACD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;SAC5C;QAED,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,WAAY,CAAC,CAAC;SAClC;IACH,CAAC;IA5CD;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAyCD;;;;;;;;OAQG;IACI,IAAI,CAAC,OAAiB,EAAE,WAAmB;QAChD,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAElD,IAAI,CAAC,GAAG,CAAC,EACP,CAAC,GAAG,CAAC,EACL,YAAY,GAAG,CAAC,EAChB,YAAY,GAAG,CAAC,EAChB,cAAc,GAAG,CAAC,CAAC;QACrB,OAAO,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE;YAClC,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YAC/B,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,YAAY,EAAE,YAAY,CAAC,CAAC;YAElE,cAAc,IAAI,SAAS,CAAC;YAC5B,YAAY,IAAI,SAAS,CAAC;YAC1B,YAAY,IAAI,SAAS,CAAC;YAC1B,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;gBAClC,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;aAClB;YACD,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;gBAClC,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;aAClB;SACF;QAED,mCAAmC;QACnC,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACrB,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;YACtB,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SAC7C;IACH,CAAC;IAED;;;OAGG;IACI,iBAAiB;QACtB,OAAO,IAAI,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;IACpD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BuffersStream } from \"./BuffersStream\";\nimport { Readable } from \"stream\";\n\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n// Can't use import as Typescript doesn't recognize \"buffer\".\nconst maxBufferLength = require(\"buffer\").constants.MAX_LENGTH;\n\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n */\nexport class PooledBuffer {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n */\n private buffers: Buffer[] = [];\n\n /**\n * The total size of internal buffers.\n */\n private readonly capacity: number;\n\n /**\n * The total size of data contained in internal buffers.\n */\n private _size: number;\n\n /**\n * The size of the data contained in the pooled buffers.\n */\n public get size(): number {\n return this._size;\n }\n\n /**\n * Creates an instance of PooledBuffer with given 
capacity.\n * Internal buffers are allocated but contains no data.\n * Users may call the {@link PooledBuffer.fill} method to fill this\n * pooled buffer with data.\n *\n * @param capacity - Total capacity of the internal buffers\n */\n constructor(capacity: number);\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated and filled with data in the input buffers serially\n * with respect to the total length.\n *\n * @param capacity - Total capacity of the internal buffers\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n */\n constructor(capacity: number, buffers: Buffer[], totalLength: number);\n constructor(capacity: number, buffers?: Buffer[], totalLength?: number) {\n this.capacity = capacity;\n this._size = 0;\n\n // allocate\n const bufferNum = Math.ceil(capacity / maxBufferLength);\n for (let i = 0; i < bufferNum; i++) {\n let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n\n if (buffers) {\n this.fill(buffers, totalLength!);\n }\n }\n\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n *\n */\n public fill(buffers: Buffer[], totalLength: number) {\n this._size = Math.min(this.capacity, totalLength);\n\n let i = 0,\n j = 0,\n targetOffset = 0,\n sourceOffset = 0,\n totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n const source = buffers[i];\n const target = this.buffers[j];\n const copiedNum = source.copy(target, targetOffset, sourceOffset);\n\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n }\n\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n */\n public getReadableStream(): Readable {\n return new BuffersStream(this.buffers, this.size);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js new file mode 100644 index 0000000..3eca477 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export * from "./BufferScheduler.browser"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map new file mode 100644 index 0000000..fb2fafa --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../../storage-common/src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./BufferScheduler.browser\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js new file mode 100644 index 0000000..993a263 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./BufferScheduler"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map new file mode 100644 index 0000000..9538bda --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../storage-common/src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,mBAAmB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./BufferScheduler\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js new file mode 100644 index 0000000..e7e5f2f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export const AVRO_SYNC_MARKER_SIZE = 16; +export const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +export const AVRO_CODEC_KEY = "avro.codec"; +export const AVRO_SCHEMA_KEY = "avro.schema"; +//# sourceMappingURL=AvroConstants.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map new file mode 100644 index 0000000..663b8f2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroConstants.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroConstants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,qBAAqB,GAAW,EAAE,CAAC;AAChD,MAAM,CAAC,MAAM,eAAe,GAAe,IAAI,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;AAC5E,MAAM,CAAC,MAAM,cAAc,GAAW,YAAY,CAAC;AACnD,MAAM,CAAC,MAAM,eAAe,GAAW,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const AVRO_SYNC_MARKER_SIZE: number = 16;\nexport const AVRO_INIT_BYTES: Uint8Array = new Uint8Array([79, 98, 106, 1]);\nexport const AVRO_CODEC_KEY: string = \"avro.codec\";\nexport const AVRO_SCHEMA_KEY: string = \"avro.schema\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js new file mode 100644 index 0000000..e815c69 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js @@ -0,0 +1,312 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. 
+ const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +export class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (err) { + // eslint-disable-line no-empty + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new 
Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } +} +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + // eslint-disable-line @typescript-eslint/ban-types + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } +} +//# sourceMappingURL=AvroParser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map new file mode 100644 index 0000000..5106dca --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"AvroParser.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroParser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAqBlC,MAAM,OAAO,UAAU;IACrB;;;;;;OAMG;IACI,MAAM,CAAC,KAAK,CAAC,cAAc,CAChC,MAAoB,EACpB,MAAc,EACd,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;QAC9E,IAAI,KAAK,CAAC,MAAM,KAAK,MAAM,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;SACpC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;OAKG;IACK,MAAM,CAAC,KAAK,CAAC,QAAQ,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAChE,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC;IAChB,CAAC;IAED,6DAA6D;IAC7D,8EAA8E;IAC9E,oFAAoF;IAC5E,MAAM,CAAC,KAAK,CAAC,cAAc,CACjC,MAAoB,EACpB,UAAiC,EAAE;QAEnC,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,IAAI,iBAAiB,GAAG,CAAC,CAAC;QAC1B,IAAI,IAAI,EAAE,YAAY,EAAE,mBAAmB,CAAC;QAE5C,GAAG;YACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAClD,YAAY,GAAG,IAAI,GAAG,IAAI,CAAC;YAC3B,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,iBAAiB,CAAC;YACpD,iBAAiB,IAAI,CAAC,CAAC;SACxB,QAAQ,YAAY,IAAI,iBAAiB,GAAG,EAAE,EAAE,CAAC,mDAAmD;QAErG,IAAI,YAAY,EAAE;YAChB,6BAA6B;YAC7B,0CAA0C;YAC1C,aAAa,GAAG,aAAa,CAAC;YAC9B,mBAAmB,GAAG,SAAS,CAAC,CAAC,WAAW;YAC5C,GAAG;gBACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAClD,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,mBAAmB,CAAC;gBACrD,mBAAmB,IAAI,GAAG,CAAC,CAAC,SAAS;aACtC,QAAQ,IAAI,GAAG,IAAI,EAAE;YAEtB,MAAM,GAAG,GAAG,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC;YAC3E,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,EAAE;gBAClE,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;aACtC;YACD,OAAO,GAAG,CAAC;SACZ;QAED,OAAO,CAAC,aAAa,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC;IACrD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,QAAQ,CAC1B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAO,CACzB,MAAoB,EACpB,UAAiC,EAAE;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,QAAQ;QAC1B,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,WAAW,CAC7B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,CAAC,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,KAAK,CAAC,EAAE;YACX,OAAO,IAAI,CAAC;SACb;aAAM,IAAI,CAAC,KAAK,CAAC,EAAE;YAClB,OAAO,KAAK,CAAC;SACd;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;SAC5C;IACH,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAClE,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,sBAAsB;IACzD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAClE,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,sBAAsB;IACzD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,IAAI,IAAI,GAAG,CAAC,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;IACjE,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC1D,MAAM,WAAW,GAAG,IAAI,WAA
W,EAAE,CAAC;QACtC,OAAO,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IACnC,CAAC;IAEO,MAAM,CAAC,KAAK,CAAC,WAAW,CAC9B,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACzD,0GAA0G;QAC1G,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACpD,OAAO,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;IACxB,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAO,CACzB,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,OAA8B,EAAE,EACN,EAAE;YAC5B,OAAO,UAAU,CAAC,WAAW,CAAC,CAAC,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;QACzD,CAAC,CAAC;QAEF,MAAM,KAAK,GAAsB,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;QAE7F,MAAM,IAAI,GAAsB,EAAE,CAAC;QACnC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAEO,MAAM,CAAC,KAAK,CAAC,SAAS,CAC5B,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAQ,EAAE,CAAC;QACtB,KACE,IAAI,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EACtD,KAAK,KAAK,CAAC,EACX,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EAClD;YACA,IAAI,KAAK,GAAG,CAAC,EAAE;gBACb,qBAAqB;gBACrB,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAC3C,KAAK,GAAG,CAAC,KAAK,CAAC;aAChB;YAED,OAAO,KAAK,EAAE,EAAE;gBACd,MAAM,IAAI,GAAM,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBACtD,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAClB;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AAOD,IAAK,WAOJ;AAPD,WAAK,WAAW;IACd,gCAAiB,CAAA;IACjB,4BAAa,CAAA;IACb,8BAAe,CAAA;IACf,0BAAW,CAAA;IACX,8BAAe,CAAA;IACf,8BAAe,CAAA;AACjB,CAAC,EAPI,WAAW,KAAX,WAAW,QAOf;AAYD,IAAK,aASJ;AATD,WAAK,aAAa;IAChB,8BAAa,CAAA;IACb,oCAAmB,CAAA;IACnB,4BAAW,CAAA;IACX,8BAAa,CAAA;IACb,gCAAe,CAAA;IACf,kCAAiB,CAAA;IACjB,gCAAe,CAAA;IACf,kCAAiB,CAAA;AACnB,CAAC,EATI,aAAa,KAAb,aAAa,QASjB;AAED,MAAM,OAAgB,QAAQ;IAS5B;;OAEG;IACI,MAAM,CAAC,UAAU,CAAC,MAAuB;QAC9C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;SAC1C;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;YAChC,OAAO,QAAQ,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAsB,CAAC,CAAC;SAC1D;IACH,CAAC;IAEO,MAAM,CAAC,gBAAgB,CAAC,MAAc;QAC5C,QAAQ,MAAM,EAAE;YACd,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,OAAO,CAAC;YAC3B,KAAK,aAAa,CAAC,GAAG,CAAC;YACvB,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM,CAAC;YAC1B,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,IAAI,iBAAiB,CAAC,MAAuB,CAAC,CAAC;YACxD;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,MAAM,EAAE,CAAC,CAAC;SACrD;IACH,CAAC;IAEO,MAAM,CAAC,eAAe,CAAC,MAAa;QAC1C,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC5D,CAAC;IAEO,MAAM,CAAC,gBAAgB,CAAC,MAAoB;QAClD,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;QACzB,kDAAkD;QAClD,IAAI;YACF,OAAO,QAAQ,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;SACxC;QAAC,OAAO,GAAQ,EAAE;YACjB,+BAA+B;SAChC;QAED,QAAQ,IAAI,EAAE;YACZ,KAAK,WAAW,CAAC,MAAM;gBACrB,IAAI,MAAM,CAAC,OAAO,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,EAAE,CAAC,CAAC;iBAC1E;gBACD,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;oBAChB,MAAM,IAAI,KAAK,CAAC,sDAAsD,MAAM,EAAE,CAAC,CAAC;iBACjF;gBAED,gDAAgD;gBAChD,MAAM,MAAM,GAA6B,EAAE,CAAC;gBAC5C,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,EAAE,CAAC,CAAC;iBACnF;gBACD,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,MAAM,EAAE;oBACjC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;iBACtD;gBACD,OAAO,IAAI,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;YACjD,KAAK,WAAW,CAAC,IAAI;gBACnB,IAAI,MAAM,CAAC,OAAO,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,EAAE,CAAC,CAAC;iBAC1E;gBACD,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;oBACnB,MAAM,IAAI,KAAK,CAAC,yDAAyD,MAAM,EAAE,CAA
C,CAAC;iBACpF;gBACD,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YAC1C,KAAK,WAAW,CAAC,GAAG;gBAClB,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,EAAE,CAAC,CAAC;iBACnF;gBACD,OAAO,IAAI,WAAW,CAAC,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;YAC7D,KAAK,WAAW,CAAC,KAAK,CAAC,CAAC,eAAe;YACvC,KAAK,WAAW,CAAC,KAAK,CAAC,CAAC,eAAe;YACvC;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,IAAI,OAAO,MAAM,EAAE,CAAC,CAAC;SAChE;IACH,CAAC;CACF;AAED,MAAM,iBAAkB,SAAQ,QAAQ;IAGtC,YAAY,SAAwB;QAClC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAEM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACnE,QAAQ,IAAI,CAAC,UAAU,EAAE;YACvB,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;YAC/B,KAAK,aAAa,CAAC,OAAO;gBACxB,OAAO,UAAU,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YACjD,KAAK,aAAa,CAAC,GAAG;gBACpB,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC7C,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC9C,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;SAC7C;IACH,CAAC;CACF;AAED,MAAM,YAAa,SAAQ,QAAQ;IAGjC,YAAY,OAAiB;QAC3B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;IAC1B,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACzE,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC9B,CAAC;CACF;AAED,MAAM,aAAc,SAAQ,QAAQ;IAGlC,YAAY,KAAiB;QAC3B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;IACtB,CAAC;IAEM,KAAK,CAAC,IAAI,CACf,MAAoB,EACpB,UAAiC,EAAE;QAEnC,mDAAmD;QACnD,MAAM,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACtD,CAAC;CACF;AAED,MAAM,WAAY,SAAQ,QAAQ;IAGhC,YAAY,QAAkB;QAC5B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAEM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACnE,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA4B,EACJ,EAAE;YAC1B,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QACtC,CAAC,CAAC;QACF,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;CACF;AAED,MAAM,cAAe,SAAQ,QAAQ;IAInC,YAAY,MAAgC,EAAE,IAAY;QACxD,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QACtB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;IACpB,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACzE,MAAM,MAAM,GAAkC,EAAE,CAAC;QACjD,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;QAC/B,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;YAC9B,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;gBAC3D,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;aAC7D;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of the Object usage and non-interfaces\n/* eslint-disable @typescript-eslint/ban-types, @azure/azure-sdk/ts-use-interface-parameters */\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { KeyValuePair } from \"./utils/utils.common\";\n\n/**\n * Options to configure the AvroParser read methods.\n * See {@link AvroParser.readFixedBytes}, {@link AvroParser.readMap} and etc.\n */\ninterface AvroParserReadOptions {\n /**\n * An implementation of the `AbortSignalLike` 
interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroParser {\n /**\n * Reads a fixed number of bytes from the stream.\n *\n * @param stream -\n * @param length -\n * @param options -\n */\n public static async readFixedBytes(\n stream: AvroReadable,\n length: number,\n options: AvroParserReadOptions = {}\n ): Promise {\n const bytes = await stream.read(length, { abortSignal: options.abortSignal });\n if (bytes.length !== length) {\n throw new Error(\"Hit stream end.\");\n }\n return bytes;\n }\n\n /**\n * Reads a single byte from the stream.\n *\n * @param stream -\n * @param options -\n */\n private static async readByte(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const buf = await AvroParser.readFixedBytes(stream, 1, options);\n return buf[0];\n }\n\n // int and long are stored in variable-length zig-zag coding.\n // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt\n // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types\n private static async readZigZagLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n let zigZagEncoded = 0;\n let significanceInBit = 0;\n let byte, haveMoreByte, significanceInFloat;\n\n do {\n byte = await AvroParser.readByte(stream, options);\n haveMoreByte = byte & 0x80;\n zigZagEncoded |= (byte & 0x7f) << significanceInBit;\n significanceInBit += 7;\n } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers\n\n if (haveMoreByte) {\n // Switch to float arithmetic\n // eslint-disable-next-line no-self-assign\n zigZagEncoded = zigZagEncoded;\n significanceInFloat = 268435456; // 2 ** 28.\n do {\n byte = await AvroParser.readByte(stream, options);\n zigZagEncoded += (byte & 0x7f) * significanceInFloat;\n significanceInFloat *= 128; // 2 ** 7\n } while (byte & 0x80);\n\n const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2;\n if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {\n throw new Error(\"Integer overflow.\");\n }\n return res;\n }\n\n return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1);\n }\n\n public static async readLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readInt(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readNull(): Promise {\n return null;\n }\n\n public static async readBoolean(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const b = await AvroParser.readByte(stream, options);\n if (b === 1) {\n return true;\n } else if (b === 0) {\n return false;\n } else {\n throw new Error(\"Byte was not a boolean.\");\n }\n }\n\n public static async readFloat(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 4, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat32(0, true); // littleEndian = true\n }\n\n public static async readDouble(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 8, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat64(0, true); // littleEndian = true\n }\n\n public static async readBytes(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const size = await AvroParser.readLong(stream, options);\n if (size < 0) {\n throw new Error(\"Bytes size was negative.\");\n }\n\n return stream.read(size, { abortSignal: options.abortSignal });\n }\n\n public static async readString(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readBytes(stream, options);\n const utf8decoder = new TextDecoder();\n return utf8decoder.decode(u8arr);\n }\n\n private static async readMapPair(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const key = await AvroParser.readString(stream, options);\n // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter.\n const value = await readItemMethod(stream, options);\n return { key, value };\n }\n\n public static async readMap(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const readPairMethod = (\n s: AvroReadable,\n opts: AvroParserReadOptions = {}\n ): Promise> => {\n return AvroParser.readMapPair(s, readItemMethod, opts);\n };\n\n const pairs: KeyValuePair[] = await AvroParser.readArray(stream, readPairMethod, options);\n\n const dict: Record = {};\n for (const pair of pairs) {\n dict[pair.key] = pair.value;\n }\n return dict;\n }\n\n private static async readArray(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise {\n const items: T[] = [];\n for (\n let count = await AvroParser.readLong(stream, options);\n count !== 0;\n count = await AvroParser.readLong(stream, options)\n ) {\n if (count < 0) {\n 
// Ignore block sizes\n await AvroParser.readLong(stream, options);\n count = -count;\n }\n\n while (count--) {\n const item: T = await readItemMethod(stream, options);\n items.push(item);\n }\n }\n return items;\n }\n}\n\ninterface RecordField {\n name: string;\n type: string | ObjectSchema | (string | ObjectSchema)[]; // Unions may not immediately contain other unions.\n}\n\nenum AvroComplex {\n RECORD = \"record\",\n ENUM = \"enum\",\n ARRAY = \"array\",\n MAP = \"map\",\n UNION = \"union\",\n FIXED = \"fixed\",\n}\n\ninterface ObjectSchema {\n type: Exclude;\n name?: string;\n aliases?: string;\n fields?: RecordField[];\n symbols?: string[];\n values?: string;\n size?: number;\n}\n\nenum AvroPrimitive {\n NULL = \"null\",\n BOOLEAN = \"boolean\",\n INT = \"int\",\n LONG = \"long\",\n FLOAT = \"float\",\n DOUBLE = \"double\",\n BYTES = \"bytes\",\n STRING = \"string\",\n}\n\nexport abstract class AvroType {\n /**\n * Reads an object from the stream.\n */\n public abstract read(\n stream: AvroReadable,\n options?: AvroParserReadOptions\n ): Promise; // eslint-disable-line @typescript-eslint/ban-types\n\n /**\n * Determines the AvroType from the Avro Schema.\n */\n public static fromSchema(schema: string | Object): AvroType {\n if (typeof schema === \"string\") {\n return AvroType.fromStringSchema(schema);\n } else if (Array.isArray(schema)) {\n return AvroType.fromArraySchema(schema);\n } else {\n return AvroType.fromObjectSchema(schema as ObjectSchema);\n }\n }\n\n private static fromStringSchema(schema: string): AvroType {\n switch (schema) {\n case AvroPrimitive.NULL:\n case AvroPrimitive.BOOLEAN:\n case AvroPrimitive.INT:\n case AvroPrimitive.LONG:\n case AvroPrimitive.FLOAT:\n case AvroPrimitive.DOUBLE:\n case AvroPrimitive.BYTES:\n case AvroPrimitive.STRING:\n return new AvroPrimitiveType(schema as AvroPrimitive);\n default:\n throw new Error(`Unexpected Avro type ${schema}`);\n }\n }\n\n private static fromArraySchema(schema: any[]): AvroType {\n return new AvroUnionType(schema.map(AvroType.fromSchema));\n }\n\n private static fromObjectSchema(schema: ObjectSchema): AvroType {\n const type = schema.type;\n // Primitives can be defined as strings or objects\n try {\n return AvroType.fromStringSchema(type);\n } catch (err: any) {\n // eslint-disable-line no-empty\n }\n\n switch (type) {\n case AvroComplex.RECORD:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.name) {\n throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`);\n }\n\n // eslint-disable-next-line no-case-declarations\n const fields: Record = {};\n if (!schema.fields) {\n throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`);\n }\n for (const field of schema.fields) {\n fields[field.name] = AvroType.fromSchema(field.type);\n }\n return new AvroRecordType(fields, schema.name);\n case AvroComplex.ENUM:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.symbols) {\n throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`);\n }\n return new AvroEnumType(schema.symbols);\n case AvroComplex.MAP:\n if (!schema.values) {\n throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`);\n }\n return new AvroMapType(AvroType.fromSchema(schema.values));\n case AvroComplex.ARRAY: // Unused today\n case AvroComplex.FIXED: // Unused today\n default:\n throw new Error(`Unexpected Avro type ${type} in 
${schema}`);\n }\n }\n}\n\nclass AvroPrimitiveType extends AvroType {\n private _primitive: AvroPrimitive;\n\n constructor(primitive: AvroPrimitive) {\n super();\n this._primitive = primitive;\n }\n\n public read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n switch (this._primitive) {\n case AvroPrimitive.NULL:\n return AvroParser.readNull();\n case AvroPrimitive.BOOLEAN:\n return AvroParser.readBoolean(stream, options);\n case AvroPrimitive.INT:\n return AvroParser.readInt(stream, options);\n case AvroPrimitive.LONG:\n return AvroParser.readLong(stream, options);\n case AvroPrimitive.FLOAT:\n return AvroParser.readFloat(stream, options);\n case AvroPrimitive.DOUBLE:\n return AvroParser.readDouble(stream, options);\n case AvroPrimitive.BYTES:\n return AvroParser.readBytes(stream, options);\n case AvroPrimitive.STRING:\n return AvroParser.readString(stream, options);\n default:\n throw new Error(\"Unknown Avro Primitive\");\n }\n }\n}\n\nclass AvroEnumType extends AvroType {\n private readonly _symbols: string[];\n\n constructor(symbols: string[]) {\n super();\n this._symbols = symbols;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const value = await AvroParser.readInt(stream, options);\n return this._symbols[value];\n }\n}\n\nclass AvroUnionType extends AvroType {\n private readonly _types: AvroType[];\n\n constructor(types: AvroType[]) {\n super();\n this._types = types;\n }\n\n public async read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n // eslint-disable-line @typescript-eslint/ban-types\n const typeIndex = await AvroParser.readInt(stream, options);\n return this._types[typeIndex].read(stream, options);\n }\n}\n\nclass AvroMapType extends AvroType {\n private readonly _itemType: AvroType;\n\n constructor(itemType: AvroType) {\n super();\n this._itemType = itemType;\n }\n\n public read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const readItemMethod = (\n s: AvroReadable,\n opts?: AvroParserReadOptions\n ): Promise => {\n return this._itemType.read(s, opts);\n };\n return AvroParser.readMap(stream, readItemMethod, options);\n }\n}\n\nclass AvroRecordType extends AvroType {\n private readonly _name: string;\n private readonly _fields: Record;\n\n constructor(fields: Record, name: string) {\n super();\n this._fields = fields;\n this._name = name;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const record: Record = {};\n record[\"$schema\"] = this._name;\n for (const key in this._fields) {\n if (Object.prototype.hasOwnProperty.call(this._fields, key)) {\n record[key] = await this._fields[key].read(stream, options);\n }\n }\n return record;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js new file mode 100644 index 0000000..86ac741 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export class AvroReadable { +} +//# sourceMappingURL=AvroReadable.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map new file mode 100644 index 0000000..2a421a4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadable.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadable.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAelC,MAAM,OAAgB,YAAY;CAGjC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options to configure the {@link AvroReadable.read} operation.\n */\nexport interface AvroReadableReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport abstract class AvroReadable {\n public abstract get position(): number;\n public abstract read(size: number, options?: AvroReadableReadOptions): Promise;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js new file mode 100644 index 0000000..5d58a86 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { AvroReadable } from "./AvroReadable"; +import { AbortError } from "@azure/abort-controller"; +const ABORT_ERROR = new AbortError("Reading from the avro blob was aborted."); +export class AvroReadableFromBlob extends AvroReadable { + constructor(blob) { + super(); + this._blob = blob; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + size = Math.min(size, this._blob.size - this._position); + if (size <= 0) { + return new Uint8Array(); + } + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + function cleanUp() { + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + } + function abortHandler() { + fileReader.abort(); + cleanUp(); + reject(ABORT_ERROR); + } + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + fileReader.onloadend = (ev) => { + cleanUp(); + resolve(new Uint8Array(ev.target.result)); + }; + fileReader.onerror = () => { + cleanUp(); + reject(); + }; + fileReader.readAsArrayBuffer(this._blob.slice(this._position, (this._position += size))); + }); + } +} +//# sourceMappingURL=AvroReadableFromBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map new file mode 100644 index 0000000..192c800 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadableFromBlob.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadableFromBlob.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAA2B,MAAM,gBAAgB,CAAC;AACvE,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,MAAM,WAAW,GAAG,IAAI,UAAU,CAAC,yCAAyC,CAAC,CAAC;AAE9E,MAAM,OAAO,oBAAqB,SAAQ,YAAY;IAIpD,YAAY,IAAU;QACpB,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;IACrB,CAAC;IAED,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE;QACnE,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC;QACxD,IAAI,IAAI,IAAI,CAAC,EAAE;YACb,OAAO,IAAI,UAAU,EAAE,CAAC;SACzB;QAED,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;QACpC,OAAO,IAAI,OAAO,CAAa,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACjD,SAAS,OAAO;gBACd,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;iBACjE;YACH,CAAC;YAED,SAAS,YAAY;gBACnB,UAAU,CAAC,KAAK,EAAE,CAAC;gBACnB,OAAO,EAAE,CAAC;gBACV,MAAM,CAAC,WAAW,CAAC,CAAC;YACtB,CAAC;YAED,IAAI,OAAO,CAAC,WAAW,EAAE;gBACvB,OAAO,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;aAC7D;YAED,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;gBACjC,OAAO,EAAE,CAAC;gBACV,OAAO,CAAC,IAAI,UAAU,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC,CAAC;YAC7C,CAAC,CAAC;YAEF,UAAU,CAAC,OAAO,GAAG,GAAG,EAAE;gBACxB,OAAO,EAAE,CAAC;gBACV,MAAM,EAAE,CAAC;YACX,CAAC,CAAC;YAEF,UAAU,CAAC,iBAAiB,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC;QAC3F,CAAC,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro blob was aborted.\");\n\nexport 
class AvroReadableFromBlob extends AvroReadable {\n private _position: number;\n private _blob: Blob;\n\n constructor(blob: Blob) {\n super();\n this._blob = blob;\n this._position = 0;\n }\n\n public get position(): number {\n return this._position;\n }\n\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n size = Math.min(size, this._blob.size - this._position);\n if (size <= 0) {\n return new Uint8Array();\n }\n\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n function cleanUp(): void {\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n }\n\n function abortHandler(): void {\n fileReader.abort();\n cleanUp();\n reject(ABORT_ERROR);\n }\n\n if (options.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", abortHandler);\n }\n\n fileReader.onloadend = (ev: any) => {\n cleanUp();\n resolve(new Uint8Array(ev.target!.result));\n };\n\n fileReader.onerror = () => {\n cleanUp();\n reject();\n };\n\n fileReader.readAsArrayBuffer(this._blob.slice(this._position, (this._position += size)));\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js new file mode 100644 index 0000000..197fbd3 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AvroReadable } from "./AvroReadable"; +import { AbortError } from "@azure/abort-controller"; +const ABORT_ERROR = new AbortError("Reading from the avro stream was aborted."); +export class AvroReadableFromStream extends AvroReadable { + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. 
+ resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); + } + } +} +//# sourceMappingURL=AvroReadableFromStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map new file mode 100644 index 0000000..9cba217 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadableFromStream.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadableFromStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAA2B,MAAM,gBAAgB,CAAC;AACvE,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,MAAM,WAAW,GAAG,IAAI,UAAU,CAAC,2CAA2C,CAAC,CAAC;AAEhF,MAAM,OAAO,sBAAuB,SAAQ,YAAY;IAWtD,YAAY,QAA+B;QACzC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAC1B,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;IACrB,CAAC;IAXO,YAAY,CAAC,IAAqB;QACxC,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAC1B;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAOD,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IACM,KAAK,CAAC,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE;;QACnE,IAAI,MAAA,OAAO,CAAC,WAAW,0CAAE,OAAO,EAAE;YAChC,MAAM,WAAW,CAAC;SACnB;QAED,IAAI,IAAI,GAAG,CAAC,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,sCAAsC,IAAI,EAAE,CAAC,CAAC;SAC/D;QAED,IAAI,IAAI,KAAK,CAAC,EAAE;YACd,OAAO,IAAI,UAAU,EAAE,CAAC;SACzB;QAED,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE;YAC5B,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QACD,uCAAuC;QACvC,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACxC,IAAI,KAAK,EAAE;YACT,IAAI,CAAC,SAAS,IAAI,KAAK,CAAC,MAAM,CAAC;YAC/B,gEAAgE;YAChE,OAAO,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;SACjC;aAAM;YACL,oDAAoD;YACpD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACrC,4DAA4D;gBAC5D,MAAM,OAAO,GAAe,GAAG,EAAE;oBAC/B,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;oBAC5D,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBACvD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;oBACrD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBAEvD,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;qBACjE;gBACH,CAAC,CAAC;gBAEF,MAAM,gBAAgB,GAAe,GAAG,EAAE;oBACxC,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChD,IAAI,aAAa,EAAE;wBACjB,IAAI,CAAC,SAAS,IAAI,aAAa,CAAC,MAAM,CAAC;wBACvC,OAAO,EAAE,CAAC;wBACV,wEAAwE;wBACxE,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC,CAAC;qBAC3C;gBACH,CAAC,CAAC;gBAEF,MAAM,cAAc,GAAe,GAAG,EAAE;oBACtC,OAAO,EAAE,CAAC;oBACV,MAAM,EAAE,CAAC;gBACX,CAAC,CAAC;gBAEF,MAAM,YAAY,GAAe,GAAG,EAAE;oBACpC,OAAO,EAAE,CAAC;oBACV,MAAM,CAAC,WAAW,CAAC,CAAC;gBACtB,CAAC,CAAC;gBAEF,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAChD,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;gBAC3C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,CAAC,gB
AAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;iBAC9D;gBACD,2DAA2D;YAC7D,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro stream was aborted.\");\n\nexport class AvroReadableFromStream extends AvroReadable {\n private _position: number;\n private _readable: NodeJS.ReadableStream;\n\n private toUint8Array(data: string | Buffer): Uint8Array {\n if (typeof data === \"string\") {\n return Buffer.from(data);\n }\n return data;\n }\n\n constructor(readable: NodeJS.ReadableStream) {\n super();\n this._readable = readable;\n this._position = 0;\n }\n public get position(): number {\n return this._position;\n }\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n if (options.abortSignal?.aborted) {\n throw ABORT_ERROR;\n }\n\n if (size < 0) {\n throw new Error(`size parameter should be positive: ${size}`);\n }\n\n if (size === 0) {\n return new Uint8Array();\n }\n\n if (!this._readable.readable) {\n throw new Error(\"Stream no longer readable.\");\n }\n // See if there is already enough data.\n const chunk = this._readable.read(size);\n if (chunk) {\n this._position += chunk.length;\n // chunk.length maybe less than desired size if the stream ends.\n return this.toUint8Array(chunk);\n } else {\n // register callback to wait for enough data to read\n return new Promise((resolve, reject) => {\n /* eslint-disable @typescript-eslint/no-use-before-define */\n const cleanUp: () => void = () => {\n this._readable.removeListener(\"readable\", readableCallback);\n this._readable.removeListener(\"error\", rejectCallback);\n this._readable.removeListener(\"end\", rejectCallback);\n this._readable.removeListener(\"close\", rejectCallback);\n\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n };\n\n const readableCallback: () => void = () => {\n const callbackChunk = this._readable.read(size);\n if (callbackChunk) {\n this._position += callbackChunk.length;\n cleanUp();\n // callbackChunk.length maybe less than desired size if the stream ends.\n resolve(this.toUint8Array(callbackChunk));\n }\n };\n\n const rejectCallback: () => void = () => {\n cleanUp();\n reject();\n };\n\n const abortHandler: () => void = () => {\n cleanUp();\n reject(ABORT_ERROR);\n };\n\n this._readable.on(\"readable\", readableCallback);\n this._readable.once(\"error\", rejectCallback);\n this._readable.once(\"end\", rejectCallback);\n this._readable.once(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal!.addEventListener(\"abort\", abortHandler);\n }\n /* eslint-enable @typescript-eslint/no-use-before-define */\n });\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js new file mode 100644 index 0000000..8421375 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { __asyncGenerator, __await } from "tslib"; +// TODO: Do a review of non-interfaces +/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */ +import "@azure/core-paging"; +import { AVRO_CODEC_KEY, AVRO_INIT_BYTES, AVRO_SCHEMA_KEY, AVRO_SYNC_MARKER_SIZE, } from "./AvroConstants"; +import { AvroParser, AvroType } from "./AvroParser"; +import { arraysEqual } from "./utils/utils.common"; +export class AvroReader { + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. + this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects(options = {}) { + return __asyncGenerator(this, arguments, function* parseObjects_1() { + if (!this._initialized) { + yield __await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield __await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield __await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield __await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (err) { + // We 
hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield __await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield __await(result); + } + }); + } +} +//# sourceMappingURL=AvroReader.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map new file mode 100644 index 0000000..e35d5bd --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReader.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReader.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAElC,sCAAsC;AACtC,iEAAiE;AAEjE,OAAO,oBAAoB,CAAC;AAC5B,OAAO,EACL,cAAc,EACd,eAAe,EACf,eAAe,EACf,qBAAqB,GACtB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,cAAc,CAAC;AAGpD,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAanD,MAAM,OAAO,UAAU;IAuCrB,YACE,UAAwB,EACxB,YAA2B,EAC3B,kBAA2B,EAC3B,uBAAgC;QAEhC,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAC9B,IAAI,CAAC,aAAa,GAAG,YAAY,IAAI,UAAU,CAAC;QAChD,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;QAC1B,IAAI,CAAC,YAAY,GAAG,kBAAkB,IAAI,CAAC,CAAC;QAC5C,IAAI,CAAC,YAAY,GAAG,uBAAuB,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,IAAI,CAAC,CAAC;IACrD,CAAC;IAhCD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAGD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IA2BO,KAAK,CAAC,UAAU,CAAC,UAA4B,EAAE;QACrD,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,MAAM,EAAE;YACzF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,eAAe,CAAC,EAAE;YACzC,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;SAChD;QAED,sEAAsE;QACtE,sCAAsC;QACtC,IAAI,CAAC,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,UAAU,EAAE;YACnF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,iBAAiB;QACjB,MAAM,KAAK,GAAG,IAAI,CAAC,SAAU,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAI,CAAC,CAAC,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,EAAE;YAChE,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,6DAA6D;QAC7D,IAAI,CAAC,WAAW,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,qBAAqB,EAAE;YAC5F,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,mBAAmB;QACnB,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAE7C,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,EAAE;YAC3B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;SAC1E;QAED,IAAI,CAAC,sBAAsB,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;YACxE,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,oBAAoB;QACpB,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;QAElF,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,EAAE;gBAC1C,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;gBAClF,IAAI,CAAC,sBAAuB,EAAE,CAAC;aAChC;SACF;IACH,CAAC;IAEM,OAAO;QACZ,OAAO,CAAC,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,CAAC;IAChE,CAAC;IAEa,YAAY,CACxB,UAA4B,EAAE;;YAE9B,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;gBACtB,cAAM,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA,CAAC;aAChC;YAED,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE;gBACrB,MAAM,MAAM,GAAG,cAAM,IAAI,CAAC,SAAU,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;oBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW;iBACjC,CAAC,CAAA,CAAC;gBA
EH,IAAI,CAAC,sBAAuB,EAAE,CAAC;gBAC/B,IAAI,CAAC,YAAa,EAAE,CAAC;gBAErB,IAAI,IAAI,CAAC,sBAAsB,KAAK,CAAC,EAAE;oBACrC,MAAM,MAAM,GAAG,cAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,WAAW,EAAE,qBAAqB,EAAE;wBACtF,WAAW,EAAE,OAAO,CAAC,WAAW;qBACjC,CAAC,CAAA,CAAC;oBAEH,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;oBACzE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;oBAEtB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,WAAY,EAAE,MAAM,CAAC,EAAE;wBAC3C,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;qBACrD;oBAED,IAAI;wBACF,IAAI,CAAC,sBAAsB,GAAG,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;4BACxE,WAAW,EAAE,OAAO,CAAC,WAAW;yBACjC,CAAC,CAAA,CAAC;qBACJ;oBAAC,OAAO,GAAQ,EAAE;wBACjB,gCAAgC;wBAChC,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC;qBACjC;oBAED,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,EAAE;wBACpC,oBAAoB;wBACpB,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAA,CAAC;qBACnF;iBACF;gBACD,oBAAM,MAAM,CAAA,CAAC;aACd;QACH,CAAC;KAAA;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of non-interfaces\n/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */\n\nimport \"@azure/core-paging\";\nimport {\n AVRO_CODEC_KEY,\n AVRO_INIT_BYTES,\n AVRO_SCHEMA_KEY,\n AVRO_SYNC_MARKER_SIZE,\n} from \"./AvroConstants\";\nimport { AvroParser, AvroType } from \"./AvroParser\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { arraysEqual } from \"./utils/utils.common\";\n\n/**\n * Options to configure the {@link AvroReader.parseObjects} operation.\n */\nexport interface AvroParseOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroReader {\n private readonly _dataStream: AvroReadable;\n\n private readonly _headerStream: AvroReadable;\n\n private _syncMarker?: Uint8Array;\n\n private _metadata?: Record;\n\n private _itemType?: AvroType;\n\n private _itemsRemainingInBlock?: number;\n\n // Remembers where we started if partial data stream was provided.\n private readonly _initialBlockOffset: number;\n\n /// The byte offset within the Avro file (both header and data)\n /// of the start of the current block.\n private _blockOffset: number;\n public get blockOffset(): number {\n return this._blockOffset;\n }\n\n private _objectIndex: number;\n public get objectIndex(): number {\n return this._objectIndex;\n }\n\n private _initialized: boolean;\n\n constructor(dataStream: AvroReadable);\n\n constructor(\n dataStream: AvroReadable,\n headerStream: AvroReadable,\n currentBlockOffset: number,\n indexWithinCurrentBlock: number\n );\n\n constructor(\n dataStream: AvroReadable,\n headerStream?: AvroReadable,\n currentBlockOffset?: number,\n indexWithinCurrentBlock?: number\n ) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n\n private async initialize(options: AvroParseOptions = {}): Promise {\n const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal,\n });\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n\n // File metadata is written as if defined by 
the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal,\n });\n\n // Validate codec\n const codec = this._metadata![AVRO_CODEC_KEY];\n if (!(codec === undefined || codec === null || codec === \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n\n // The 16-byte, randomly-generated sync marker for this file.\n this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n // Parse the schema\n const schema = JSON.parse(this._metadata![AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n\n if (this._blockOffset === 0) {\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n }\n\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n // skip block length\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n\n this._initialized = true;\n if (this._objectIndex && this._objectIndex > 0) {\n for (let i = 0; i < this._objectIndex; i++) {\n await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });\n this._itemsRemainingInBlock!--;\n }\n }\n }\n\n public hasNext(): boolean {\n return !this._initialized || this._itemsRemainingInBlock! > 0;\n }\n\n public async *parseObjects(\n options: AvroParseOptions = {}\n ): AsyncIterableIterator | null> {\n if (!this._initialized) {\n await this.initialize(options);\n }\n\n while (this.hasNext()) {\n const result = await this._itemType!.read(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n\n this._itemsRemainingInBlock!--;\n this._objectIndex!++;\n\n if (this._itemsRemainingInBlock === 0) {\n const marker = await AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n\n if (!arraysEqual(this._syncMarker!, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n\n try {\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n } catch (err: any) {\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n }\n\n if (this._itemsRemainingInBlock! > 0) {\n // Ignore block size\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n }\n }\n yield result;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js new file mode 100644 index 0000000..475b1d6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AvroReader } from "./AvroReader"; +export { AvroReadable } from "./AvroReadable"; +export { AvroReadableFromBlob } from "./AvroReadableFromBlob"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map new file mode 100644 index 0000000..ef6000c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC9C,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AvroReader } from \"./AvroReader\";\nexport { AvroReadable } from \"./AvroReadable\";\nexport { AvroReadableFromBlob } from \"./AvroReadableFromBlob\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js new file mode 100644 index 0000000..15ab942 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { AvroReader } from "./AvroReader"; +export { AvroReadable } from "./AvroReadable"; +export { AvroReadableFromStream } from "./AvroReadableFromStream"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map new file mode 100644 index 0000000..c46e866 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC9C,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AvroReader } from \"./AvroReader\";\nexport { AvroReadable } from \"./AvroReadable\";\nexport { AvroReadableFromStream } from \"./AvroReadableFromStream\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js new file mode 100644 index 0000000..135cf5b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export function arraysEqual(a, b) { + if (a === b) + return true; + // eslint-disable-next-line eqeqeq + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} +//# sourceMappingURL=utils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map new file mode 100644 index 0000000..2605459 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.common.js","sourceRoot":"","sources":["../../../../../storage-internal-avro/src/utils/utils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,MAAM,UAAU,WAAW,CAAC,CAAa,EAAE,CAAa;IACtD,IAAI,CAAC,KAAK,CAAC;QAAE,OAAO,IAAI,CAAC;IACzB,kCAAkC;IAClC,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI;QAAE,OAAO,KAAK,CAAC;IACzC,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;QAAE,OAAO,KAAK,CAAC;IAExC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACjC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YAAE,OAAO,KAAK,CAAC;KACjC;IACD,OAAO,IAAI,CAAC;AACd,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport interface KeyValuePair {\n key: string;\n value: T;\n}\n\nexport function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {\n if (a === b) return true;\n // eslint-disable-next-line eqeqeq\n if (a == null || b == null) return false;\n if (a.length !== b.length) return false;\n\n for (let i = 0; i < a.length; ++i) {\n if (a[i] !== b[i]) return false;\n }\n return true;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist/index.js b/node_modules/@azure/storage-blob/dist/index.js new file mode 100644 index 0000000..8f7b878 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist/index.js @@ -0,0 +1,25126 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var coreHttp = require('@azure/core-http'); +var tslib = require('tslib'); +var coreTracing = require('@azure/core-tracing'); +var logger$1 = require('@azure/logger'); +var abortController = require('@azure/abort-controller'); +var os = require('os'); +var crypto = require('crypto'); +var stream = require('stream'); +require('@azure/core-paging'); +var coreLro = require('@azure/core-lro'); +var events = require('events'); +var fs = require('fs'); +var util = require('util'); + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var fs__namespace = /*#__PURE__*/_interopNamespace(fs); +var util__namespace = /*#__PURE__*/_interopNamespace(util); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging" + } + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule" + } + } + } + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String" + } + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite" + } + } + } + } +}; +const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String" + } + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean" + } + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean" + } + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + days: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number" + } + } + } + } +}; +const Metrics = { + serializedName: "Metrics", + type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String" + } + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", + type: { + name: "String" + } + }, + 
allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String" + } + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String" + } + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number" + } + } + } + } +}; +const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String" + } + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String" + } + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String" + } + } + } + } +}; +const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String" + } + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String" + } + } + } + } +}; +const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication" + } + } + } + } +}; +const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"] + } + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: 
"ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; +const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String" + } + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean" + } + } + } + } +}; +const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String" + } + } + } + } +}; +const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: "UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String" + } + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String" + } + }, + 
signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String" + } + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String" + } + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String" + } + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String" + } + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String" + } + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + } + } + } +}; +const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag" + } + } + } + } + } + } +}; +const BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String" + } + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy" + } + } + } + } +}; +const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + xmlName: 
"Expiry", + type: { + name: "String" + } + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String" + } + } + } + } +}; +const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsFlatSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + modelProperties: { + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", + type: { + name: "Composite", + className: "BlobItemInternal", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + }, + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", + type: { + name: "String" + } + }, + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", + type: { + name: "Boolean" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", + type: { + name: "Boolean" + } + } + } + } +}; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + xmlIsMsText: true, + 
type: { + name: "String" + } + } + } + } +}; +const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + modelProperties: { + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", + type: { + name: "DateTimeRfc1123" + } + }, + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", + type: { + name: "String" + } + }, + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + }, + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", + type: { + name: "String" + } + }, + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", + type: { + name: "String" + } + }, + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", + type: { + name: "Boolean" + } + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", + type: { + name: "String" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + 
remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } + }, + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", + type: { + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", + "rehydrate-pending-to-cold" + ] + } + }, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", + type: { + name: "DateTimeRfc1123" + } + }, + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", + type: { + name: "Boolean" + } + } + } + } +}; +const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const 
BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix" + } + } + } + }, + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + } + } + } +}; +const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; +const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + }, + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + } + } + } +}; +const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number" + } + } + } + } +}; +const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange" + } + } + } + }, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + 
required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", + type: { + name: "String" + } + }, + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", + type: { + name: "String" + } + }, + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + }, + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + } + } + } +}; +const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", + type: { + name: "Composite", + className: "QueryFormat" + } + } + } + } +}; +const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"] + } + }, + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration" + } + }, + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration" + } + }, + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration" + } + }, + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", + type: { + name: "any" + } + } + } + } +}; +const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: { + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", + type: { + name: "String" + } + }, + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", + type: { + name: "String" + } + }, + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + }, + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", + type: { + name: "String" + } + }, + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", + type: { + name: "Boolean" + } + } + } + } +}; +const JsonTextConfiguration = { + serializedName: 
"JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + } + } + } +}; +const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField" + } + } + } + } + } + } +}; +const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "String" + } + }, + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "String" + } + }, + precision: { + serializedName: "Precision", + xmlName: "Precision", + type: { + name: "Number" + } + }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number" + } + } + } + } +}; +const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + 
serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + 
type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; 
+const ContainerCreateHeaders = { + serializedName: "Container_createHeaders", + type: { + name: "Composite", + className: "ContainerCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", + type: { + name: "Composite", + className: "ContainerCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesHeaders", + modelProperties: { + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + }, + 
denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", + type: { + name: "Composite", + className: "ContainerDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", + type: { + name: "Composite", + className: "ContainerDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", 
+ xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: 
"ContainerRestoreExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + } + } + } +}; +const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" 
+ } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerBreakLeaseHeaders = { + serializedName: 
"Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const 
ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: 
"DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: 
"String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", + type: { + name: "Composite", + className: "BlobDownloadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } 
+ }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + 
xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: 
{ + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + 
clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + } + } +}; +const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", 
+ modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + } + } +}; +const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: 
"String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + 
type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCreateSnapshotExceptionHeaders = { + serializedName: 
"Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + 
xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: 
"x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { 
+ serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + 
xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: 
"x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + 
type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", 
+ xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUpdateSequenceNumberExceptionHeaders 
= { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: 
"x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + } + } + } +}; +const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", 
+ type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: 
"x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" 
+ } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; + +var Mappers = /*#__PURE__*/Object.freeze({ + __proto__: 
null, + BlobServiceProperties: BlobServiceProperties, + Logging: Logging, + RetentionPolicy: RetentionPolicy, + Metrics: Metrics, + CorsRule: CorsRule, + StaticWebsite: StaticWebsite, + StorageError: StorageError, + BlobServiceStatistics: BlobServiceStatistics, + GeoReplication: GeoReplication, + ListContainersSegmentResponse: ListContainersSegmentResponse, + ContainerItem: ContainerItem, + ContainerProperties: ContainerProperties, + KeyInfo: KeyInfo, + UserDelegationKey: UserDelegationKey, + FilterBlobSegment: FilterBlobSegment, + FilterBlobItem: FilterBlobItem, + BlobTags: BlobTags, + BlobTag: BlobTag, + SignedIdentifier: SignedIdentifier, + AccessPolicy: AccessPolicy, + ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, + BlobFlatListSegment: BlobFlatListSegment, + BlobItemInternal: BlobItemInternal, + BlobName: BlobName, + BlobPropertiesInternal: BlobPropertiesInternal, + ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, + BlobHierarchyListSegment: BlobHierarchyListSegment, + BlobPrefix: BlobPrefix, + BlockLookupList: BlockLookupList, + BlockList: BlockList, + Block: Block, + PageList: PageList, + PageRange: PageRange, + ClearRange: ClearRange, + QueryRequest: QueryRequest, + QuerySerialization: QuerySerialization, + QueryFormat: QueryFormat, + DelimitedTextConfiguration: DelimitedTextConfiguration, + JsonTextConfiguration: JsonTextConfiguration, + ArrowConfiguration: ArrowConfiguration, + ArrowField: ArrowField, + ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, + ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, + ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, + ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, + ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, + ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, + ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, + ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, + ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, + ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, + ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, + ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, + ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, + ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, + ContainerCreateHeaders: ContainerCreateHeaders, + ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, + ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, + ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, + ContainerDeleteHeaders: ContainerDeleteHeaders, + ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, + ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, + ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, + ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, + ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, + ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, + ContainerRestoreHeaders: ContainerRestoreHeaders, + ContainerRestoreExceptionHeaders: 
ContainerRestoreExceptionHeaders, + ContainerRenameHeaders: ContainerRenameHeaders, + ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, + ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, + ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, + ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, + ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, + ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, + ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, + ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, + ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, + ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, + ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, + ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, + ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, + ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, + ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, + ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, + BlobDownloadHeaders: BlobDownloadHeaders, + BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, + BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, + BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, + BlobDeleteHeaders: BlobDeleteHeaders, + BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, + BlobUndeleteHeaders: BlobUndeleteHeaders, + BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, + BlobSetExpiryHeaders: BlobSetExpiryHeaders, + BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, + BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, + BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, + BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, + BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, + BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, + BlobSetMetadataHeaders: BlobSetMetadataHeaders, + BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, + BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, + BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, + BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, + BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, + BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, + BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, + BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, + BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, + BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, + BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, + BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, + 
BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, + BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, + BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, + BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, + BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, + BlobSetTierHeaders: BlobSetTierHeaders, + BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, + BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, + BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, + BlobQueryHeaders: BlobQueryHeaders, + BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, + BlobGetTagsHeaders: BlobGetTagsHeaders, + BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, + BlobSetTagsHeaders: BlobSetTagsHeaders, + BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, + PageBlobCreateHeaders: PageBlobCreateHeaders, + PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, + PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, + PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, + PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, + PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, + PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, + PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, + PageBlobResizeHeaders: PageBlobResizeHeaders, + PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, + PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, + PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, + PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, + AppendBlobCreateHeaders: AppendBlobCreateHeaders, + AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, + AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, + AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobSealHeaders: AppendBlobSealHeaders, + AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, + BlockBlobUploadHeaders: BlockBlobUploadHeaders, + BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, + BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, + BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, + BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, + 
BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders: BlockBlobGetBlockListExceptionHeaders +}); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServiceProperties +}; +const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true +}; +const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } +}; +const version = { + parameterPath: "version", + mapper: { + defaultValue: "2023-11-03", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } +}; +const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; +const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } +}; +const marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } +}; +const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } +}; +const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv +}; +const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfo +}; +const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: 
"userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } +}; +const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } +}; +const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } +}; +const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + } +}; +const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } +}; +const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } +}; +const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } +}; +const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; 
+const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } +}; +const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } +}; +const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } +}; +const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } +}; +const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } +}; +const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } + } +}; +const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } +}; +const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + 
xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +const include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv +}; +const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" + } + } +}; +const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } +}; +const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } + } +}; +const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } +}; +const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } +}; +const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" + } + } +}; +const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } +}; +const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + } + } +}; +const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } +}; +const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } +}; +const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } +}; +const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } +}; +const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } +}; +const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + 
} + } +}; +const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } +}; +const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } +}; +const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } +}; +const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" + } + } +}; +const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + } +}; +const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" + } + } +}; +const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" + } + } +}; +const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } +}; +const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + } +}; +const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } +}; +const comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + } +}; +const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", 
+ "Hot", + "Cool", + "Archive", + "Cold" + ] + } + } +}; +const rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + } +}; +const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" + } + } +}; +const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } +}; +const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" + } + } +}; +const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" + } + } +}; +const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" + } + } +}; +const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String" + } + } +}; +const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" + } + } +}; +const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" + } + } +}; +const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } + } +}; +const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, 
+ serializedName: "x-ms-copy-action", + type: { + name: "String" + } + } +}; +const copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } + } +}; +const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } + } +}; +const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequest +}; +const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTags +}; +const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } +}; +const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } +}; +const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } +}; +const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } +}; +const contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } +}; +const ifSequenceNumberLessThan = { + parameterPath: [ 
+ "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" + } + } +}; +const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } +}; +const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } +}; +const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" + } + } +}; +const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" + } + } +}; +const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"] + } + } +}; +const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" + } + } +}; +const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } +}; +const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +const comp23 = { + 
parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" + } + } +}; +const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" + } + } +}; +const blocks = { + parameterPath: "blocks", + mapper: BlockLookupList +}; +const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Service. */ +class Service { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + const operationArguments = { + blobServiceProperties, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. 
+ */ + getStatistics(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + const operationArguments = { + keyInfo, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. 
+ */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: blobServiceProperties, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const getPropertiesOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceProperties, + headersMapper: ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceStatistics, + headersMapper: ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + restype, + timeoutInSeconds, + comp1 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListContainersSegmentResponse, + headersMapper: ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + include + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: UserDelegationKey, + headersMapper: ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders + } + }, + requestBody: keyInfo, + queryParameters: [ + restype, + timeoutInSeconds, + comp3 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const getAccountInfoOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$5 +}; +const submitBatchOperationSpec$1 = { + path: "/", + httpMethod: "POST", + 
responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: body, + queryParameters: [timeoutInSeconds, comp4], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const filterBlobsOperationSpec$1 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Container. */ +class Container { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. 
+ */ + getAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + const operationArguments = { + sourceContainerName, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + const operationArguments = { + delimiter, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. 
+ */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + access, + defaultEncryptionScope, + preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getPropertiesOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetPropertiesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const deleteOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: ContainerDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerDeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const setMetadataOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetMetadataExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp6 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + headersMapper: ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccessPolicyExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetAccessPolicyExceptionHeaders + } + }, + requestBody: containerAcl, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + 
requestId, + access, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerRestoreHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRestoreExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp8 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + deletedContainerName, + deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenameHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenameExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp9 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + sourceContainerName, + sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSubmitBatchExceptionHeaders + } + }, + requestBody: body, + queryParameters: [ + timeoutInSeconds, + comp4, + restype2 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const acquireLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerAcquireLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const releaseLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerReleaseLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const renewLeaseOperationSpec$1 = { + path: "/{containerName}", 
+ httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenewLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const breakLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerBreakLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const changeLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerChangeLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsFlatSegmentResponse, + headersMapper: ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsHierarchySegmentResponse, + headersMapper: ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + delimiter + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getAccountInfoOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$4 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Blob. */ +class Blob$1 { + /** + * Initialize a new instance of the class Blob class. 
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions, options) { + const operationArguments = { + expiryOptions, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. 
+ */ + setHttpHeaders(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + const operationArguments = { + legalHold, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. 
+ * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + const operationArguments = { + copyId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. 
The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + const operationArguments = { + tier, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + rangeGetContentMD5, + rangeGetContentCRC64, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: BlobGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: BlobDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + blobDeleteType + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobUndeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobUndeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp8], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetExpiryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp11], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + expiryOptions, + expiresOn + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: 
BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifUnmodifiedSince, + immutabilityPolicyExpiry, + immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetLegalHoldExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp13], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + legalHold + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetMetadataExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp6], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAcquireLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobReleaseLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + 
leaseId1, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobRenewLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobChangeLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCreateSnapshotExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp14], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + tier, + rehydratePriority, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sealBlob, + legalHold1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + 
leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + copySource, + blobTagsString, + legalHold1, + xMsRequiresSync, + sourceContentMD5, + copySourceAuthorization, + copySourceTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAbortCopyFromURLExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp15, + copyId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetTierHeaders + }, + 202: { + headersMapper: BlobSetTierHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTierExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp16 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + rehydratePriority, + tier1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$3 +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobQueryExceptionHeaders + } + }, + requestBody: queryRequest, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp17 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobTags, + headersMapper: BlobGetTagsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetTagsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobSetTagsHeaders + }, + default: { + bodyMapper: 
StorageError, + headersMapper: BlobSetTagsExceptionHeaders + } + }, + requestBody: tags, + queryParameters: [ + timeoutInSeconds, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifTags, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a PageBlob. */ +class PageBlob { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + const operationArguments = { + contentLength, + blobContentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + uploadPages(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. 
+ */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + const operationArguments = { + sourceUrl, + sourceRange, + contentLength, + range, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. + */ + getPageRanges(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. + */ + getPageRangesDiff(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + const operationArguments = { + blobContentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. + */ + updateSequenceNumber(sequenceNumberAction, options) { + const operationArguments = { + sequenceNumberAction, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. 
+ */ + copyIncremental(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec$1 = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo + ], + mediaType: "binary", + serializer: serializer$2 +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1 + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, 
+ ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1 + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + prevsnapshot + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource + ], + isXML: true, + serializer: xmlSerializer$2 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. 
+ * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a AppendBlob. */ +class AppendBlob { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + const operationArguments = { + sourceUrl, + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. 
+ */ + seal(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + blobTagsString, + legalHold1, + blobType1 + ], + isXML: true, + serializer: xmlSerializer$1 +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + maxSize, + appendPosition + ], + mediaType: "binary", + serializer: serializer$1 +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + sourceUrl, + sourceContentCrc64, + maxSize, + appendPosition, + sourceRange1 + ], + isXML: true, + serializer: xmlSerializer$1 +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: AppendBlobSealHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobSealExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp23], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + appendPosition + ], + isXML: true, + serializer: xmlSerializer$1 +}; + +/* + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a BlockBlob. */ +class BlockBlob { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + const operationArguments = { + contentLength, + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + const operationArguments = { + blockId, + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. + */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + const operationArguments = { + blockId, + contentLength, + sourceUrl, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + const operationArguments = { + blocks, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. 
+ */ + getBlockList(listType, options) { + const operationArguments = { + listType, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobUploadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobUploadExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + blobType2 + ], + mediaType: "binary", + serializer +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sourceContentMD5, + copySourceAuthorization, + copySourceTags, + transactionalContentMD5, + blobType2, + copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2 + ], + mediaType: "binary", + serializer +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId + ], + urlParameters: [url], + headerParameters: [ + version, + 
requestId, + accept1, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + sourceUrl, + sourceContentCrc64, + sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobCommitBlockListExceptionHeaders + } + }, + requestBody: blocks, + queryParameters: [timeoutInSeconds, comp25], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlockList, + headersMapper: BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobGetBlockListExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp25, + listType + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags + ], + isXML: true, + serializer: xmlSerializer +}; + +// Copyright (c) Microsoft Corporation. +/** + * The `@azure/logger` configuration for this package. + */ +const logger = logger$1.createClientLogger("storage-blob"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const SDK_VERSION = "12.17.0"; +const SERVICE_VERSION = "2023-11-03"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +const REQUEST_TIMEOUT = 100 * 1000; // In ms +/** + * The OAuth scope to use with Azure Storage. 
+ */ +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", +}; +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + 
"x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +/// List of ports used for path style addressing. +/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. +const PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104", +]; + +// Copyright (c) Microsoft Corporation. +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. 
It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +function escapeURLPath(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path || "/"; + path = escape(path); + urlParsed.setPath(path); + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. 
+ */ +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + // if accountName is empty, try to read it from BlobEndpoint + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". 
+ * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +function appendToURLPath(url, name) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + urlParsed.setPath(path); + const normalizedUrl = new URL(urlParsed.toString()); + return normalizedUrl.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +function setURLParameter(url, name, value) { + const urlParsed = coreHttp.URLBuilder.parse(url); + urlParsed.setQueryParameter(name, value); + return urlParsed.toString(); +} +/** + * Get URL parameter by name. + * + * @param url - + * @param name - + */ +function getURLParameter(url, name) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getQueryParameterValue(name); +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +function setURLHost(url, host) { + const urlParsed = coreHttp.URLBuilder.parse(url); + urlParsed.setHost(host); + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +function getURLPath(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getPath(); +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +function getURLScheme(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getScheme(); +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +function getURLPathAndQuery(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + const pathString = urlParsed.getPath(); + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.getQuery() || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +function getURLQueries(url) { + let queryString = coreHttp.URLBuilder.parse(url).getQuery(); + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. 
+ */ +function appendToURLQuery(url, queryParts) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let query = urlParsed.getQuery(); + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.setQuery(query); + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. + * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +function base64encode(content) { + return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } +} +/** + * If two strings are equal when compared case insensitive. 
+ * + * @param str1 - + * @param str2 - + */ +function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +function getAccountNameFromUrl(url) { + const parsedUrl = coreHttp.URLBuilder.parse(url); + let accountName; + try { + if (parsedUrl.getHost().split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.getHost().split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.getPath().split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; + } + return accountName; + } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); + } +} +function isIpEndpointStyle(parsedUrl) { + if (parsedUrl.getHost() === undefined) { + return false; + } + const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || + (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()))); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. 
+ * + * @param textConfiguration - + */ +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +/** + * Attach a TokenCredential to an object. + * + * @param thing - + * @param credential - + */ +function attachCredential(thing, credential) { + thing.credential = credential; + return thing; +} +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; + } + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } +} +/** + * Escape the blobName but keep path separator ('/'). + */ +function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. 
+ */ +class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. 
+ */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +class CredentialPolicy extends coreHttp.BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * TelemetryPolicy is a policy used to tag user-agent header for every requests. + */ +class TelemetryPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of TelemetryPolicy. + * @param nextPolicy - + * @param options - + * @param telemetry - + */ + constructor(nextPolicy, options, telemetry) { + super(nextPolicy, options); + this.telemetry = telemetry; + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + if (!request.headers) { + request.headers = new coreHttp.HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. + */ +class TelemetryPolicyFactory { + /** + * Creates an instance of TelemetryPolicyFactory. + * @param telemetry - + */ + constructor(telemetry) { + const userAgentInfo = []; + if (coreHttp.isNode) { + if (telemetry) { + const telemetryString = telemetry.userAgentPrefix || ""; + if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { + userAgentInfo.push(telemetryString); + } + } + // e.g. azsdk-js-storageblob/10.0.0 + const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + if (userAgentInfo.indexOf(libInfo) === -1) { + userAgentInfo.push(libInfo); + } + // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) + let runtimeInfo = `(NODE-VERSION ${process.version})`; + if (os__namespace) { + runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; + } + if (userAgentInfo.indexOf(runtimeInfo) === -1) { + userAgentInfo.push(runtimeInfo); + } + } + this.telemetryString = userAgentInfo.join(" "); + } + /** + * Creates a TelemetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + } +} + +// Copyright (c) Microsoft Corporation. +const _defaultHttpClient = new coreHttp.DefaultHttpClient(); +function getCachedDefaultHttpClient() { + return _defaultHttpClient; +} + +// Copyright (c) Microsoft Corporation. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. 
+ * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? 
_a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + // when options.httpClient is not specified, passing in a DefaultHttpClient instance to + // avoid each client creating its own http client. + this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +function newPipeline(credential, pipelineOptions = {}) { + var _a; + if (credential === undefined) { + credential = new AnonymousCredential(); + } + // Order is important. Closer to the API at the top & closer to the network at the bottom. 
+ // The credential's policy factory must appear close to the wire so it can sign any + // changes made by other factories (like UniqueRequestIDPolicyFactory) + const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + const factories = [ + coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), + coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), + telemetryPolicy, + coreHttp.generateClientRequestIdPolicy(), + new StorageBrowserPolicyFactory(), + new StorageRetryPolicyFactory(pipelineOptions.retryOptions), + // Default deserializationPolicy is provided by protocol layer + // Use customized XML char key of "#" so we could deserialize metadata + // with "_" key + coreHttp.deserializationPolicy(undefined, { xmlCharKey: "#" }), + coreHttp.logPolicy({ + logger: logger.info, + allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), + ]; + if (coreHttp.isNode) { + // policies only available in Node.js runtime, not in browsers + factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); + factories.push(coreHttp.disableResponseDecompressionPolicy()); + } + factories.push(coreHttp.isTokenCredential(credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) + : credential); + return new Pipeline(factories, pipelineOptions); +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. 
+ * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const packageName = "azure-storage-blob"; +const packageVersion = "12.17.0"; +class StorageClientContext extends coreHttp__namespace.ServiceClient { + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. 
+ * @param options The parameter options + */ + constructor(url, options) { + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + if (!options.userAgent) { + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + super(undefined, options); + this.requestContentType = "application/json; charset=utf-8"; + this.baseUri = options.endpoint || "{url}"; + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2023-11-03"; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = new AnonymousCredential(); + for (const factory of this.pipeline.factories) { + if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || + factory instanceof AnonymousCredential) { + this.credential = factory; + } + else if (coreHttp.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + this.credential = factory.credential; + } + } + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a span using the global tracer. + * @internal + */ +const createSpan = coreTracing.createSpanFunction({ + packagePrefix: "Azure.Storage.Blob", + namespace: "Microsoft.Storage", +}); +/** + * @internal + * + * Adapt the tracing options from OperationOptions to what they need to be for + * RequestOptionsBase (when we update to later OpenTelemetry versions this is now + * two separate fields, not just one). + */ +function convertTracingToRequestOptionsBase(options) { + var _a, _b; + return { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
+ * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} + +// Copyright (c) Microsoft Corporation. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(exports.SASProtocol || (exports.SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. 
+ */ +class SASQueryParameters { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + "sktid", + "skt", + "ske", + "sks", + "skv", + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } +} + +// Copyright (c) Microsoft Corporation. 
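+/**
+ * Generates {@link SASQueryParameters} for a container or blob.
+ *
+ * The function dispatches on the effective service version and on the credential type:
+ * when a StorageSharedKeyCredential is supplied the SAS is signed with the account key,
+ * otherwise the second argument is treated as a user delegation key and `accountName`
+ * must be provided. Rough usage sketch (container/blob names, permissions and the
+ * credential are illustrative assumptions, not values taken from this file):
+ *
+ *   const sas = generateBlobSASQueryParameters({
+ *     containerName: "my-container",
+ *     blobName: "my-blob.txt",
+ *     permissions: BlobSASPermissions.parse("r"),
+ *     expiresOn: new Date(Date.now() + 3600 * 1000),
+ *   }, sharedKeyCredential);
+ *   const query = sas.toString();
+ */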
+function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
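+ *
+ * For reference, the string-to-sign built by this version is the newline-joined list:
+ * permissions, startsOn, expiresOn, canonicalized resource, identifier, ipRange,
+ * protocol, version, cacheControl, contentDisposition, contentEncoding,
+ * contentLanguage, contentType (see the function body below).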
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
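+ *
+ * Compared with the 2015-04-05 string-to-sign, this version additionally signs the
+ * resource type ("c", "b", "bs" or "bv") and the snapshot time or version id
+ * timestamp (see the function body below).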
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; +} + +// Copyright (c) Microsoft Corporation. +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +class BlobLeaseClient { + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = new Container(clientContext); + } + else { + this._isContainer = false; + this._containerOrBlobOperation = new Blob$1(clientContext); + } + if (!leaseId) { + leaseId = coreHttp.generateUuid(); + } + this._leaseId = leaseId; + } + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. 
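+ * This is the URL of the container or blob the client was constructed with, i.e. the
+ * resource that the lease operations are issued against.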
+ * + * @readonly + */ + get url() { + return this._url; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + this._leaseId = proposedLeaseId; + return response; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return await this._containerOrBlobOperation.breakLease(operationOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +class RetriableReadableStream extends stream.Readable { + /** + * Creates an instance of RetriableReadableStream. 
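+ * The getter is expected to be an async callback of roughly the form
+ * `(offset) => Promise<NodeJS.ReadableStream>` that re-issues the download starting at
+ * the given offset; it is invoked when the source stream ends before all requested
+ * bytes have been delivered (illustrative description of the retry logic below).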
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.source.removeAllListeners("data"); + this.source.emit("end"); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? undefined : error); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) 
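+ *
+ * Consumption sketch (illustrative only; `downloadResponse` is assumed to be an
+ * instance of this class and `fs` is Node's fs module):
+ *
+ *   downloadResponse.readableStreamBody.pipe(fs.createWriteStream("out.bin"));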
+ * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +class BlobDownloadResponse { + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. 
+ * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. 
This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * Returns the date and time the blob was created. + * + * @readonly + */ + get createdOn() { + return this.originalResponse.createdOn; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. 
+ * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const AVRO_SYNC_MARKER_SIZE = 16; +const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +const AVRO_CODEC_KEY = "avro.codec"; +const AVRO_SCHEMA_KEY = "avro.schema"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. 
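The zig-zag varint format that `readZigZagLong` consumes above can be checked in isolation. Below is a minimal sketch of its 32-bit fast path, operating on an in-memory byte array instead of an `AvroReadable` stream (`decodeZigZagVarint` is a hypothetical helper, not part of the SDK):

```js
// Decode one zig-zag varint from a byte array (illustration only).
// Mirrors the fast path of AvroParser.readZigZagLong for values that fit in
// 32 bits; the SDK switches to float arithmetic beyond that point.
function decodeZigZagVarint(bytes, offset = 0) {
  let encoded = 0;
  let shift = 0;
  let pos = offset;
  let byte;
  do {
    byte = bytes[pos++];
    encoded |= (byte & 0x7f) << shift; // low 7 bits carry data, high bit is "more bytes follow"
    shift += 7;
  } while (byte & 0x80);
  // Undo zig-zag: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, ...
  const value = (encoded >> 1) ^ -(encoded & 1);
  return { value, bytesRead: pos - offset };
}

// Example: Avro encodes -2 as 0x03 and 64 as 0x80 0x01.
console.log(decodeZigZagVarint(Uint8Array.of(0x03)));       // { value: -2, bytesRead: 1 }
console.log(decodeZigZagVarint(Uint8Array.of(0x80, 0x01))); // { value: 64, bytesRead: 2 }
```

Values whose encoded form needs more than 28 bits take the float-arithmetic branch in the reader above instead, which is why it guards the result against `Number.MIN_SAFE_INTEGER` and `Number.MAX_SAFE_INTEGER`.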
+ const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (err) { + // eslint-disable-line no-empty + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new 
Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } +} +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + // eslint-disable-line @typescript-eslint/ban-types + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function arraysEqual(a, b) { + if (a === b) + return true; + // eslint-disable-next-line eqeqeq + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} + +// Copyright (c) Microsoft Corporation. 
+class AvroReader { + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. + this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (err) { + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); + } + }); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
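`initialize()` above expects the standard Avro Object Container File layout. A sketch of that layout and of the magic-byte check, using only constants visible in this bundle (`looksLikeAvroContainer` is a hypothetical helper):

```js
// Layout consumed by AvroReader.initialize():
//
//   4 bytes   magic              "Obj" 0x01          (AVRO_INIT_BYTES)
//   map       file metadata      keys such as "avro.schema", "avro.codec"
//   16 bytes  sync marker        (AVRO_SYNC_MARKER_SIZE)
//   then, per data block:
//     long    object count in block
//     long    block size in bytes
//     ...     serialized objects
//     16 bytes sync marker, which must equal the header's marker

const AVRO_MAGIC = Uint8Array.of(79, 98, 106, 1); // "O", "b", "j", 1

// Minimal check mirroring the arraysEqual(header, AVRO_INIT_BYTES) guard above.
function looksLikeAvroContainer(firstFourBytes) {
  return (
    firstFourBytes.length === AVRO_MAGIC.length &&
    firstFourBytes.every((b, i) => b === AVRO_MAGIC[i])
  );
}

console.log(looksLikeAvroContainer(Uint8Array.of(79, 98, 106, 1))); // true
console.log(looksLikeAvroContainer(Uint8Array.of(80, 75, 3, 4)));   // false (ZIP magic)
```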
+class AvroReadable { +} + +// Copyright (c) Microsoft Corporation. +const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +class AvroReadableFromStream extends AvroReadable { + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +class BlobQuickQueryStream extends stream.Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
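`AvroReadableFromStream.read()` above uses the common Node.js pattern of calling `stream.read(size)` and, when not enough bytes are buffered yet, waiting for the next `'readable'` event. A standalone sketch of the same pattern, without the abort-signal handling (`readExactly` is a hypothetical helper, not part of the SDK):

```js
const { Readable } = require("stream");

// Resolve with exactly `size` bytes, or reject if the stream ends or errors first.
function readExactly(readable, size) {
  const chunk = readable.read(size);
  if (chunk) return Promise.resolve(chunk);
  return new Promise((resolve, reject) => {
    const onReadable = () => {
      const c = readable.read(size);
      if (c) {
        cleanup();
        resolve(c);
      }
    };
    const onEndOrError = (err) => {
      cleanup();
      reject(err || new Error("Stream ended before enough data arrived."));
    };
    const cleanup = () => {
      readable.removeListener("readable", onReadable);
      readable.removeListener("end", onEndOrError);
      readable.removeListener("error", onEndOrError);
    };
    readable.on("readable", onReadable);
    readable.once("end", onEndOrError);
    readable.once("error", onEndOrError);
  });
}

// Usage: read the 4-byte Avro magic from an in-memory byte stream.
const source = new Readable({ read() {} });
source.push(Buffer.from("Obj\x01rest-of-file"));
source.push(null);
readExactly(source, 4).then((magic) => console.log(magic)); // <Buffer 4f 62 6a 01>
```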
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. 
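For reference, these are the four record schemas that `BlobQuickQueryStream.readInternal()` above dispatches on, together with the fields it validates in each record (summarized directly from the switch statement; the records themselves are produced by `AvroReader.parseObjects()`):

```js
// Quick-query Avro record schemas handled by readInternal() and the fields it reads.
const QUICK_QUERY_SCHEMAS = {
  "com.microsoft.azure.storage.queryBlobContents.resultData": ["data"],        // Uint8Array, pushed to the stream as a Buffer
  "com.microsoft.azure.storage.queryBlobContents.progress": ["bytesScanned"],  // reported via onProgress
  "com.microsoft.azure.storage.queryBlobContents.end": ["totalBytes"],         // final onProgress, then push(null)
  "com.microsoft.azure.storage.queryBlobContents.error": ["fatal", "name", "description", "position"], // reported via onError
};

console.log(Object.keys(QUICK_QUERY_SCHEMAS).length); // 4
```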
+ * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. 
+ * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed. + */ + BlockBlobTier["Cold"] = "Cold"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + BlockBlobTier["Archive"] = "Archive"; +})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. 
+ */ + PremiumPageBlobTier["P80"] = "P80"; +})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); +function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); +function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. + * @param response - Model PageBlob Range response + */ +function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} + +// Copyright (c) Microsoft Corporation. +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +class BlobBeginCopyFromUrlPoller extends coreLro.Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return coreHttp.delay(this.intervalInMs); + } +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. 
+ * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? + options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} + +// Copyright (c) Microsoft Corporation. +/** + * States for Batch. 
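`rangeToString` above is small enough to restate verbatim so the two header values mentioned in its doc comment can be checked directly:

```js
// Same logic as the helper above: count undefined means "from offset to the end".
function rangeToString(iRange) {
  if (iRange.offset < 0) throw new RangeError("Range.offset cannot be smaller than 0.");
  if (iRange.count && iRange.count <= 0) throw new RangeError("Range.count must be larger than 0.");
  return iRange.count
    ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`
    : `bytes=${iRange.offset}-`;
}

console.log(rangeToString({ offset: 0, count: 512 })); // "bytes=0-511"
console.log(rangeToString({ offset: 255 }));           // "bytes=255-"
```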
+ */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); + } + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. + * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } + else { + return; + } + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * This class generates a readable stream from the data in an array of buffers. + */ +class BuffersStream extends stream.Readable { + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
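A minimal sketch of how the `Batch` helper above is typically driven, assuming the class is in scope (it is internal to this bundle): five queued operations, at most two running at once, and `do()` resolving once all of them finish.

```js
async function demoBatch() {
  const batch = new Batch(2); // concurrency = 2
  for (let i = 0; i < 5; i++) {
    batch.addOperation(async () => {
      await new Promise((resolve) => setTimeout(resolve, 10 * i)); // stand-in for real work
      console.log(`operation ${i} done`);
    });
  }
  await batch.do(); // rejects if any queued operation throws
  console.log("all operations finished");
}

demoBatch().catch(console.error);
```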
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * maxBufferLength is max size of each buffer in the pooled buffers. + */ +// Can't use import as Typescript doesn't recognize "buffer". +const maxBufferLength = require("buffer").constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +class PooledBuffer { + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? 
capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +class BufferScheduler { + /** + * Creates an instance of BufferScheduler. + * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. 
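The splitting logic in the `PooledBuffer` constructor above can be made visible with a small stand-in for `maxBufferLength` (`planBufferLengths` is a hypothetical helper that reproduces the allocation loop; the real limit is `require("buffer").constants.MAX_LENGTH`):

```js
// Spread a requested capacity across chunks of at most maxBufferLength bytes,
// exactly as PooledBuffer's constructor does before calling Buffer.allocUnsafe.
function planBufferLengths(capacity, maxBufferLength) {
  const lengths = [];
  const bufferNum = Math.ceil(capacity / maxBufferLength);
  for (let i = 0; i < bufferNum; i++) {
    let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;
    if (len === 0) {
      len = maxBufferLength; // capacity is an exact multiple, so the last chunk is full-length
    }
    lengths.push(len);
  }
  return lengths;
}

console.log(planBufferLengths(25, 10)); // [ 10, 10, 5 ]
console.log(planBufferLengths(20, 10)); // [ 10, 10 ]
```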
+ */ + this.emitter = new events.EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. 
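A sketch of driving `BufferScheduler` above: slice a byte stream into fixed-size blocks and hand each block to an async handler with bounded concurrency. This assumes `BufferScheduler` is in scope (it is internal to this bundle); the logging handler stands in for a real upload call.

```js
const { Readable } = require("stream");

// A 1 MiB stream of zeros, consumed as 256 KiB blocks.
const source = Readable.from([Buffer.alloc(1024 * 1024)], { objectMode: false });

const scheduler = new BufferScheduler(
  source,
  256 * 1024, // bufferSize: each outgoing block is 256 KiB
  4,          // maxBuffers: keep at most 4 blocks in memory
  async (getStream, length, offset) => {
    // Stand-in for a real upload; getStream() yields exactly `length` bytes starting at `offset`.
    console.log(`block at offset ${offset}, ${length} bytes`);
  },
  2           // concurrency: at most 2 outgoing handlers in flight
);

scheduler.do().then(() => console.log("stream fully consumed"));
```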
+ * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. + * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); + stream.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? 
count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. + */ +async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; + +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +class BlobClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new Blob$1(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. 
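The overload resolution above corresponds to the documented ways of constructing a `BlobClient` with the public package. A minimal sketch with placeholder account, container, and blob names:

```js
const { BlobClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

// 1) From an account connection string plus container and blob names
//    (connection string taken from the environment here as a placeholder).
const fromConnString = new BlobClient(
  process.env.AZURE_STORAGE_CONNECTION_STRING,
  "my-container",
  "data.txt"
);

// 2) From a blob URL plus an explicit credential (shared key is Node.js only).
const credential = new StorageSharedKeyCredential("myaccount", process.env.ACCOUNT_KEY);
const fromUrl = new BlobClient(
  "https://myaccount.blob.core.windows.net/my-container/data.txt",
  credential
);

console.log(fromConnString.containerName, fromUrl.name);
```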
+ */ + get containerName() { + return this._containerName; + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!coreHttp.isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. 
+ */ + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. 
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + try { + return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + try { + const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. 
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. 
+ */ + async abortCopyFromURL(copyId, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + try { + return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. 
+ */ + async setAccessTier(tier, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. 
+ response.blobDownloadStream = undefined; + return response; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
+ */ + async startCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options) { + const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + try { + return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. 
+ */ + async setImmutabilityPolicy(immutabilityPolicy, options) { + const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + try { + return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options) { + const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + try { + return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. 
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = new AppendBlob(this.storageClientContext); + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new Blob$1(this.storageClientContext); + } + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
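+     *
+     * Example usage (a minimal sketch; assumes `destClient` is a `BlockBlobClient` and
+     * `sourceBlobURL` is publicly readable or already carries a SAS token):
+     *
+     * ```js
+     * // Copy a remote blob as two 4 MiB blocks, then commit them in order.
+     * const blockSize = 4 * 1024 * 1024;
+     * const blockIds = [];
+     * for (let i = 0; i < 2; i++) {
+     *   // Block IDs must be base64-encoded and the same length for every block.
+     *   const blockId = Buffer.from(String(i).padStart(6, "0")).toString("base64");
+     *   blockIds.push(blockId);
+     *   await destClient.stageBlockFromURL(blockId, sourceBlobURL, i * blockSize, blockSize);
+     * }
+     * await destClient.commitBlockList(blockIds);
+     * ```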
+ */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. 
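+     *
+     * Example usage (a minimal sketch; lists the blocks committed by an earlier
+     * stageBlock/commitBlockList sequence):
+     *
+     * ```js
+     * const blockList = await blockBlobClient.getBlockList("committed");
+     * for (const block of blockList.committedBlocks) {
+     *   console.log(`Committed block ${block.name} is ${block.size} bytes`);
+     * }
+     * ```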
+ */ + async getBlockList(listType, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); + try { + if (coreHttp.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. 
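+     *
+     * Example usage (a minimal sketch for browsers; assumes a file input element
+     * with id "file-input" on the page):
+     *
+     * ```js
+     * const file = document.getElementById("file-input").files[0];
+     * await blockBlobClient.uploadBrowserData(file, {
+     *   blobHTTPHeaders: { blobContentType: file.type },
+     * });
+     * ```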
+ */ + async uploadBrowserData(browserData, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); + try { + const browserBlob = new Blob([browserData]); + return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); + try { + if (size <= options.maxSingleShotSize) { + return await this.upload(bodyFactory(0, size), size, updatedOptions); + } + const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = options.blockSize * i; + const end = i === numBlocks - 1 ? 
size : start + options.blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + try { + const size = (await fsStat(filePath)).size; + return await this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
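+     *
+     * Example usage (a minimal sketch; assumes a local file "./local.dat" and 4 MiB
+     * buffers, with the stream's highWaterMark matching the buffer size as suggested above):
+     *
+     * ```js
+     * const fs = require("fs");
+     * const readable = fs.createReadStream("./local.dat", { highWaterMark: 4 * 1024 * 1024 });
+     * await blockBlobClient.uploadStream(readable, 4 * 1024 * 1024, 5, {
+     *   onProgress: (ev) => console.log(`Uploaded ${ev.loadedBytes} bytes`),
+     * });
+     * ```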
+ */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + try { + let blockNum = 0; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + async uploadPages(body, offset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
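+     *
+     * Example usage (a minimal sketch; creates a small page blob, writes the first
+     * 512-byte page, then clears it again):
+     *
+     * ```js
+     * await pageBlobClient.create(1024);
+     * await pageBlobClient.uploadPages(Buffer.alloc(512, "a"), 0, 512);
+     * await pageBlobClient.clearPages(0, 512);
+     * ```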
+ */ + async clearPages(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + try { + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + try { + return await this.pageBlobContext + .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + try { + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItems(offset = 0, count, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeItems(offset, count, options); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. 
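+     *
+     * Example usage (a minimal sketch; takes a snapshot, writes another page, then asks
+     * which ranges changed since that snapshot):
+     *
+     * ```js
+     * const { snapshot } = await pageBlobClient.createSnapshot();
+     * await pageBlobClient.uploadPages(Buffer.alloc(512, "b"), 512, 512);
+     * const diff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshot);
+     * for (const range of diff.pageRange || []) {
+     *   console.log(`Changed range: ${range.start} - ${range.end}`);
+     * }
+     * ```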
+ */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); + try { + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + offset: offset, + count: count, + }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. 
The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRangesDiff(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
+ * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + async resize(size, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); + try { + return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); + try { + return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. 
+ * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + async startCopyIncremental(copySource, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); + try { + return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +// Copyright (c) Microsoft Corporation. +async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +function utf8ByteLength(str) { + return Buffer.byteLength(str); +} + +// Copyright (c) Microsoft Corporation. +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. 
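+        // Illustrative shape of the multipart body being split below (field values are
+        // placeholders; see the Blob Batch REST documentation linked above for the exact format):
+        //
+        //   --batchresponse_<GUID>
+        //   Content-Type: application/http
+        //   Content-ID: 0
+        //
+        //   HTTP/1.1 202 Accepted
+        //   x-ms-request-id: <request id>
+        //
+        //   --batchresponse_<GUID>--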
+ if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. 
+ if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we expect a + // 1-1 mapping from [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed response and we can always look it up in the raw response for debugging purposes. + if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * it will wait until it can acquire the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } +} +Mutex.keys = {}; +Mutex.listeners = {}; + +// Copyright (c) Microsoft Corporation. +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary.
+ * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); + try { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); + try { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreHttp.generateUuid(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + const factories = new Array(policyFactoryLength); + factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer + factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers + if (!isAnonymousCreds) { + factories[2] = coreHttp.isTokenCredential(credential) + ? 
attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire + return new Pipeline(factories, {}); + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new coreHttp.WebResource(), + status: 200, + headers: new coreHttp.HttpHeaders(), + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; // Intercept request from going to wire + } +} +class BatchRequestAssemblePolicyFactory { + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } +} +class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return this._nextPolicy.sendRequest(request); + } +} +class BatchHeaderFilterPolicyFactory { + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. + this.serviceOrContainerContext = new Container(storageClientContext); + } + else { + this.serviceOrContainerContext = new Service(storageClientContext); + } + } + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); + } + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. 
+ * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); + try { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +class ContainerClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); + } + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - Options to Container Create operation. + * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); + try { + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. 
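+ *
+ * Example usage (a minimal sketch; the container name is illustrative and `blobServiceClient`
+ * is assumed to be an existing {@link BlobServiceClient}):
+ *
+ * ```js
+ * const containerClient = blobServiceClient.getContainerClient("my-container");
+ * const result = await containerClient.createIfNotExists();
+ * if (result.succeeded) {
+ *   console.log("Container was created");
+ * } else {
+ *   console.log("Container already existed");
+ * }
+ * ```
+ *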
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking container existence", + }); + return false; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. 
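+ *
+ * Example usage (a minimal sketch; assumes an existing `containerClient`):
+ *
+ * ```js
+ * const properties = await containerClient.getProperties();
+ * console.log(`Lease state: ${properties.leaseState}`);
+ * console.log(`Public access level: ${properties.blobPublicAccess}`);
+ * ```
+ *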
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + try { + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); + try { + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. 
+ * @param options - Options to Container Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + try { + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); + try { + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. 
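+ *
+ * Example usage (a minimal sketch; the policy id and dates are illustrative):
+ *
+ * ```js
+ * await containerClient.setAccessPolicy("blob", [
+ *   {
+ *     id: "policy-1",
+ *     accessPolicy: {
+ *       startsOn: new Date(),
+ *       expiresOn: new Date(Date.now() + 24 * 60 * 60 * 1000),
+ *       permissions: "r"
+ *     }
+ *   }
+ * ]);
+ * ```
+ *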
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); + try { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method; please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrent uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.
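+ *
+ * Example usage (a minimal sketch; the blob name and content are illustrative):
+ *
+ * ```js
+ * const content = "Hello world!";
+ * const { blockBlobClient, response } = await containerClient.uploadBlockBlob(
+ *   "my-blob.txt",
+ *   content,
+ *   Buffer.byteLength(content)
+ * );
+ * console.log(`Uploaded block blob, requestId: ${response.requestId}`);
+ * ```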
+ */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + try { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); + try { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); + try { + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. 
After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. + */ + async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); + try { + const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. 
+ */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const listBlobsFlatSegmentResponse = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listHierarchySegments(delimiter, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". 
+ // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.getPath().split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to create blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. 
+ */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. 
Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +class BlobServiceClient extends StorageClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = new Service(this.storageClientContext); + } + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" 
+ extractedCreds.accountSas, pipeline); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); + try { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, + deletedContainerVersion }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. 
+ * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); + try { + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); + try { + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); + try { + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. 
+ */ + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); + try { + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); + try { + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); + try { + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. 
+ * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); + try { + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); + try { + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** Known values of {@link EncryptionAlgorithmType} that the service accepts. 
*/ +exports.KnownEncryptionAlgorithmType = void 0; +(function (KnownEncryptionAlgorithmType) { + KnownEncryptionAlgorithmType["AES256"] = "AES256"; +})(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); + +Object.defineProperty(exports, 'BaseRequestPolicy', { + enumerable: true, + get: function () { return coreHttp.BaseRequestPolicy; } +}); +Object.defineProperty(exports, 'HttpHeaders', { + enumerable: true, + get: function () { return coreHttp.HttpHeaders; } +}); +Object.defineProperty(exports, 'RequestPolicyOptions', { + enumerable: true, + get: function () { return coreHttp.RequestPolicyOptions; } +}); +Object.defineProperty(exports, 'RestError', { + enumerable: true, + get: function () { return coreHttp.RestError; } +}); +Object.defineProperty(exports, 'WebResource', { + enumerable: true, + get: function () { return coreHttp.WebResource; } +}); +Object.defineProperty(exports, 'deserializationPolicy', { + enumerable: true, + get: function () { return coreHttp.deserializationPolicy; } +}); +exports.AccountSASPermissions = AccountSASPermissions; +exports.AccountSASResourceTypes = AccountSASResourceTypes; +exports.AccountSASServices = AccountSASServices; +exports.AnonymousCredential = AnonymousCredential; +exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; +exports.AppendBlobClient = AppendBlobClient; +exports.BlobBatch = BlobBatch; +exports.BlobBatchClient = BlobBatchClient; +exports.BlobClient = BlobClient; +exports.BlobLeaseClient = BlobLeaseClient; +exports.BlobSASPermissions = BlobSASPermissions; +exports.BlobServiceClient = BlobServiceClient; +exports.BlockBlobClient = BlockBlobClient; +exports.ContainerClient = ContainerClient; +exports.ContainerSASPermissions = ContainerSASPermissions; +exports.Credential = Credential; +exports.CredentialPolicy = CredentialPolicy; +exports.PageBlobClient = PageBlobClient; +exports.Pipeline = Pipeline; +exports.SASQueryParameters = SASQueryParameters; +exports.StorageBrowserPolicy = StorageBrowserPolicy; +exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; +exports.StorageOAuthScopes = StorageOAuthScopes; +exports.StorageRetryPolicy = StorageRetryPolicy; +exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; +exports.StorageSharedKeyCredential = StorageSharedKeyCredential; +exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; +exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; +exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; +exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience; +exports.isPipelineLike = isPipelineLike; +exports.logger = logger; +exports.newPipeline = newPipeline; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/storage-blob/dist/index.js.map b/node_modules/@azure/storage-blob/dist/index.js.map new file mode 100644 index 0000000..e283075 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sources":["../src/generated/src/models/mappers.ts","../src/generated/src/models/parameters.ts","../src/generated/src/operations/service.ts","../src/generated/src/operations/container.ts","../src/generated/src/operations/blob.ts","../src/generated/src/operations/pageBlob.ts","../src/generated/src/operations/appendBlob.ts","../src/generated/src/operations/blockBlob.ts","../src/log.ts","../src/utils/constants.ts","../src/utils/utils.common.ts","../src/policies/StorageBrowserPolicy.ts","../src/StorageBrowserPolicyFactory.ts","../src/policies/StorageRetryPolicy.ts","../src/StorageRetryPolicyFactory.ts","../src/policies/CredentialPolicy.ts","../src/policies/AnonymousCredentialPolicy.ts","../src/credentials/Credential.ts","../src/credentials/AnonymousCredential.ts","../src/policies/TelemetryPolicy.ts","../src/TelemetryPolicyFactory.ts","../src/utils/cache.ts","../src/policies/StorageBearerTokenChallengeAuthenticationPolicy.ts","../src/Pipeline.ts","../src/policies/StorageSharedKeyCredentialPolicy.ts","../src/credentials/StorageSharedKeyCredential.ts","../src/generated/src/storageClientContext.ts","../src/StorageClient.ts","../src/utils/tracing.ts","../src/sas/BlobSASPermissions.ts","../src/sas/ContainerSASPermissions.ts","../src/credentials/UserDelegationKeyCredential.ts","../src/sas/SasIPRange.ts","../src/sas/SASQueryParameters.ts","../src/sas/BlobSASSignatureValues.ts","../src/BlobLeaseClient.ts","../src/utils/RetriableReadableStream.ts","../src/BlobDownloadResponse.ts","../../storage-internal-avro/src/AvroConstants.ts","../../storage-internal-avro/src/AvroParser.ts","../../storage-internal-avro/src/utils/utils.common.ts","../../storage-internal-avro/src/AvroReader.ts","../../storage-internal-avro/src/AvroReadable.ts","../../storage-internal-avro/src/AvroReadableFromStream.ts","../src/utils/BlobQuickQueryStream.ts","../src/BlobQueryResponse.ts","../src/models.ts","../src/PageBlobRangeResponse.ts","../src/pollers/BlobStartCopyFromUrlPoller.ts","../src/Range.ts","../src/utils/Batch.ts","../../storage-common/src/BuffersStream.ts","../../storage-common/src/PooledBuffer.ts","../../storage-common/src/BufferScheduler.ts","../src/utils/utils.node.ts","../src/Clients.ts","../src/BatchUtils.ts","../src/BatchResponseParser.ts","../src/utils/Mutex.ts","../src/BlobBatch.ts","../src/BlobBatchClient.ts","../src/ContainerClient.ts","../src/sas/AccountSASPermissions.ts","../src/sas/AccountSASResourceTypes.ts","../src/sas/AccountSASServices.ts","../src/sas/AccountSASSignatureValues.ts","../src/BlobServiceClient.ts","../src/generatedModels.ts"],"sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\nexport const BlobServiceProperties: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceProperties\",\n xmlName: \"StorageServiceProperties\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceProperties\",\n modelProperties: {\n blobAnalyticsLogging: {\n serializedName: \"Logging\",\n xmlName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\"\n }\n },\n hourMetrics: {\n serializedName: \"HourMetrics\",\n xmlName: \"HourMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n minuteMetrics: {\n serializedName: \"MinuteMetrics\",\n xmlName: \"MinuteMetrics\",\n type: {\n name: 
\"Composite\",\n className: \"Metrics\"\n }\n },\n cors: {\n serializedName: \"Cors\",\n xmlName: \"Cors\",\n xmlIsWrapped: true,\n xmlElementName: \"CorsRule\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"CorsRule\"\n }\n }\n }\n },\n defaultServiceVersion: {\n serializedName: \"DefaultServiceVersion\",\n xmlName: \"DefaultServiceVersion\",\n type: {\n name: \"String\"\n }\n },\n deleteRetentionPolicy: {\n serializedName: \"DeleteRetentionPolicy\",\n xmlName: \"DeleteRetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n },\n staticWebsite: {\n serializedName: \"StaticWebsite\",\n xmlName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\"\n }\n }\n }\n }\n};\n\nexport const Logging: coreHttp.CompositeMapper = {\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n required: true,\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n deleteProperty: {\n serializedName: \"Delete\",\n required: true,\n xmlName: \"Delete\",\n type: {\n name: \"Boolean\"\n }\n },\n read: {\n serializedName: \"Read\",\n required: true,\n xmlName: \"Read\",\n type: {\n name: \"Boolean\"\n }\n },\n write: {\n serializedName: \"Write\",\n required: true,\n xmlName: \"Write\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const RetentionPolicy: coreHttp.CompositeMapper = {\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n days: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"Days\",\n xmlName: \"Days\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const Metrics: coreHttp.CompositeMapper = {\n serializedName: \"Metrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n includeAPIs: {\n serializedName: \"IncludeAPIs\",\n xmlName: \"IncludeAPIs\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const CorsRule: coreHttp.CompositeMapper = {\n serializedName: \"CorsRule\",\n type: {\n name: \"Composite\",\n className: \"CorsRule\",\n modelProperties: {\n allowedOrigins: {\n serializedName: \"AllowedOrigins\",\n required: true,\n xmlName: \"AllowedOrigins\",\n type: {\n name: \"String\"\n }\n },\n allowedMethods: {\n serializedName: \"AllowedMethods\",\n required: true,\n xmlName: \"AllowedMethods\",\n type: {\n name: \"String\"\n }\n },\n allowedHeaders: {\n serializedName: \"AllowedHeaders\",\n required: true,\n xmlName: \"AllowedHeaders\",\n type: {\n name: \"String\"\n }\n },\n exposedHeaders: {\n serializedName: \"ExposedHeaders\",\n required: true,\n xmlName: \"ExposedHeaders\",\n type: {\n name: \"String\"\n }\n },\n maxAgeInSeconds: {\n 
constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"MaxAgeInSeconds\",\n required: true,\n xmlName: \"MaxAgeInSeconds\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const StaticWebsite: coreHttp.CompositeMapper = {\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n indexDocument: {\n serializedName: \"IndexDocument\",\n xmlName: \"IndexDocument\",\n type: {\n name: \"String\"\n }\n },\n errorDocument404Path: {\n serializedName: \"ErrorDocument404Path\",\n xmlName: \"ErrorDocument404Path\",\n type: {\n name: \"String\"\n }\n },\n defaultIndexDocumentPath: {\n serializedName: \"DefaultIndexDocumentPath\",\n xmlName: \"DefaultIndexDocumentPath\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const StorageError: coreHttp.CompositeMapper = {\n serializedName: \"StorageError\",\n type: {\n name: \"Composite\",\n className: \"StorageError\",\n modelProperties: {\n message: {\n serializedName: \"Message\",\n xmlName: \"Message\",\n type: {\n name: \"String\"\n }\n },\n code: {\n serializedName: \"Code\",\n xmlName: \"Code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobServiceStatistics: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceStatistics\",\n xmlName: \"StorageServiceStats\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceStatistics\",\n modelProperties: {\n geoReplication: {\n serializedName: \"GeoReplication\",\n xmlName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\"\n }\n }\n }\n }\n};\n\nexport const GeoReplication: coreHttp.CompositeMapper = {\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\",\n modelProperties: {\n status: {\n serializedName: \"Status\",\n required: true,\n xmlName: \"Status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"live\", \"bootstrap\", \"unavailable\"]\n }\n },\n lastSyncOn: {\n serializedName: \"LastSyncTime\",\n required: true,\n xmlName: \"LastSyncTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ListContainersSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListContainersSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListContainersSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n containerItems: {\n serializedName: \"ContainerItems\",\n required: true,\n xmlName: \"Containers\",\n xmlIsWrapped: true,\n xmlElementName: \"Container\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ContainerItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerItem: coreHttp.CompositeMapper = {\n serializedName: \"ContainerItem\",\n xmlName: 
\"Container\",\n type: {\n name: \"Composite\",\n className: \"ContainerItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n }\n }\n }\n};\n\nexport const ContainerProperties: coreHttp.CompositeMapper = {\n serializedName: \"ContainerProperties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\",\n modelProperties: {\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n publicAccess: {\n serializedName: \"PublicAccess\",\n xmlName: \"PublicAccess\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"HasImmutabilityPolicy\",\n xmlName: \"HasImmutabilityPolicy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"HasLegalHold\",\n xmlName: \"HasLegalHold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"DefaultEncryptionScope\",\n xmlName: \"DefaultEncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n preventEncryptionScopeOverride: {\n serializedName: \"DenyEncryptionScopeOverride\",\n xmlName: \"DenyEncryptionScopeOverride\",\n type: {\n name: \"Boolean\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"ImmutableStorageWithVersioningEnabled\",\n xmlName: \"ImmutableStorageWithVersioningEnabled\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const KeyInfo: coreHttp.CompositeMapper = {\n serializedName: \"KeyInfo\",\n type: {\n name: \"Composite\",\n className: \"KeyInfo\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n required: true,\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const UserDelegationKey: coreHttp.CompositeMapper = {\n serializedName: \"UserDelegationKey\",\n type: {\n name: 
\"Composite\",\n className: \"UserDelegationKey\",\n modelProperties: {\n signedObjectId: {\n serializedName: \"SignedOid\",\n required: true,\n xmlName: \"SignedOid\",\n type: {\n name: \"String\"\n }\n },\n signedTenantId: {\n serializedName: \"SignedTid\",\n required: true,\n xmlName: \"SignedTid\",\n type: {\n name: \"String\"\n }\n },\n signedStartsOn: {\n serializedName: \"SignedStart\",\n required: true,\n xmlName: \"SignedStart\",\n type: {\n name: \"String\"\n }\n },\n signedExpiresOn: {\n serializedName: \"SignedExpiry\",\n required: true,\n xmlName: \"SignedExpiry\",\n type: {\n name: \"String\"\n }\n },\n signedService: {\n serializedName: \"SignedService\",\n required: true,\n xmlName: \"SignedService\",\n type: {\n name: \"String\"\n }\n },\n signedVersion: {\n serializedName: \"SignedVersion\",\n required: true,\n xmlName: \"SignedVersion\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobSegment: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobSegment\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobSegment\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n where: {\n serializedName: \"Where\",\n required: true,\n xmlName: \"Where\",\n type: {\n name: \"String\"\n }\n },\n blobs: {\n serializedName: \"Blobs\",\n required: true,\n xmlName: \"Blobs\",\n xmlIsWrapped: true,\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobItem: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobItem\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n tags: {\n serializedName: \"Tags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n }\n }\n }\n};\n\nexport const BlobTags: coreHttp.CompositeMapper = {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\",\n modelProperties: {\n blobTagSet: {\n serializedName: \"BlobTagSet\",\n required: true,\n xmlName: \"TagSet\",\n xmlIsWrapped: true,\n xmlElementName: \"Tag\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobTag\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobTag: coreHttp.CompositeMapper = {\n serializedName: \"BlobTag\",\n xmlName: \"Tag\",\n type: {\n name: \"Composite\",\n className: \"BlobTag\",\n modelProperties: {\n key: {\n serializedName: \"Key\",\n required: true,\n xmlName: \"Key\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const SignedIdentifier: coreHttp.CompositeMapper = {\n serializedName: \"SignedIdentifier\",\n xmlName: 
\"SignedIdentifier\",\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\",\n modelProperties: {\n id: {\n serializedName: \"Id\",\n required: true,\n xmlName: \"Id\",\n type: {\n name: \"String\"\n }\n },\n accessPolicy: {\n serializedName: \"AccessPolicy\",\n xmlName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\"\n }\n }\n }\n }\n};\n\nexport const AccessPolicy: coreHttp.CompositeMapper = {\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n },\n permissions: {\n serializedName: \"Permission\",\n xmlName: \"Permission\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsFlatSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsFlatSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsFlatSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobFlatListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobFlatListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\",\n modelProperties: {\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobItemInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobItemInternal\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n required: true,\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n snapshot: {\n serializedName: \"Snapshot\",\n required: true,\n xmlName: \"Snapshot\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"VersionId\",\n xmlName: \"VersionId\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"IsCurrentVersion\",\n xmlName: \"IsCurrentVersion\",\n type: {\n name: \"Boolean\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: 
\"BlobPropertiesInternal\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n blobTags: {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n },\n objectReplicationMetadata: {\n serializedName: \"ObjectReplicationMetadata\",\n xmlName: \"OrMetadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n hasVersionsOnly: {\n serializedName: \"HasVersionsOnly\",\n xmlName: \"HasVersionsOnly\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobName: coreHttp.CompositeMapper = {\n serializedName: \"BlobName\",\n type: {\n name: \"Composite\",\n className: \"BlobName\",\n modelProperties: {\n encoded: {\n serializedName: \"Encoded\",\n xmlName: \"Encoded\",\n xmlIsAttribute: true,\n type: {\n name: \"Boolean\"\n }\n },\n content: {\n serializedName: \"content\",\n xmlName: \"content\",\n xmlIsMsText: true,\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobPropertiesInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobPropertiesInternal\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\",\n modelProperties: {\n createdOn: {\n serializedName: \"Creation-Time\",\n xmlName: \"Creation-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n serializedName: \"Content-Length\",\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"Content-Type\",\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n },\n contentEncoding: {\n serializedName: \"Content-Encoding\",\n xmlName: \"Content-Encoding\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"Content-Language\",\n xmlName: \"Content-Language\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentDisposition: {\n serializedName: \"Content-Disposition\",\n xmlName: \"Content-Disposition\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"Cache-Control\",\n xmlName: \"Cache-Control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"BlobType\",\n xmlName: \"BlobType\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n copyId: {\n serializedName: \"CopyId\",\n xmlName: \"CopyId\",\n type: 
{\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"CopyStatus\",\n xmlName: \"CopyStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n copySource: {\n serializedName: \"CopySource\",\n xmlName: \"CopySource\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"CopyProgress\",\n xmlName: \"CopyProgress\",\n type: {\n name: \"String\"\n }\n },\n copyCompletedOn: {\n serializedName: \"CopyCompletionTime\",\n xmlName: \"CopyCompletionTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"CopyStatusDescription\",\n xmlName: \"CopyStatusDescription\",\n type: {\n name: \"String\"\n }\n },\n serverEncrypted: {\n serializedName: \"ServerEncrypted\",\n xmlName: \"ServerEncrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n incrementalCopy: {\n serializedName: \"IncrementalCopy\",\n xmlName: \"IncrementalCopy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"DestinationSnapshot\",\n xmlName: \"DestinationSnapshot\",\n type: {\n name: \"String\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n accessTier: {\n serializedName: \"AccessTier\",\n xmlName: \"AccessTier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\",\n \"Cold\"\n ]\n }\n },\n accessTierInferred: {\n serializedName: \"AccessTierInferred\",\n xmlName: \"AccessTierInferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"ArchiveStatus\",\n xmlName: \"ArchiveStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"rehydrate-pending-to-hot\",\n \"rehydrate-pending-to-cool\",\n \"rehydrate-pending-to-cold\"\n ]\n }\n },\n customerProvidedKeySha256: {\n serializedName: \"CustomerProvidedKeySha256\",\n xmlName: \"CustomerProvidedKeySha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"EncryptionScope\",\n xmlName: \"EncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"AccessTierChangeTime\",\n xmlName: \"AccessTierChangeTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n tagCount: {\n serializedName: \"TagCount\",\n xmlName: \"TagCount\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry-Time\",\n xmlName: \"Expiry-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"Sealed\",\n xmlName: \"Sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"RehydratePriority\",\n xmlName: \"RehydratePriority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessedOn: {\n serializedName: \"LastAccessTime\",\n xmlName: \"LastAccessTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"ImmutabilityPolicyUntilDate\",\n xmlName: \"ImmutabilityPolicyUntilDate\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"ImmutabilityPolicyMode\",\n xmlName: \"ImmutabilityPolicyMode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", 
\"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"LegalHold\",\n xmlName: \"LegalHold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsHierarchySegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsHierarchySegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsHierarchySegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n delimiter: {\n serializedName: \"Delimiter\",\n xmlName: \"Delimiter\",\n type: {\n name: \"String\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobHierarchyListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobHierarchyListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\",\n modelProperties: {\n blobPrefixes: {\n serializedName: \"BlobPrefixes\",\n xmlName: \"BlobPrefixes\",\n xmlElementName: \"BlobPrefix\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\"\n }\n }\n }\n },\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobPrefix: coreHttp.CompositeMapper = {\n serializedName: \"BlobPrefix\",\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n }\n }\n }\n};\n\nexport const BlockLookupList: coreHttp.CompositeMapper = {\n serializedName: \"BlockLookupList\",\n xmlName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockLookupList\",\n modelProperties: {\n committed: {\n serializedName: \"Committed\",\n xmlName: \"Committed\",\n xmlElementName: \"Committed\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n uncommitted: {\n serializedName: \"Uncommitted\",\n xmlName: \"Uncommitted\",\n xmlElementName: \"Uncommitted\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n latest: {\n serializedName: \"Latest\",\n xmlName: \"Latest\",\n xmlElementName: \"Latest\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlockList: coreHttp.CompositeMapper = {\n serializedName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockList\",\n modelProperties: {\n committedBlocks: {\n 
serializedName: \"CommittedBlocks\",\n xmlName: \"CommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n },\n uncommittedBlocks: {\n serializedName: \"UncommittedBlocks\",\n xmlName: \"UncommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const Block: coreHttp.CompositeMapper = {\n serializedName: \"Block\",\n type: {\n name: \"Composite\",\n className: \"Block\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n size: {\n serializedName: \"Size\",\n required: true,\n xmlName: \"Size\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const PageList: coreHttp.CompositeMapper = {\n serializedName: \"PageList\",\n type: {\n name: \"Composite\",\n className: \"PageList\",\n modelProperties: {\n pageRange: {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n xmlElementName: \"PageRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"PageRange\"\n }\n }\n }\n },\n clearRange: {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n xmlElementName: \"ClearRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ClearRange\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageRange: coreHttp.CompositeMapper = {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n type: {\n name: \"Composite\",\n className: \"PageRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ClearRange: coreHttp.CompositeMapper = {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n type: {\n name: \"Composite\",\n className: \"ClearRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const QueryRequest: coreHttp.CompositeMapper = {\n serializedName: \"QueryRequest\",\n xmlName: \"QueryRequest\",\n type: {\n name: \"Composite\",\n className: \"QueryRequest\",\n modelProperties: {\n queryType: {\n serializedName: \"QueryType\",\n required: true,\n xmlName: \"QueryType\",\n type: {\n name: \"String\"\n }\n },\n expression: {\n serializedName: \"Expression\",\n required: true,\n xmlName: \"Expression\",\n type: {\n name: \"String\"\n }\n },\n inputSerialization: {\n serializedName: \"InputSerialization\",\n xmlName: \"InputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n },\n outputSerialization: {\n serializedName: \"OutputSerialization\",\n xmlName: \"OutputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n }\n }\n }\n};\n\nexport const QuerySerialization: coreHttp.CompositeMapper = {\n serializedName: \"QuerySerialization\",\n type: {\n name: \"Composite\",\n 
className: \"QuerySerialization\",\n modelProperties: {\n format: {\n serializedName: \"Format\",\n xmlName: \"Format\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\"\n }\n }\n }\n }\n};\n\nexport const QueryFormat: coreHttp.CompositeMapper = {\n serializedName: \"QueryFormat\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"delimited\", \"json\", \"arrow\", \"parquet\"]\n }\n },\n delimitedTextConfiguration: {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\"\n }\n },\n jsonTextConfiguration: {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\"\n }\n },\n arrowConfiguration: {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\"\n }\n },\n parquetTextConfiguration: {\n serializedName: \"ParquetTextConfiguration\",\n xmlName: \"ParquetTextConfiguration\",\n type: {\n name: \"any\"\n }\n }\n }\n }\n};\n\nexport const DelimitedTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\",\n modelProperties: {\n columnSeparator: {\n serializedName: \"ColumnSeparator\",\n xmlName: \"ColumnSeparator\",\n type: {\n name: \"String\"\n }\n },\n fieldQuote: {\n serializedName: \"FieldQuote\",\n xmlName: \"FieldQuote\",\n type: {\n name: \"String\"\n }\n },\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n },\n escapeChar: {\n serializedName: \"EscapeChar\",\n xmlName: \"EscapeChar\",\n type: {\n name: \"String\"\n }\n },\n headersPresent: {\n serializedName: \"HeadersPresent\",\n xmlName: \"HasHeaders\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const JsonTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\",\n modelProperties: {\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ArrowConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\",\n modelProperties: {\n schema: {\n serializedName: \"Schema\",\n required: true,\n xmlName: \"Schema\",\n xmlIsWrapped: true,\n xmlElementName: \"Field\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ArrowField\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const ArrowField: coreHttp.CompositeMapper = {\n serializedName: \"ArrowField\",\n xmlName: \"Field\",\n type: {\n name: \"Composite\",\n className: \"ArrowField\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"String\"\n }\n },\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n precision: {\n serializedName: \"Precision\",\n xmlName: 
\"Precision\",\n type: {\n name: \"Number\"\n }\n },\n scale: {\n serializedName: \"Scale\",\n xmlName: \"Scale\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n 
serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: 
{\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n isHierarchicalNamespaceEnabled: {\n serializedName: \"x-ms-is-hns-enabled\",\n xmlName: \"x-ms-is-hns-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesHeaders\",\n modelProperties: {\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"x-ms-has-immutability-policy\",\n xmlName: \"x-ms-has-immutability-policy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"x-ms-has-legal-hold\",\n xmlName: \"x-ms-has-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: 
\"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n denyEncryptionScopeOverride: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"x-ms-immutable-storage-with-versioning-enabled\",\n xmlName: \"x-ms-immutable-storage-with-versioning-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyHeaders\",\n modelProperties: {\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerRestoreHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: 
\"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n 
xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: 
\"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerListBlobHierarchySegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n xmlName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: 
\"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: 
\"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n xmlName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n 
serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n isIncrementalCopy: {\n serializedName: \"x-ms-incremental-copy\",\n xmlName: \"x-ms-incremental-copy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"x-ms-copy-destination-snapshot\",\n xmlName: \"x-ms-copy-destination-snapshot\",\n type: {\n name: \"String\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n 
encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n accessTier: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n },\n accessTierInferred: {\n serializedName: \"x-ms-access-tier-inferred\",\n xmlName: \"x-ms-access-tier-inferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"x-ms-archive-status\",\n xmlName: \"x-ms-archive-status\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"x-ms-access-tier-change-time\",\n xmlName: \"x-ms-access-tier-change-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n 
name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: 
\"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiry: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n 
errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"BlobAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n 
serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotHeaders\",\n 
modelProperties: {\n snapshot: {\n serializedName: \"x-ms-snapshot\",\n xmlName: \"x-ms-snapshot\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n 
}\n};\n\nexport const BlobCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n defaultValue: \"success\",\n isConstant: true,\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierHeaders: coreHttp.CompositeMapper = {\n serializedName: 
\"Blob_setTierHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n 
serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletionTime: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n 
serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: 
\"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n 
serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: 
\"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n 
name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n 
xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n 
name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const 
AppendBlobAppendBlockFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: 
\"AppendBlob_sealExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n 
xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: 
\"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListExceptionHeaders\",\n type: {\n name: 
\"Composite\",\n className: \"BlockBlobCommitBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n OperationParameter,\n OperationURLParameter,\n OperationQueryParameter,\n QueryCollectionFormat\n} from \"@azure/core-http\";\nimport {\n BlobServiceProperties as BlobServicePropertiesMapper,\n KeyInfo as KeyInfoMapper,\n QueryRequest as QueryRequestMapper,\n BlobTags as BlobTagsMapper,\n BlockLookupList as BlockLookupListMapper\n} from \"../models/mappers\";\n\nexport const contentType: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobServiceProperties: OperationParameter = {\n parameterPath: \"blobServiceProperties\",\n mapper: BlobServicePropertiesMapper\n};\n\nexport const accept: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const url: OperationURLParameter = {\n parameterPath: \"url\",\n mapper: {\n serializedName: \"url\",\n required: true,\n xmlName: \"url\",\n type: {\n name: \"String\"\n }\n },\n skipEncoding: true\n};\n\nexport const restype: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"service\",\n isConstant: 
true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"properties\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const timeoutInSeconds: OperationQueryParameter = {\n parameterPath: [\"options\", \"timeoutInSeconds\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"timeout\",\n xmlName: \"timeout\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const version: OperationParameter = {\n parameterPath: \"version\",\n mapper: {\n defaultValue: \"2023-11-03\",\n isConstant: true,\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const requestId: OperationParameter = {\n parameterPath: [\"options\", \"requestId\"],\n mapper: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const accept1: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp1: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"stats\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp2: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"list\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prefix: OperationQueryParameter = {\n parameterPath: [\"options\", \"prefix\"],\n mapper: {\n serializedName: \"prefix\",\n xmlName: \"prefix\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const marker: OperationQueryParameter = {\n parameterPath: [\"options\", \"marker\"],\n mapper: {\n serializedName: \"marker\",\n xmlName: \"marker\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxPageSize: OperationQueryParameter = {\n parameterPath: [\"options\", \"maxPageSize\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"maxresults\",\n xmlName: \"maxresults\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const include: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListContainersIncludeType\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\"metadata\", \"deleted\", \"system\"]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const keyInfo: OperationParameter = {\n parameterPath: \"keyInfo\",\n mapper: KeyInfoMapper\n};\n\nexport const comp3: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"userdelegationkey\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype1: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"account\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const comp4: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n 
defaultValue: \"batch\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const contentLength: OperationParameter = {\n parameterPath: \"contentLength\",\n mapper: {\n serializedName: \"Content-Length\",\n required: true,\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const multipartContentType: OperationParameter = {\n parameterPath: \"multipartContentType\",\n mapper: {\n serializedName: \"Content-Type\",\n required: true,\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp5: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blobs\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const where: OperationQueryParameter = {\n parameterPath: [\"options\", \"where\"],\n mapper: {\n serializedName: \"where\",\n xmlName: \"where\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype2: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"container\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const metadata: OperationParameter = {\n parameterPath: [\"options\", \"metadata\"],\n mapper: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n }\n};\n\nexport const access: OperationParameter = {\n parameterPath: [\"options\", \"access\"],\n mapper: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n }\n};\n\nexport const defaultEncryptionScope: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"defaultEncryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const preventEncryptionScopeOverride: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"preventEncryptionScopeOverride\"\n ],\n mapper: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const leaseId: OperationParameter = {\n parameterPath: [\"options\", \"leaseAccessConditions\", \"leaseId\"],\n mapper: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifModifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifModifiedSince\"],\n mapper: {\n serializedName: \"If-Modified-Since\",\n xmlName: \"If-Modified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const ifUnmodifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifUnmodifiedSince\"],\n mapper: {\n serializedName: \"If-Unmodified-Since\",\n xmlName: \"If-Unmodified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const comp6: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"metadata\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp7: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: 
\"acl\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const containerAcl: OperationParameter = {\n parameterPath: [\"options\", \"containerAcl\"],\n mapper: {\n serializedName: \"containerAcl\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n }\n};\n\nexport const comp8: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"undelete\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerName: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerName\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-name\",\n xmlName: \"x-ms-deleted-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerVersion: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerVersion\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-version\",\n xmlName: \"x-ms-deleted-container-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp9: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"rename\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContainerName: OperationParameter = {\n parameterPath: \"sourceContainerName\",\n mapper: {\n serializedName: \"x-ms-source-container-name\",\n required: true,\n xmlName: \"x-ms-source-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"sourceLeaseId\"],\n mapper: {\n serializedName: \"x-ms-source-lease-id\",\n xmlName: \"x-ms-source-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp10: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"lease\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"acquire\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const duration: OperationParameter = {\n parameterPath: [\"options\", \"duration\"],\n mapper: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const proposedLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"proposedLeaseId\"],\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action1: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"release\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const leaseId1: OperationParameter = {\n parameterPath: \"leaseId\",\n mapper: {\n serializedName: \"x-ms-lease-id\",\n required: true,\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action2: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"renew\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action3: OperationParameter = {\n 
parameterPath: \"action\",\n mapper: {\n defaultValue: \"break\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const breakPeriod: OperationParameter = {\n parameterPath: [\"options\", \"breakPeriod\"],\n mapper: {\n serializedName: \"x-ms-lease-break-period\",\n xmlName: \"x-ms-lease-break-period\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const action4: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"change\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const proposedLeaseId1: OperationParameter = {\n parameterPath: \"proposedLeaseId\",\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n required: true,\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const include1: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListBlobsIncludeItem\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"copy\",\n \"deleted\",\n \"metadata\",\n \"snapshots\",\n \"uncommittedblobs\",\n \"versions\",\n \"tags\",\n \"immutabilitypolicy\",\n \"legalhold\",\n \"deletedwithversions\"\n ]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const delimiter: OperationQueryParameter = {\n parameterPath: \"delimiter\",\n mapper: {\n serializedName: \"delimiter\",\n required: true,\n xmlName: \"delimiter\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const snapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"snapshot\"],\n mapper: {\n serializedName: \"snapshot\",\n xmlName: \"snapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const versionId: OperationQueryParameter = {\n parameterPath: [\"options\", \"versionId\"],\n mapper: {\n serializedName: \"versionid\",\n xmlName: \"versionid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const range: OperationParameter = {\n parameterPath: [\"options\", \"range\"],\n mapper: {\n serializedName: \"x-ms-range\",\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const rangeGetContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentMD5\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-md5\",\n xmlName: \"x-ms-range-get-content-md5\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const rangeGetContentCRC64: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentCRC64\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-crc64\",\n xmlName: \"x-ms-range-get-content-crc64\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionKey: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKey\"],\n mapper: {\n serializedName: \"x-ms-encryption-key\",\n xmlName: \"x-ms-encryption-key\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionKeySha256: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKeySha256\"],\n mapper: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionAlgorithm: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionAlgorithm\"],\n mapper: {\n serializedName: \"x-ms-encryption-algorithm\",\n xmlName: 
\"x-ms-encryption-algorithm\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifMatch\"],\n mapper: {\n serializedName: \"If-Match\",\n xmlName: \"If-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifNoneMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifNoneMatch\"],\n mapper: {\n serializedName: \"If-None-Match\",\n xmlName: \"If-None-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifTags: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifTags\"],\n mapper: {\n serializedName: \"x-ms-if-tags\",\n xmlName: \"x-ms-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deleteSnapshots: OperationParameter = {\n parameterPath: [\"options\", \"deleteSnapshots\"],\n mapper: {\n serializedName: \"x-ms-delete-snapshots\",\n xmlName: \"x-ms-delete-snapshots\",\n type: {\n name: \"Enum\",\n allowedValues: [\"include\", \"only\"]\n }\n }\n};\n\nexport const blobDeleteType: OperationQueryParameter = {\n parameterPath: [\"options\", \"blobDeleteType\"],\n mapper: {\n serializedName: \"deletetype\",\n xmlName: \"deletetype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp11: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"expiry\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiryOptions: OperationParameter = {\n parameterPath: \"expiryOptions\",\n mapper: {\n serializedName: \"x-ms-expiry-option\",\n required: true,\n xmlName: \"x-ms-expiry-option\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiresOn: OperationParameter = {\n parameterPath: [\"options\", \"expiresOn\"],\n mapper: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobCacheControl: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobCacheControl\"],\n mapper: {\n serializedName: \"x-ms-blob-cache-control\",\n xmlName: \"x-ms-blob-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentType: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentType\"],\n mapper: {\n serializedName: \"x-ms-blob-content-type\",\n xmlName: \"x-ms-blob-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentMD5\"],\n mapper: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobContentEncoding: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentEncoding\"],\n mapper: {\n serializedName: \"x-ms-blob-content-encoding\",\n xmlName: \"x-ms-blob-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLanguage: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentLanguage\"],\n mapper: {\n serializedName: \"x-ms-blob-content-language\",\n xmlName: \"x-ms-blob-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentDisposition: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentDisposition\"],\n mapper: {\n serializedName: 
\"x-ms-blob-content-disposition\",\n xmlName: \"x-ms-blob-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp12: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"immutabilityPolicies\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const immutabilityPolicyExpiry: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyExpiry\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const immutabilityPolicyMode: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyMode\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n};\n\nexport const comp13: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"legalhold\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const legalHold: OperationParameter = {\n parameterPath: \"legalHold\",\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n required: true,\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionScope: OperationParameter = {\n parameterPath: [\"options\", \"encryptionScope\"],\n mapper: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp14: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"snapshot\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier: OperationParameter = {\n parameterPath: [\"options\", \"tier\"],\n mapper: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\",\n \"Cold\"\n ]\n }\n }\n};\n\nexport const rehydratePriority: OperationParameter = {\n parameterPath: [\"options\", \"rehydratePriority\"],\n mapper: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n }\n};\n\nexport const sourceIfModifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfModifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-modified-since\",\n xmlName: \"x-ms-source-if-modified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfUnmodifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-unmodified-since\",\n xmlName: \"x-ms-source-if-unmodified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfMatch: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfMatch\"],\n mapper: {\n serializedName: \"x-ms-source-if-match\",\n xmlName: \"x-ms-source-if-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfNoneMatch: 
OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfNoneMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-none-match\",\n xmlName: \"x-ms-source-if-none-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfTags: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfTags\"],\n mapper: {\n serializedName: \"x-ms-source-if-tags\",\n xmlName: \"x-ms-source-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySource: OperationParameter = {\n parameterPath: \"copySource\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobTagsString: OperationParameter = {\n parameterPath: [\"options\", \"blobTagsString\"],\n mapper: {\n serializedName: \"x-ms-tags\",\n xmlName: \"x-ms-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sealBlob: OperationParameter = {\n parameterPath: [\"options\", \"sealBlob\"],\n mapper: {\n serializedName: \"x-ms-seal-blob\",\n xmlName: \"x-ms-seal-blob\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const legalHold1: OperationParameter = {\n parameterPath: [\"options\", \"legalHold\"],\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const xMsRequiresSync: OperationParameter = {\n parameterPath: \"xMsRequiresSync\",\n mapper: {\n defaultValue: \"true\",\n isConstant: true,\n serializedName: \"x-ms-requires-sync\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentMD5\"],\n mapper: {\n serializedName: \"x-ms-source-content-md5\",\n xmlName: \"x-ms-source-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const copySourceAuthorization: OperationParameter = {\n parameterPath: [\"options\", \"copySourceAuthorization\"],\n mapper: {\n serializedName: \"x-ms-copy-source-authorization\",\n xmlName: \"x-ms-copy-source-authorization\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceTags: OperationParameter = {\n parameterPath: [\"options\", \"copySourceTags\"],\n mapper: {\n serializedName: \"x-ms-copy-source-tag-option\",\n xmlName: \"x-ms-copy-source-tag-option\",\n type: {\n name: \"Enum\",\n allowedValues: [\"REPLACE\", \"COPY\"]\n }\n }\n};\n\nexport const comp15: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"copy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyActionAbortConstant: OperationParameter = {\n parameterPath: \"copyActionAbortConstant\",\n mapper: {\n defaultValue: \"abort\",\n isConstant: true,\n serializedName: \"x-ms-copy-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyId: OperationQueryParameter = {\n parameterPath: \"copyId\",\n mapper: {\n serializedName: \"copyid\",\n required: true,\n xmlName: \"copyid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp16: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tier\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier1: OperationParameter = {\n parameterPath: \"tier\",\n mapper: {\n serializedName: \"x-ms-access-tier\",\n required: true,\n xmlName: \"x-ms-access-tier\",\n type: {\n name: 
\"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\",\n \"Cold\"\n ]\n }\n }\n};\n\nexport const queryRequest: OperationParameter = {\n parameterPath: [\"options\", \"queryRequest\"],\n mapper: QueryRequestMapper\n};\n\nexport const comp17: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"query\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp18: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tags\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tags: OperationParameter = {\n parameterPath: [\"options\", \"tags\"],\n mapper: BlobTagsMapper\n};\n\nexport const transactionalContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentMD5\"],\n mapper: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const transactionalContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobType: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"PageBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLength: OperationParameter = {\n parameterPath: \"blobContentLength\",\n mapper: {\n serializedName: \"x-ms-blob-content-length\",\n required: true,\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const blobSequenceNumber: OperationParameter = {\n parameterPath: [\"options\", \"blobSequenceNumber\"],\n mapper: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const contentType1: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/octet-stream\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body1: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const accept2: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp19: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"page\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const pageWrite: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"update\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThanOrEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThanOrEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-le\",\n xmlName: \"x-ms-if-sequence-number-le\",\n type: {\n name: \"Number\"\n }\n 
}\n};\n\nexport const ifSequenceNumberLessThan: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThan\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-lt\",\n xmlName: \"x-ms-if-sequence-number-lt\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-eq\",\n xmlName: \"x-ms-if-sequence-number-eq\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const pageWrite1: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"clear\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceUrl: OperationParameter = {\n parameterPath: \"sourceUrl\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceRange: OperationParameter = {\n parameterPath: \"sourceRange\",\n mapper: {\n serializedName: \"x-ms-source-range\",\n required: true,\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-source-content-crc64\",\n xmlName: \"x-ms-source-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const range1: OperationParameter = {\n parameterPath: \"range\",\n mapper: {\n serializedName: \"x-ms-range\",\n required: true,\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp20: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"pagelist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevsnapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"prevsnapshot\"],\n mapper: {\n serializedName: \"prevsnapshot\",\n xmlName: \"prevsnapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevSnapshotUrl: OperationParameter = {\n parameterPath: [\"options\", \"prevSnapshotUrl\"],\n mapper: {\n serializedName: \"x-ms-previous-snapshot-url\",\n xmlName: \"x-ms-previous-snapshot-url\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sequenceNumberAction: OperationParameter = {\n parameterPath: \"sequenceNumberAction\",\n mapper: {\n serializedName: \"x-ms-sequence-number-action\",\n required: true,\n xmlName: \"x-ms-sequence-number-action\",\n type: {\n name: \"Enum\",\n allowedValues: [\"max\", \"update\", \"increment\"]\n }\n }\n};\n\nexport const comp21: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"incrementalcopy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType1: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"AppendBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp22: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"appendblock\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxSize: OperationParameter = {\n parameterPath: [\"options\", 
\"appendPositionAccessConditions\", \"maxSize\"],\n mapper: {\n serializedName: \"x-ms-blob-condition-maxsize\",\n xmlName: \"x-ms-blob-condition-maxsize\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const appendPosition: OperationParameter = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n \"appendPosition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-appendpos\",\n xmlName: \"x-ms-blob-condition-appendpos\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const sourceRange1: OperationParameter = {\n parameterPath: [\"options\", \"sourceRange\"],\n mapper: {\n serializedName: \"x-ms-source-range\",\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp23: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"seal\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType2: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"BlockBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceBlobProperties: OperationParameter = {\n parameterPath: [\"options\", \"copySourceBlobProperties\"],\n mapper: {\n serializedName: \"x-ms-copy-source-blob-properties\",\n xmlName: \"x-ms-copy-source-blob-properties\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const comp24: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"block\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blockId: OperationQueryParameter = {\n parameterPath: \"blockId\",\n mapper: {\n serializedName: \"blockid\",\n required: true,\n xmlName: \"blockid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blocks: OperationParameter = {\n parameterPath: \"blocks\",\n mapper: BlockLookupListMapper\n};\n\nexport const comp25: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blocklist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const listType: OperationQueryParameter = {\n parameterPath: \"listType\",\n mapper: {\n defaultValue: \"committed\",\n serializedName: \"blocklisttype\",\n required: true,\n xmlName: \"blocklisttype\",\n type: {\n name: \"Enum\",\n allowedValues: [\"committed\", \"uncommitted\", \"all\"]\n }\n }\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobServiceProperties,\n ServiceSetPropertiesOptionalParams,\n ServiceSetPropertiesResponse,\n ServiceGetPropertiesOptionalParams,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsOptionalParams,\n ServiceGetStatisticsResponse,\n ServiceListContainersSegmentOptionalParams,\n ServiceListContainersSegmentResponse,\n KeyInfo,\n ServiceGetUserDelegationKeyOptionalParams,\n ServiceGetUserDelegationKeyResponse,\n ServiceGetAccountInfoResponse,\n ServiceSubmitBatchOptionalParams,\n ServiceSubmitBatchResponse,\n ServiceFilterBlobsOptionalParams,\n ServiceFilterBlobsResponse\n} 
from \"../models\";\n\n/** Class representing a Service. */\nexport class Service {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Service class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * Sets properties for a storage account's Blob service endpoint, including properties for Storage\n * Analytics and CORS (Cross-Origin Resource Sharing) rules\n * @param blobServiceProperties The StorageService properties.\n * @param options The options parameters.\n */\n setProperties(\n blobServiceProperties: BlobServiceProperties,\n options?: ServiceSetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobServiceProperties,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the properties of a storage account's Blob service, including properties for Storage Analytics\n * and CORS (Cross-Origin Resource Sharing) rules.\n * @param options The options parameters.\n */\n getProperties(\n options?: ServiceGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only available on the\n * secondary location endpoint when read-access geo-redundant replication is enabled for the storage\n * account.\n * @param options The options parameters.\n */\n getStatistics(\n options?: ServiceGetStatisticsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getStatisticsOperationSpec\n ) as Promise;\n }\n\n /**\n * The List Containers Segment operation returns a list of the containers under the specified account\n * @param options The options parameters.\n */\n listContainersSegment(\n options?: ServiceListContainersSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listContainersSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using\n * bearer token authentication.\n * @param keyInfo Key information\n * @param options The options parameters.\n */\n getUserDelegationKey(\n keyInfo: KeyInfo,\n options?: ServiceGetUserDelegationKeyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n keyInfo,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getUserDelegationKeyOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ServiceSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a\n * given search expression. 
Filter blobs searches across all containers within a storage account but\n * can be scoped within the expression to a single container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ServiceFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst setPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ServiceSetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders\n }\n },\n requestBody: Parameters.blobServiceProperties,\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceProperties,\n headersMapper: Mappers.ServiceGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getStatisticsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceStatistics,\n headersMapper: Mappers.ServiceGetStatisticsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listContainersSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListContainersSegmentResponse,\n headersMapper: Mappers.ServiceListContainersSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.include\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getUserDelegationKeyOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.UserDelegationKey,\n headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders\n }\n },\n requestBody: Parameters.keyInfo,\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp3\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ServiceGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ServiceSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ServiceFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n ContainerCreateOptionalParams,\n ContainerCreateResponse,\n ContainerGetPropertiesOptionalParams,\n ContainerGetPropertiesResponse,\n ContainerDeleteOptionalParams,\n ContainerDeleteResponse,\n ContainerSetMetadataOptionalParams,\n ContainerSetMetadataResponse,\n ContainerGetAccessPolicyOptionalParams,\n ContainerGetAccessPolicyResponse,\n ContainerSetAccessPolicyOptionalParams,\n ContainerSetAccessPolicyResponse,\n ContainerRestoreOptionalParams,\n ContainerRestoreResponse,\n ContainerRenameOptionalParams,\n ContainerRenameResponse,\n 
ContainerSubmitBatchOptionalParams,\n ContainerSubmitBatchResponse,\n ContainerFilterBlobsOptionalParams,\n ContainerFilterBlobsResponse,\n ContainerAcquireLeaseOptionalParams,\n ContainerAcquireLeaseResponse,\n ContainerReleaseLeaseOptionalParams,\n ContainerReleaseLeaseResponse,\n ContainerRenewLeaseOptionalParams,\n ContainerRenewLeaseResponse,\n ContainerBreakLeaseOptionalParams,\n ContainerBreakLeaseResponse,\n ContainerChangeLeaseOptionalParams,\n ContainerChangeLeaseResponse,\n ContainerListBlobFlatSegmentOptionalParams,\n ContainerListBlobFlatSegmentResponse,\n ContainerListBlobHierarchySegmentOptionalParams,\n ContainerListBlobHierarchySegmentResponse,\n ContainerGetAccountInfoResponse\n} from \"../models\";\n\n/** Class representing a Container. */\nexport class Container {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Container class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * creates a new container under the specified account. If the container with the same name already\n * exists, the operation fails\n * @param options The options parameters.\n */\n create(\n options?: ContainerCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * returns all user-defined metadata and system properties for the specified container. The data\n * returned does not include the container's list of blobs\n * @param options The options parameters.\n */\n getProperties(\n options?: ContainerGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * operation marks the specified container for deletion. The container and any blobs contained within\n * it are later deleted during garbage collection\n * @param options The options parameters.\n */\n delete(\n options?: ContainerDeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * operation sets one or more user-defined name-value pairs for the specified container.\n * @param options The options parameters.\n */\n setMetadata(\n options?: ContainerSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the permissions for the specified container. 
The permissions indicate whether container data\n * may be accessed publicly.\n * @param options The options parameters.\n */\n getAccessPolicy(\n options?: ContainerGetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * sets the permissions for the specified container. The permissions indicate whether blobs in a\n * container may be accessed publicly.\n * @param options The options parameters.\n */\n setAccessPolicy(\n options?: ContainerSetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * Restores a previously-deleted container.\n * @param options The options parameters.\n */\n restore(\n options?: ContainerRestoreOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n restoreOperationSpec\n ) as Promise;\n }\n\n /**\n * Renames an existing container.\n * @param sourceContainerName Required. Specifies the name of the container to rename.\n * @param options The options parameters.\n */\n rename(\n sourceContainerName: string,\n options?: ContainerRenameOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceContainerName,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renameOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ContainerSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given\n * search expression. Filter blobs searches within the given container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ContainerFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n acquireLease(\n options?: ContainerAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: ContainerReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: ContainerRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n breakLease(\n options?: ContainerBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. 
See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: ContainerChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param options The options parameters.\n */\n listBlobFlatSegment(\n options?: ContainerListBlobFlatSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobFlatSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix\n * element in the response body that acts as a placeholder for all blobs whose names begin with the\n * same substring up to the appearance of the delimiter character. The delimiter may be a single\n * character or a string.\n * @param options The options parameters.\n */\n listBlobHierarchySegment(\n delimiter: string,\n options?: ContainerListBlobHierarchySegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n delimiter,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobHierarchySegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.access,\n Parameters.defaultEncryptionScope,\n Parameters.preventEncryptionScopeOverride\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: 
[\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerDeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetMetadataExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp6\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: {\n name: \"Sequence\",\n element: {\n type: { name: \"Composite\", className: \"SignedIdentifier\" }\n }\n },\n serializedName: \"SignedIdentifiers\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\"\n },\n headersMapper: Mappers.ContainerGetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders\n }\n },\n requestBody: Parameters.containerAcl,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.access,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst restoreOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerRestoreHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRestoreExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n 
Parameters.restype2,\n Parameters.comp8\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.deletedContainerName,\n Parameters.deletedContainerVersion\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renameOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenameHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenameExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp9\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.sourceContainerName,\n Parameters.sourceLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ContainerSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp4,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ContainerFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders\n }\n },\n 
queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobFlatSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsFlatSegmentResponse,\n headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobHierarchySegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsHierarchySegmentResponse,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1,\n Parameters.delimiter\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobDownloadOptionalParams,\n BlobDownloadResponse,\n BlobGetPropertiesOptionalParams,\n BlobGetPropertiesResponse,\n BlobDeleteOptionalParams,\n BlobDeleteResponse,\n BlobUndeleteOptionalParams,\n BlobUndeleteResponse,\n BlobExpiryOptions,\n BlobSetExpiryOptionalParams,\n BlobSetExpiryResponse,\n BlobSetHttpHeadersOptionalParams,\n BlobSetHttpHeadersResponse,\n BlobSetImmutabilityPolicyOptionalParams,\n BlobSetImmutabilityPolicyResponse,\n BlobDeleteImmutabilityPolicyOptionalParams,\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetLegalHoldOptionalParams,\n BlobSetLegalHoldResponse,\n BlobSetMetadataOptionalParams,\n BlobSetMetadataResponse,\n BlobAcquireLeaseOptionalParams,\n BlobAcquireLeaseResponse,\n BlobReleaseLeaseOptionalParams,\n BlobReleaseLeaseResponse,\n BlobRenewLeaseOptionalParams,\n BlobRenewLeaseResponse,\n BlobChangeLeaseOptionalParams,\n BlobChangeLeaseResponse,\n BlobBreakLeaseOptionalParams,\n BlobBreakLeaseResponse,\n BlobCreateSnapshotOptionalParams,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLOptionalParams,\n BlobStartCopyFromURLResponse,\n BlobCopyFromURLOptionalParams,\n BlobCopyFromURLResponse,\n BlobAbortCopyFromURLOptionalParams,\n BlobAbortCopyFromURLResponse,\n AccessTier,\n BlobSetTierOptionalParams,\n BlobSetTierResponse,\n BlobGetAccountInfoResponse,\n BlobQueryOptionalParams,\n BlobQueryResponse,\n BlobGetTagsOptionalParams,\n BlobGetTagsResponse,\n BlobSetTagsOptionalParams,\n BlobSetTagsResponse\n} from \"../models\";\n\n/** Class representing a Blob. */\nexport class Blob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Blob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Download operation reads or downloads a blob from the system, including its metadata and\n * properties. 
You can also call Download to read a snapshot.\n * @param options The options parameters.\n */\n download(\n options?: BlobDownloadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n downloadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system\n * properties for the blob. It does not return the content of the blob.\n * @param options The options parameters.\n */\n getProperties(\n options?: BlobGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is\n * permanently removed from the storage account. If the storage account's soft delete feature is\n * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible\n * immediately. However, the blob service retains the blob or snapshot for the number of days specified\n * by the DeleteRetentionPolicy section of [Storage service properties]\n * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is\n * permanently removed from the storage account. Note that you continue to be charged for the\n * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the\n * \"include=deleted\" query parameter to discover which blobs and snapshots have been soft deleted. You\n * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a\n * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404\n * (ResourceNotFound).\n * @param options The options parameters.\n */\n delete(options?: BlobDeleteOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Undelete a blob that was previously soft deleted\n * @param options The options parameters.\n */\n undelete(\n options?: BlobUndeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n undeleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Sets the time a blob will expire and be deleted.\n * @param expiryOptions Required. 
Indicates mode of the expiry time\n * @param options The options parameters.\n */\n setExpiry(\n expiryOptions: BlobExpiryOptions,\n options?: BlobSetExpiryOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n expiryOptions,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setExpiryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set HTTP Headers operation sets system properties on the blob\n * @param options The options parameters.\n */\n setHttpHeaders(\n options?: BlobSetHttpHeadersOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setHttpHeadersOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Immutability Policy operation sets the immutability policy on the blob\n * @param options The options parameters.\n */\n setImmutabilityPolicy(\n options?: BlobSetImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Delete Immutability Policy operation deletes the immutability policy on the blob\n * @param options The options parameters.\n */\n deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Legal Hold operation sets a legal hold on the blob.\n * @param legalHold Specified if a legal hold should be set on the blob.\n * @param options The options parameters.\n */\n setLegalHold(\n legalHold: boolean,\n options?: BlobSetLegalHoldOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n legalHold,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setLegalHoldOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more\n * name-value pairs\n * @param options The options parameters.\n */\n setMetadata(\n options?: BlobSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n acquireLease(\n options?: BlobAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and 
delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: BlobReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: BlobRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: BlobChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n breakLease(\n options?: BlobBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * The Create Snapshot operation creates a read-only snapshot of a blob\n * @param options The options parameters.\n */\n createSnapshot(\n options?: BlobCreateSnapshotOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createSnapshotOperationSpec\n ) as Promise;\n }\n\n /**\n * The Start Copy From URL operation copies a blob or an internet resource to a new blob.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. 
The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n startCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return\n * a response until the copy is complete.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyFromURL(\n copySource: string,\n options?: BlobCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination\n * blob with zero length and full metadata.\n * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob\n * operation.\n * @param options The options parameters.\n */\n abortCopyFromURL(\n copyId: string,\n options?: BlobAbortCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copyId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n abortCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant storage only). A\n * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block\n * blob's tier determines Hot/Cool/Archive storage type. 
This operation does not update the blob's\n * ETag.\n * @param tier Indicates the tier to be set on the blob.\n * @param options The options parameters.\n */\n setTier(\n tier: AccessTier,\n options?: BlobSetTierOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n tier,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTierOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Query operation enables users to select/project on blob data by providing simple query\n * expressions.\n * @param options The options parameters.\n */\n query(options?: BlobQueryOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n queryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Tags operation enables users to get the tags associated with a blob.\n * @param options The options parameters.\n */\n getTags(options?: BlobGetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getTagsOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tags operation enables users to set tags on a blob.\n * @param options The options parameters.\n */\n setTags(options?: BlobSetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTagsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst downloadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDownloadExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.rangeGetContentMD5,\n Parameters.rangeGetContentCRC64,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: 
coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"HEAD\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.blobDeleteType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.deleteSnapshots\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst undeleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobUndeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobUndeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setExpiryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetExpiryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetExpiryExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.expiryOptions,\n Parameters.expiresOn\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setHttpHeadersOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetHttpHeadersHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n 
Parameters.blobContentLanguage,\n Parameters.blobContentDisposition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifUnmodifiedSince,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setLegalHoldOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetLegalHoldHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.legalHold\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetMetadataExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n 
Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst createSnapshotOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobCreateSnapshotHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n 
Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst startCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobStartCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.tier,\n Parameters.rehydratePriority,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sealBlob,\n Parameters.legalHold1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.xMsRequiresSync,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst abortCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobAbortCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp15,\n Parameters.copyId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.copyActionAbortConstant\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTierOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetTierHeaders\n },\n 202: {\n headersMapper: Mappers.BlobSetTierHeaders\n 
},\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTierExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp16\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags,\n Parameters.rehydratePriority,\n Parameters.tier1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst queryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobQueryExceptionHeaders\n }\n },\n requestBody: Parameters.queryRequest,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp17\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobTags,\n headersMapper: Mappers.BlobGetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetTagsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobSetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTagsExceptionHeaders\n }\n },\n requestBody: Parameters.tags,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifTags,\n 
Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n PageBlobCreateOptionalParams,\n PageBlobCreateResponse,\n PageBlobUploadPagesOptionalParams,\n PageBlobUploadPagesResponse,\n PageBlobClearPagesOptionalParams,\n PageBlobClearPagesResponse,\n PageBlobUploadPagesFromURLOptionalParams,\n PageBlobUploadPagesFromURLResponse,\n PageBlobGetPageRangesOptionalParams,\n PageBlobGetPageRangesResponse,\n PageBlobGetPageRangesDiffOptionalParams,\n PageBlobGetPageRangesDiffResponse,\n PageBlobResizeOptionalParams,\n PageBlobResizeResponse,\n SequenceNumberActionType,\n PageBlobUpdateSequenceNumberOptionalParams,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobCopyIncrementalOptionalParams,\n PageBlobCopyIncrementalResponse\n} from \"../models\";\n\n/** Class representing a PageBlob. */\nexport class PageBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class PageBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create operation creates a new page blob.\n * @param contentLength The length of the request.\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. 
The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n blobContentLength: number,\n options?: PageBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n uploadPages(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: PageBlobUploadPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Clear Pages operation clears a set of pages from a page blob\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n clearPages(\n contentLength: number,\n options?: PageBlobClearPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n clearPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a\n * URL\n * @param sourceUrl Specify a URL to the copy source.\n * @param sourceRange Bytes of source data in the specified range. The length of this range should\n * match the ContentLength header and x-ms-range/Range destination range header.\n * @param contentLength The length of the request.\n * @param range The range of bytes to which the source range would be written. 
The range should be 512\n * aligned and range-end is required.\n * @param options The options parameters.\n */\n uploadPagesFromURL(\n sourceUrl: string,\n sourceRange: string,\n contentLength: number,\n range: string,\n options?: PageBlobUploadPagesFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n sourceRange,\n contentLength,\n range,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a\n * page blob\n * @param options The options parameters.\n */\n getPageRanges(\n options?: PageBlobGetPageRangesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were\n * changed between target blob and previous snapshot.\n * @param options The options parameters.\n */\n getPageRangesDiff(\n options?: PageBlobGetPageRangesDiffOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesDiffOperationSpec\n ) as Promise;\n }\n\n /**\n * Resize the Blob\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n resize(\n blobContentLength: number,\n options?: PageBlobResizeOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n resizeOperationSpec\n ) as Promise;\n }\n\n /**\n * Update the sequence number of the blob\n * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request.\n * This property applies to page blobs only. This property indicates how the service should modify the\n * blob's sequence number\n * @param options The options parameters.\n */\n updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n options?: PageBlobUpdateSequenceNumberOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sequenceNumberAction,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n updateSequenceNumberOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.\n * The snapshot is copied such that only the differential changes between the previously copied\n * snapshot are transferred to the destination. The copied snapshots are complete copies of the\n * original snapshot and can be read or copied from as usual. This API is supported since REST version\n * 2016-05-31.\n * @param copySource Specifies the name of the source page blob snapshot. 
This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyIncremental(\n copySource: string,\n options?: PageBlobCopyIncrementalOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyIncrementalOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType,\n Parameters.blobContentLength,\n Parameters.blobSequenceNumber\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo\n ],\n mediaType: \"binary\",\n serializer\n};\nconst clearPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobClearPagesHeaders\n },\n default: {\n bodyMapper: 
Mappers.StorageError,\n headersMapper: Mappers.PageBlobClearPagesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.pageWrite1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.sourceUrl,\n Parameters.sourceRange,\n Parameters.sourceContentCrc64,\n Parameters.range1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesDiffOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20,\n 
Parameters.prevsnapshot\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.prevSnapshotUrl\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst resizeOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobResizeHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobResizeExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.blobContentLength\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst updateSequenceNumberOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobSequenceNumber,\n Parameters.sequenceNumberAction\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyIncrementalOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.PageBlobCopyIncrementalHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.copySource\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n AppendBlobCreateOptionalParams,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockOptionalParams,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlOptionalParams,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobSealOptionalParams,\n 
AppendBlobSealResponse\n} from \"../models\";\n\n/** Class representing a AppendBlob. */\nexport class AppendBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class AppendBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create Append Blob operation creates a new append blob.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n options?: AppendBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob. The\n * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to\n * AppendBlob. Append Block is supported only on version 2015-02-21 version or later.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n appendBlock(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: AppendBlobAppendBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob where\n * the contents are read from a source url. The Append Block operation is permitted only if the blob\n * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version\n * 2015-02-21 version or later.\n * @param sourceUrl Specify a URL to the copy source.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n appendBlockFromUrl(\n sourceUrl: string,\n contentLength: number,\n options?: AppendBlobAppendBlockFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Seal operation seals the Append Blob to make it read-only. 
Seal is supported only on version\n * 2019-12-12 version or later.\n * @param options The options parameters.\n */\n seal(\n options?: AppendBlobSealOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n sealOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst appendBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.maxSize,\n Parameters.appendPosition\n ],\n mediaType: \"binary\",\n serializer\n};\nconst appendBlockFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n 
Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.transactionalContentMD5,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.maxSize,\n Parameters.appendPosition,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst sealOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.AppendBlobSealHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobSealExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.appendPosition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlockBlobUploadOptionalParams,\n BlockBlobUploadResponse,\n BlockBlobPutBlobFromUrlOptionalParams,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobStageBlockOptionalParams,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLOptionalParams,\n BlockBlobStageBlockFromURLResponse,\n BlockLookupList,\n BlockBlobCommitBlockListOptionalParams,\n BlockBlobCommitBlockListResponse,\n BlockListType,\n BlockBlobGetBlockListOptionalParams,\n BlockBlobGetBlockListResponse\n} from \"../models\";\n\n/** Class representing a BlockBlob. */\nexport class BlockBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class BlockBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing\n * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put\n * Blob; the content of the existing blob is overwritten with the content of the new blob. 
To perform a\n * partial update of the content of a block blob, use the Put Block List operation.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n upload(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobUploadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read\n * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are\n * not supported with Put Blob from URL; the content of an existing blob is overwritten with the\n * content of the new blob. To perform partial updates to a block blob’s contents using a source URL,\n * use the Put Block from URL API in conjunction with Put Block List.\n * @param contentLength The length of the request.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n putBlobFromUrl(\n contentLength: number,\n copySource: string,\n options?: BlockBlobPutBlobFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n putBlobFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n stageBlock(\n blockId: string,\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobStageBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob where the contents\n * are read from a URL.\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param sourceUrl Specify a URL to the copy source.\n * @param options The options parameters.\n */\n stageBlockFromURL(\n blockId: string,\n contentLength: number,\n sourceUrl: string,\n options?: BlockBlobStageBlockFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n sourceUrl,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the\n * blob. In order to be written as part of a blob, a block must have been successfully written to the\n * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading\n * only those blocks that have changed, then committing the new and existing blocks together. You can\n * do this by specifying whether to commit a block from the committed block list or from the\n * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list\n * it may belong to.\n * @param blocks Blob Blocks.\n * @param options The options parameters.\n */\n commitBlockList(\n blocks: BlockLookupList,\n options?: BlockBlobCommitBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blocks,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n commitBlockListOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block\n * blob\n * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted\n * blocks, or both lists together.\n * @param options The options parameters.\n */\n getBlockList(\n listType: BlockListType,\n options?: BlockBlobGetBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n listType,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getBlockListOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst uploadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobUploadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobUploadExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n 
Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.blobType2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst putBlobFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags,\n Parameters.transactionalContentMD5,\n Parameters.blobType2,\n Parameters.copySourceBlobProperties\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst stageBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst stageBlockFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n 
Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst commitBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobCommitBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders\n }\n },\n requestBody: Parameters.blocks,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlockList,\n headersMapper: Mappers.BlockBlobGetBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp25,\n Parameters.listType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n */\nexport const logger = createClientLogger(\"storage-blob\");\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const SDK_VERSION: string = \"12.17.0\";\nexport const SERVICE_VERSION: string = \"2023-11-03\";\n\nexport const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES: number = 256 * 1024 * 1024; // 256MB\nexport const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES: number = 4000 * 1024 * 1024; // 4000MB\nexport const BLOCK_BLOB_MAX_BLOCKS: number = 50000;\nexport const DEFAULT_BLOCK_BUFFER_SIZE_BYTES: number = 8 * 1024 * 1024; // 8MB\nexport const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES: number = 4 * 1024 * 1024; // 4MB\nexport const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS: number = 5;\n\nexport const REQUEST_TIMEOUT: number = 100 * 
1000; // In ms\n/**\n * The OAuth scope to use with Azure Storage.\n */\nexport const StorageOAuthScopes: string | string[] = \"https://storage.azure.com/.default\";\n\nexport const URLConstants = {\n Parameters: {\n FORCE_BROWSER_NO_CACHE: \"_\",\n SIGNATURE: \"sig\",\n SNAPSHOT: \"snapshot\",\n VERSIONID: \"versionid\",\n TIMEOUT: \"timeout\",\n },\n};\n\nexport const HTTPURLConnection = {\n HTTP_ACCEPTED: 202,\n HTTP_CONFLICT: 409,\n HTTP_NOT_FOUND: 404,\n HTTP_PRECON_FAILED: 412,\n HTTP_RANGE_NOT_SATISFIABLE: 416,\n};\n\nexport const HeaderConstants = {\n AUTHORIZATION: \"Authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n CONTENT_ENCODING: \"Content-Encoding\",\n CONTENT_ID: \"Content-ID\",\n CONTENT_LANGUAGE: \"Content-Language\",\n CONTENT_LENGTH: \"Content-Length\",\n CONTENT_MD5: \"Content-Md5\",\n CONTENT_TRANSFER_ENCODING: \"Content-Transfer-Encoding\",\n CONTENT_TYPE: \"Content-Type\",\n COOKIE: \"Cookie\",\n DATE: \"date\",\n IF_MATCH: \"if-match\",\n IF_MODIFIED_SINCE: \"if-modified-since\",\n IF_NONE_MATCH: \"if-none-match\",\n IF_UNMODIFIED_SINCE: \"if-unmodified-since\",\n PREFIX_FOR_STORAGE: \"x-ms-\",\n RANGE: \"Range\",\n USER_AGENT: \"User-Agent\",\n X_MS_CLIENT_REQUEST_ID: \"x-ms-client-request-id\",\n X_MS_COPY_SOURCE: \"x-ms-copy-source\",\n X_MS_DATE: \"x-ms-date\",\n X_MS_ERROR_CODE: \"x-ms-error-code\",\n X_MS_VERSION: \"x-ms-version\",\n};\n\nexport const ETagNone = \"\";\nexport const ETagAny = \"*\";\n\nexport const SIZE_1_MB = 1 * 1024 * 1024;\nexport const BATCH_MAX_REQUEST = 256;\nexport const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;\nexport const HTTP_LINE_ENDING = \"\\r\\n\";\nexport const HTTP_VERSION_1_1 = \"HTTP/1.1\";\n\nexport const EncryptionAlgorithmAES25 = \"AES256\";\n\nexport const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`;\n\nexport const StorageBlobLoggingAllowedHeaderNames = [\n \"Access-Control-Allow-Origin\",\n \"Cache-Control\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"Request-Id\",\n \"traceparent\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"x-ms-client-request-id\",\n \"x-ms-date\",\n \"x-ms-error-code\",\n \"x-ms-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-version\",\n \"Accept-Ranges\",\n \"Content-Disposition\",\n \"Content-Encoding\",\n \"Content-Language\",\n \"Content-MD5\",\n \"Content-Range\",\n \"ETag\",\n \"Last-Modified\",\n \"Server\",\n \"Vary\",\n \"x-ms-content-crc64\",\n \"x-ms-copy-action\",\n \"x-ms-copy-completion-time\",\n \"x-ms-copy-id\",\n \"x-ms-copy-progress\",\n \"x-ms-copy-status\",\n \"x-ms-has-immutability-policy\",\n \"x-ms-has-legal-hold\",\n \"x-ms-lease-state\",\n \"x-ms-lease-status\",\n \"x-ms-range\",\n \"x-ms-request-server-encrypted\",\n \"x-ms-server-encrypted\",\n \"x-ms-snapshot\",\n \"x-ms-source-range\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"x-ms-access-tier\",\n \"x-ms-access-tier-change-time\",\n \"x-ms-access-tier-inferred\",\n \"x-ms-account-kind\",\n \"x-ms-archive-status\",\n \"x-ms-blob-append-offset\",\n \"x-ms-blob-cache-control\",\n \"x-ms-blob-committed-block-count\",\n \"x-ms-blob-condition-appendpos\",\n \"x-ms-blob-condition-maxsize\",\n \"x-ms-blob-content-disposition\",\n \"x-ms-blob-content-encoding\",\n \"x-ms-blob-content-language\",\n \"x-ms-blob-content-length\",\n 
\"x-ms-blob-content-md5\",\n \"x-ms-blob-content-type\",\n \"x-ms-blob-public-access\",\n \"x-ms-blob-sequence-number\",\n \"x-ms-blob-type\",\n \"x-ms-copy-destination-snapshot\",\n \"x-ms-creation-time\",\n \"x-ms-default-encryption-scope\",\n \"x-ms-delete-snapshots\",\n \"x-ms-delete-type-permanent\",\n \"x-ms-deny-encryption-scope-override\",\n \"x-ms-encryption-algorithm\",\n \"x-ms-if-sequence-number-eq\",\n \"x-ms-if-sequence-number-le\",\n \"x-ms-if-sequence-number-lt\",\n \"x-ms-incremental-copy\",\n \"x-ms-lease-action\",\n \"x-ms-lease-break-period\",\n \"x-ms-lease-duration\",\n \"x-ms-lease-id\",\n \"x-ms-lease-time\",\n \"x-ms-page-write\",\n \"x-ms-proposed-lease-id\",\n \"x-ms-range-get-content-md5\",\n \"x-ms-rehydrate-priority\",\n \"x-ms-sequence-number-action\",\n \"x-ms-sku-name\",\n \"x-ms-source-content-md5\",\n \"x-ms-source-if-match\",\n \"x-ms-source-if-modified-since\",\n \"x-ms-source-if-none-match\",\n \"x-ms-source-if-unmodified-since\",\n \"x-ms-tag-count\",\n \"x-ms-encryption-key-sha256\",\n \"x-ms-if-tags\",\n \"x-ms-source-if-tags\",\n];\n\nexport const StorageBlobLoggingAllowedQueryParameters = [\n \"comp\",\n \"maxresults\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"se\",\n \"si\",\n \"sip\",\n \"sp\",\n \"spr\",\n \"sr\",\n \"srt\",\n \"ss\",\n \"st\",\n \"sv\",\n \"include\",\n \"marker\",\n \"prefix\",\n \"copyid\",\n \"restype\",\n \"blockid\",\n \"blocklisttype\",\n \"delimiter\",\n \"prevsnapshot\",\n \"ske\",\n \"skoid\",\n \"sks\",\n \"skt\",\n \"sktid\",\n \"skv\",\n \"snapshot\",\n];\n\nexport const BlobUsesCustomerSpecifiedEncryptionMsg = \"BlobUsesCustomerSpecifiedEncryption\";\nexport const BlobDoesNotUseCustomerSpecifiedEncryption =\n \"BlobDoesNotUseCustomerSpecifiedEncryption\";\n\n/// List of ports used for path style addressing.\n/// Path style addressing means that storage account is put in URI's Path segment in instead of in host.\nexport const PathStylePorts = [\n \"10000\",\n \"10001\",\n \"10002\",\n \"10003\",\n \"10004\",\n \"10100\",\n \"10101\",\n \"10102\",\n \"10103\",\n \"10104\",\n \"11000\",\n \"11001\",\n \"11002\",\n \"11003\",\n \"11004\",\n \"11100\",\n \"11101\",\n \"11102\",\n \"11103\",\n \"11104\",\n];\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpHeaders, isNode, URLBuilder, TokenCredential } from \"@azure/core-http\";\n\nimport {\n BlobQueryArrowConfiguration,\n BlobQueryCsvTextConfiguration,\n BlobQueryJsonTextConfiguration,\n BlobQueryParquetConfiguration,\n} from \"../Clients\";\nimport {\n QuerySerialization,\n BlobTags,\n BlobName,\n ListBlobsFlatSegmentResponse,\n ListBlobsHierarchySegmentResponse,\n PageRange,\n ClearRange,\n} from \"../generated/src/models\";\nimport {\n DevelopmentConnectionString,\n HeaderConstants,\n PathStylePorts,\n URLConstants,\n} from \"./constants\";\nimport {\n Tags,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n HttpAuthorization,\n} from \"../models\";\nimport {\n ListBlobsFlatSegmentResponseModel,\n BlobItemInternal as BlobItemInternalModel,\n ListBlobsHierarchySegmentResponseModel,\n BlobPrefix as BlobPrefixModel,\n PageBlobGetPageRangesDiffResponseModel,\n PageRangeInfo,\n} from \"../generatedModels\";\n\n/**\n * Reserved URL characters must be properly escaped for Storage services like Blob or File.\n *\n * ## URL encode and escape strategy for JS SDKs\n *\n * When customers pass a URL string into 
XxxClient classes constructor, the URL string may already be URL encoded or not.\n * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL\n * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.\n *\n * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.\n *\n * This is what legacy V2 SDK does, simple and works for most of the cases.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%253A\" and send to server. A blob named \"b%3A\" will be created.\n *\n * But this strategy will make it not possible to create a blob with \"?\" in it's name. Because when customer URL string is\n * \"http://account.blob.core.windows.net/con/blob?name\", the \"?name\" will be treated as URL paramter instead of blob name.\n * If customer URL string is \"http://account.blob.core.windows.net/con/blob%3Fname\", a blob named \"blob%3Fname\" will be created.\n * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.\n * We cannot accept a SDK cannot create a blob name with \"?\". So we implement strategy two:\n *\n * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.\n *\n * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will escape \":\" like \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%3A\" to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%253A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%253A\" to server. A blob named \"b%3A\" will be created.\n *\n * This strategy gives us flexibility to create with any special characters. But \"%\" will be treated as a special characters, if the URL string\n * is not encoded, there shouldn't a \"%\" in the URL string, otherwise the URL is not a valid URL.\n * If customer needs to create a blob with \"%\" in it's blob name, use \"%25\" instead of \"%\". Just like above 3rd sample.\n * And following URL strings are invalid:\n * - \"http://account.blob.core.windows.net/con/b%\"\n * - \"http://account.blob.core.windows.net/con/b%2\"\n * - \"http://account.blob.core.windows.net/con/b%G\"\n *\n * Another special character is \"?\", use \"%2F\" to represent a blob name with \"?\" in a URL string.\n *\n * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`\n *\n * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. 
Because what customers passes in is a plain name instead of a URL.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata\n *\n * @param url -\n */\nexport function escapeURLPath(url: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path || \"/\";\n\n path = escape(path);\n urlParsed.setPath(path);\n\n return urlParsed.toString();\n}\n\nexport interface ConnectionString {\n kind: \"AccountConnString\" | \"SASConnString\";\n url: string;\n accountName: string;\n accountKey?: any;\n accountSas?: string;\n proxyUri?: string; // Development Connection String may contain proxyUri\n}\n\nfunction getProxyUriFromDevConnString(connectionString: string): string {\n // Development Connection String\n // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key\n let proxyUri = \"\";\n if (connectionString.search(\"DevelopmentStorageProxyUri=\") !== -1) {\n // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri\n const matchCredentials = connectionString.split(\";\");\n for (const element of matchCredentials) {\n if (element.trim().startsWith(\"DevelopmentStorageProxyUri=\")) {\n proxyUri = element.trim().match(\"DevelopmentStorageProxyUri=(.*)\")![1];\n }\n }\n }\n return proxyUri;\n}\n\nexport function getValueInConnString(\n connectionString: string,\n argument:\n | \"BlobEndpoint\"\n | \"AccountName\"\n | \"AccountKey\"\n | \"DefaultEndpointsProtocol\"\n | \"EndpointSuffix\"\n | \"SharedAccessSignature\"\n): string {\n const elements = connectionString.split(\";\");\n for (const element of elements) {\n if (element.trim().startsWith(argument)) {\n return element.trim().match(argument + \"=(.*)\")![1];\n }\n }\n return \"\";\n}\n\n/**\n * Extracts the parts of an Azure Storage account connection string.\n *\n * @param connectionString - Connection string.\n * @returns String key value pairs of the storage account's url and credentials.\n */\nexport function extractConnectionStringParts(connectionString: string): ConnectionString {\n let proxyUri = \"\";\n\n if (connectionString.startsWith(\"UseDevelopmentStorage=true\")) {\n // Development connection string\n proxyUri = getProxyUriFromDevConnString(connectionString);\n connectionString = DevelopmentConnectionString;\n }\n\n // Matching BlobEndpoint in the Account connection string\n let blobEndpoint = getValueInConnString(connectionString, \"BlobEndpoint\");\n // Slicing off '/' at the end if exists\n // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)\n blobEndpoint = blobEndpoint.endsWith(\"/\") ? 
blobEndpoint.slice(0, -1) : blobEndpoint;\n\n if (\n connectionString.search(\"DefaultEndpointsProtocol=\") !== -1 &&\n connectionString.search(\"AccountKey=\") !== -1\n ) {\n // Account connection string\n\n let defaultEndpointsProtocol = \"\";\n let accountName = \"\";\n let accountKey = Buffer.from(\"accountKey\", \"base64\");\n let endpointSuffix = \"\";\n\n // Get account name and key\n accountName = getValueInConnString(connectionString, \"AccountName\");\n accountKey = Buffer.from(getValueInConnString(connectionString, \"AccountKey\"), \"base64\");\n\n if (!blobEndpoint) {\n // BlobEndpoint is not present in the Account connection string\n // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`\n\n defaultEndpointsProtocol = getValueInConnString(connectionString, \"DefaultEndpointsProtocol\");\n const protocol = defaultEndpointsProtocol!.toLowerCase();\n if (protocol !== \"https\" && protocol !== \"http\") {\n throw new Error(\n \"Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'\"\n );\n }\n\n endpointSuffix = getValueInConnString(connectionString, \"EndpointSuffix\");\n if (!endpointSuffix) {\n throw new Error(\"Invalid EndpointSuffix in the provided Connection String\");\n }\n blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n }\n\n if (!accountName) {\n throw new Error(\"Invalid AccountName in the provided Connection String\");\n } else if (accountKey.length === 0) {\n throw new Error(\"Invalid AccountKey in the provided Connection String\");\n }\n\n return {\n kind: \"AccountConnString\",\n url: blobEndpoint,\n accountName,\n accountKey,\n proxyUri,\n };\n } else {\n // SAS connection string\n\n const accountSas = getValueInConnString(connectionString, \"SharedAccessSignature\");\n let accountName = getValueInConnString(connectionString, \"AccountName\");\n // if accountName is empty, try to read it from BlobEndpoint\n if (!accountName) {\n accountName = getAccountNameFromUrl(blobEndpoint);\n }\n if (!blobEndpoint) {\n throw new Error(\"Invalid BlobEndpoint in the provided SAS Connection String\");\n } else if (!accountSas) {\n throw new Error(\"Invalid SharedAccessSignature in the provided SAS Connection String\");\n }\n\n return { kind: \"SASConnString\", url: blobEndpoint, accountName, accountSas };\n }\n}\n\n/**\n * Internal escape method implemented Strategy Two mentioned in escapeURL() description.\n *\n * @param text -\n */\nfunction escape(text: string): string {\n return encodeURIComponent(text)\n .replace(/%2F/g, \"/\") // Don't escape for \"/\"\n .replace(/'/g, \"%27\") // Escape for \"'\"\n .replace(/\\+/g, \"%20\")\n .replace(/%25/g, \"%\"); // Revert encoded \"%\"\n}\n\n/**\n * Append a string to URL path. Will remove duplicated \"/\" in front of the string\n * when URL path ends with a \"/\".\n *\n * @param url - Source URL string\n * @param name - String to be appended to URL\n * @returns An updated URL string\n */\nexport function appendToURLPath(url: string, name: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path ? (path.endsWith(\"/\") ? `${path}${name}` : `${path}/${name}`) : name;\n urlParsed.setPath(path);\n\n const normalizedUrl = new URL(urlParsed.toString());\n\n return normalizedUrl.toString();\n}\n\n/**\n * Set URL parameter name and value. If name exists in URL parameters, old value\n * will be replaced by name key. 
If not provide value, the parameter will be deleted.\n *\n * @param url - Source URL string\n * @param name - Parameter name\n * @param value - Parameter value\n * @returns An updated URL string\n */\nexport function setURLParameter(url: string, name: string, value?: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setQueryParameter(name, value);\n return urlParsed.toString();\n}\n\n/**\n * Get URL parameter by name.\n *\n * @param url -\n * @param name -\n */\nexport function getURLParameter(url: string, name: string): string | string[] | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getQueryParameterValue(name);\n}\n\n/**\n * Set URL host.\n *\n * @param url - Source URL string\n * @param host - New host string\n * @returns An updated URL string\n */\nexport function setURLHost(url: string, host: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setHost(host);\n return urlParsed.toString();\n}\n\n/**\n * Get URL path from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPath(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getPath();\n}\n\n/**\n * Get URL scheme from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLScheme(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getScheme();\n}\n\n/**\n * Get URL path and query from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPathAndQuery(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n const pathString = urlParsed.getPath();\n if (!pathString) {\n throw new RangeError(\"Invalid url without valid path.\");\n }\n\n let queryString = urlParsed.getQuery() || \"\";\n queryString = queryString.trim();\n if (queryString !== \"\") {\n queryString = queryString.startsWith(\"?\") ? queryString : `?${queryString}`; // Ensure query string start with '?'\n }\n\n return `${pathString}${queryString}`;\n}\n\n/**\n * Get URL query key value pairs from an URL string.\n *\n * @param url -\n */\nexport function getURLQueries(url: string): { [key: string]: string } {\n let queryString = URLBuilder.parse(url).getQuery();\n if (!queryString) {\n return {};\n }\n\n queryString = queryString.trim();\n queryString = queryString.startsWith(\"?\") ? 
queryString.substr(1) : queryString;\n\n let querySubStrings: string[] = queryString.split(\"&\");\n querySubStrings = querySubStrings.filter((value: string) => {\n const indexOfEqual = value.indexOf(\"=\");\n const lastIndexOfEqual = value.lastIndexOf(\"=\");\n return (\n indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1\n );\n });\n\n const queries: { [key: string]: string } = {};\n for (const querySubString of querySubStrings) {\n const splitResults = querySubString.split(\"=\");\n const key: string = splitResults[0];\n const value: string = splitResults[1];\n queries[key] = value;\n }\n\n return queries;\n}\n\n/**\n * Append a string to URL query.\n *\n * @param url - Source URL string.\n * @param queryParts - String to be appended to the URL query.\n * @returns An updated URL string.\n */\nexport function appendToURLQuery(url: string, queryParts: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let query = urlParsed.getQuery();\n if (query) {\n query += \"&\" + queryParts;\n } else {\n query = queryParts;\n }\n\n urlParsed.setQuery(query);\n return urlParsed.toString();\n}\n\n/**\n * Rounds a date off to seconds.\n *\n * @param date -\n * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;\n * If false, YYYY-MM-DDThh:mm:ssZ will be returned.\n * @returns Date string in ISO8061 format, with or without 7 milliseconds component\n */\nexport function truncatedISO8061Date(date: Date, withMilliseconds: boolean = true): string {\n // Date.toISOString() will return like \"2018-10-29T06:34:36.139Z\"\n const dateString = date.toISOString();\n\n return withMilliseconds\n ? dateString.substring(0, dateString.length - 1) + \"0000\" + \"Z\"\n : dateString.substring(0, dateString.length - 5) + \"Z\";\n}\n\n/**\n * Base64 encode.\n *\n * @param content -\n */\nexport function base64encode(content: string): string {\n return !isNode ? btoa(content) : Buffer.from(content).toString(\"base64\");\n}\n\n/**\n * Base64 decode.\n *\n * @param encodedString -\n */\nexport function base64decode(encodedString: string): string {\n return !isNode ? 
atob(encodedString) : Buffer.from(encodedString, \"base64\").toString();\n}\n\n/**\n * Generate a 64 bytes base64 block ID string.\n *\n * @param blockIndex -\n */\nexport function generateBlockID(blockIDPrefix: string, blockIndex: number): string {\n // To generate a 64 bytes base64 string, source string should be 48\n const maxSourceStringLength = 48;\n\n // A blob can have a maximum of 100,000 uncommitted blocks at any given time\n const maxBlockIndexLength = 6;\n\n const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;\n\n if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {\n blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);\n }\n const res =\n blockIDPrefix +\n padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, \"0\");\n return base64encode(res);\n}\n\n/**\n * Delay specified time interval.\n *\n * @param timeInMs -\n * @param aborter -\n * @param abortError -\n */\nexport async function delay(\n timeInMs: number,\n aborter?: AbortSignalLike,\n abortError?: Error\n): Promise {\n return new Promise((resolve, reject) => {\n /* eslint-disable-next-line prefer-const */\n let timeout: any;\n\n const abortHandler = () => {\n if (timeout !== undefined) {\n clearTimeout(timeout);\n }\n reject(abortError);\n };\n\n const resolveHandler = () => {\n if (aborter !== undefined) {\n aborter.removeEventListener(\"abort\", abortHandler);\n }\n resolve();\n };\n\n timeout = setTimeout(resolveHandler, timeInMs);\n\n if (aborter !== undefined) {\n aborter.addEventListener(\"abort\", abortHandler);\n }\n });\n}\n\n/**\n * String.prototype.padStart()\n *\n * @param currentString -\n * @param targetLength -\n * @param padString -\n */\nexport function padStart(\n currentString: string,\n targetLength: number,\n padString: string = \" \"\n): string {\n // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes\n if (String.prototype.padStart) {\n return currentString.padStart(targetLength, padString);\n }\n\n padString = padString || \" \";\n if (currentString.length > targetLength) {\n return currentString;\n } else {\n targetLength = targetLength - currentString.length;\n if (targetLength > padString.length) {\n padString += padString.repeat(targetLength / padString.length);\n }\n return padString.slice(0, targetLength) + currentString;\n }\n}\n\nexport function sanitizeURL(url: string): string {\n let safeURL: string = url;\n if (getURLParameter(safeURL, URLConstants.Parameters.SIGNATURE)) {\n safeURL = setURLParameter(safeURL, URLConstants.Parameters.SIGNATURE, \"*****\");\n }\n\n return safeURL;\n}\n\nexport function sanitizeHeaders(originalHeader: HttpHeaders): HttpHeaders {\n const headers: HttpHeaders = new HttpHeaders();\n for (const header of originalHeader.headersArray()) {\n if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) {\n headers.set(header.name, \"*****\");\n } else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) {\n headers.set(header.name, sanitizeURL(header.value));\n } else {\n headers.set(header.name, header.value);\n }\n }\n\n return headers;\n}\n/**\n * If two strings are equal when compared case insensitive.\n *\n * @param str1 -\n * @param str2 -\n */\nexport function iEqual(str1: string, str2: string): boolean {\n return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();\n}\n\n/**\n * Extracts account name from the url\n * @param url - url to extract the account name from\n * @returns with the account name\n */\nexport 
function getAccountNameFromUrl(url: string): string {\n const parsedUrl: URLBuilder = URLBuilder.parse(url);\n let accountName;\n try {\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n accountName = parsedUrl.getHost()!.split(\".\")[0];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/\n // .getPath() -> /devstoreaccount1/\n accountName = parsedUrl.getPath()!.split(\"/\")[1];\n } else {\n // Custom domain case: \"https://customdomain.com/containername/blob\".\n accountName = \"\";\n }\n return accountName;\n } catch (error: any) {\n throw new Error(\"Unable to extract accountName with provided information.\");\n }\n}\n\nexport function isIpEndpointStyle(parsedUrl: URLBuilder): boolean {\n if (parsedUrl.getHost() === undefined) {\n return false;\n }\n\n const host =\n parsedUrl.getHost()! + (parsedUrl.getPort() === undefined ? \"\" : \":\" + parsedUrl.getPort());\n\n // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.\n // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part.\n // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.\n // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.\n return (\n /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])(\\.(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])){3}(:[0-9]+)?$/.test(\n host\n ) ||\n (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()!))\n );\n}\n\n/**\n * Convert Tags to encoded string.\n *\n * @param tags -\n */\nexport function toBlobTagsString(tags?: Tags): string | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const tagPairs = [];\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);\n }\n }\n\n return tagPairs.join(\"&\");\n}\n\n/**\n * Convert Tags type to BlobTags.\n *\n * @param tags -\n */\nexport function toBlobTags(tags?: Tags): BlobTags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: BlobTags = {\n blobTagSet: [],\n };\n\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n res.blobTagSet.push({\n key,\n value,\n });\n }\n }\n return res;\n}\n\n/**\n * Covert BlobTags to Tags type.\n *\n * @param tags -\n */\nexport function toTags(tags?: BlobTags): Tags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: Tags = {};\n for (const blobTag of tags.blobTagSet) {\n res[blobTag.key] = blobTag.value;\n }\n return res;\n}\n\n/**\n * Convert BlobQueryTextConfiguration to QuerySerialization type.\n *\n * @param textConfiguration -\n */\nexport function toQuerySerialization(\n textConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration\n | BlobQueryParquetConfiguration\n): QuerySerialization | undefined {\n if (textConfiguration === undefined) {\n return undefined;\n }\n\n switch (textConfiguration.kind) {\n case \"csv\":\n return {\n format: {\n type: \"delimited\",\n delimitedTextConfiguration: {\n columnSeparator: 
textConfiguration.columnSeparator || \",\",\n fieldQuote: textConfiguration.fieldQuote || \"\",\n recordSeparator: textConfiguration.recordSeparator,\n escapeChar: textConfiguration.escapeCharacter || \"\",\n headersPresent: textConfiguration.hasHeaders || false,\n },\n },\n };\n case \"json\":\n return {\n format: {\n type: \"json\",\n jsonTextConfiguration: {\n recordSeparator: textConfiguration.recordSeparator,\n },\n },\n };\n case \"arrow\":\n return {\n format: {\n type: \"arrow\",\n arrowConfiguration: {\n schema: textConfiguration.schema,\n },\n },\n };\n case \"parquet\":\n return {\n format: {\n type: \"parquet\",\n },\n };\n\n default:\n throw Error(\"Invalid BlobQueryTextConfiguration.\");\n }\n}\n\nexport function parseObjectReplicationRecord(\n objectReplicationRecord?: Record\n): ObjectReplicationPolicy[] | undefined {\n if (!objectReplicationRecord) {\n return undefined;\n }\n\n if (\"policy-id\" in objectReplicationRecord) {\n // If the dictionary contains a key with policy id, we are not required to do any parsing since\n // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.\n return undefined;\n }\n\n const orProperties: ObjectReplicationPolicy[] = [];\n for (const key in objectReplicationRecord) {\n const ids = key.split(\"_\");\n const policyPrefix = \"or-\";\n if (ids[0].startsWith(policyPrefix)) {\n ids[0] = ids[0].substring(policyPrefix.length);\n }\n const rule: ObjectReplicationRule = {\n ruleId: ids[1],\n replicationStatus: objectReplicationRecord[key] as ObjectReplicationStatus,\n };\n const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]);\n if (policyIndex > -1) {\n orProperties[policyIndex].rules.push(rule);\n } else {\n orProperties.push({\n policyId: ids[0],\n rules: [rule],\n });\n }\n }\n return orProperties;\n}\n\n/**\n * Attach a TokenCredential to an object.\n *\n * @param thing -\n * @param credential -\n */\nexport function attachCredential(thing: T, credential: TokenCredential): T {\n (thing as any).credential = credential;\n return thing;\n}\n\nexport function httpAuthorizationToString(\n httpAuthorization?: HttpAuthorization\n): string | undefined {\n return httpAuthorization ? 
httpAuthorization.scheme + \" \" + httpAuthorization.value : undefined;\n}\n\nexport function BlobNameToString(name: BlobName): string {\n if (name.encoded) {\n return decodeURIComponent(name.content!);\n } else {\n return name.content!;\n }\n}\n\nexport function ConvertInternalResponseOfListBlobFlat(\n internalResponse: ListBlobsFlatSegmentResponse\n): ListBlobsFlatSegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nexport function ConvertInternalResponseOfListBlobHierarchy(\n internalResponse: ListBlobsHierarchySegmentResponse\n): ListBlobsHierarchySegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefixModel = {\n ...blobPrefixInternal,\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nexport function* ExtractPageRangeInfoItems(\n getPageRangesSegment: PageBlobGetPageRangesDiffResponseModel\n): IterableIterator {\n let pageRange: PageRange[] = [];\n let clearRange: ClearRange[] = [];\n\n if (getPageRangesSegment.pageRange) pageRange = getPageRangesSegment.pageRange;\n if (getPageRangesSegment.clearRange) clearRange = getPageRangesSegment.clearRange;\n\n let pageRangeIndex = 0;\n let clearRangeIndex = 0;\n\n while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) {\n if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n ++pageRangeIndex;\n } else {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: clearRange[clearRangeIndex].end,\n isClear: true,\n };\n ++clearRangeIndex;\n }\n }\n\n for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n }\n\n for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: clearRange[clearRangeIndex].end,\n isClear: true,\n };\n }\n}\n\n/**\n * Escape the blobName but keep path separator ('/').\n */\nexport function EscapePath(blobName: string): string {\n const split = blobName.split(\"/\");\n for (let i = 0; i < split.length; i++) {\n split[i] = encodeURIComponent(split[i]);\n }\n return split.join(\"/\");\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants, URLConstants } from \"../utils/constants\";\nimport { setURLParameter } from \"../utils/utils.common\";\n\n/**\n * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:\n *\n * 1. 
Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.\n * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL\n * thus avoid the browser cache.\n *\n * 2. Remove cookie header for security\n *\n * 3. Remove content-length header to avoid browsers warning\n */\nexport class StorageBrowserPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of StorageBrowserPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n return this._nextPolicy.sendRequest(request);\n }\n\n if (request.method.toUpperCase() === \"GET\" || request.method.toUpperCase() === \"HEAD\") {\n request.url = setURLParameter(\n request.url,\n URLConstants.Parameters.FORCE_BROWSER_NO_CACHE,\n new Date().getTime().toString()\n );\n }\n\n request.headers.remove(HeaderConstants.COOKIE);\n\n // According to XHR standards, content-length should be fully controlled by browsers\n request.headers.remove(HeaderConstants.CONTENT_LENGTH);\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageBrowserPolicy } from \"./policies/StorageBrowserPolicy\";\nexport { StorageBrowserPolicy };\n\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n */\nexport class StorageBrowserPolicyFactory implements RequestPolicyFactory {\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy {\n return new StorageBrowserPolicy(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from \"@azure/abort-controller\";\n\nimport {\n AbortSignalLike,\n BaseRequestPolicy,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RestError,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { StorageRetryOptions } from \"../StorageRetryPolicyFactory\";\nimport { URLConstants } from \"../utils/constants\";\nimport { delay, setURLHost, setURLParameter } from \"../utils/utils.common\";\nimport { logger } from \"../log\";\n\n/**\n * A factory method used to generated a RetryPolicy factory.\n *\n * @param retryOptions -\n */\nexport function NewRetryPolicyFactory(retryOptions?: StorageRetryOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy => {\n return new StorageRetryPolicy(nextPolicy, options, retryOptions);\n },\n };\n}\n\n/**\n * RetryPolicy types.\n */\nexport enum StorageRetryPolicyType {\n /**\n * Exponential retry. Retry time delay grows exponentially.\n */\n EXPONENTIAL,\n /**\n * Linear retry. 
Retry time delay grows linearly.\n */\n FIXED,\n}\n\n// Default values of StorageRetryOptions\nconst DEFAULT_RETRY_OPTIONS: StorageRetryOptions = {\n maxRetryDelayInMs: 120 * 1000,\n maxTries: 4,\n retryDelayInMs: 4 * 1000,\n retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,\n secondaryHost: \"\",\n tryTimeoutInMs: undefined, // Use server side default timeout strategy\n};\n\nconst RETRY_ABORT_ERROR = new AbortError(\"The operation was aborted.\");\n\n/**\n * Retry policy with exponential retry and linear retry implemented.\n */\nexport class StorageRetryPolicy extends BaseRequestPolicy {\n /**\n * RetryOptions.\n */\n private readonly retryOptions: StorageRetryOptions;\n\n /**\n * Creates an instance of RetryPolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param retryOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryOptions: StorageRetryOptions = DEFAULT_RETRY_OPTIONS\n ) {\n super(nextPolicy, options);\n\n // Initialize retry options\n this.retryOptions = {\n retryPolicyType: retryOptions.retryPolicyType\n ? retryOptions.retryPolicyType\n : DEFAULT_RETRY_OPTIONS.retryPolicyType,\n\n maxTries:\n retryOptions.maxTries && retryOptions.maxTries >= 1\n ? Math.floor(retryOptions.maxTries)\n : DEFAULT_RETRY_OPTIONS.maxTries,\n\n tryTimeoutInMs:\n retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0\n ? retryOptions.tryTimeoutInMs\n : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,\n\n retryDelayInMs:\n retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0\n ? Math.min(\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs!\n )\n : DEFAULT_RETRY_OPTIONS.retryDelayInMs,\n\n maxRetryDelayInMs:\n retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,\n\n secondaryHost: retryOptions.secondaryHost\n ? retryOptions.secondaryHost\n : DEFAULT_RETRY_OPTIONS.secondaryHost,\n };\n }\n\n /**\n * Sends request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n return this.attemptSendRequest(request, false, 1);\n }\n\n /**\n * Decide and perform next retry. Won't mutate request parameter.\n *\n * @param request -\n * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then\n * the resource was not found. This may be due to replication delay. So, in this\n * case, we'll never try the secondary again for this operation.\n * @param attempt - How many retries has been attempted to performed, starting from 1, which includes\n * the attempt will be performed by this method call.\n */\n protected async attemptSendRequest(\n request: WebResource,\n secondaryHas404: boolean,\n attempt: number\n ): Promise {\n const newRequest: WebResource = request.clone();\n\n const isPrimaryRetry =\n secondaryHas404 ||\n !this.retryOptions.secondaryHost ||\n !(request.method === \"GET\" || request.method === \"HEAD\" || request.method === \"OPTIONS\") ||\n attempt % 2 === 1;\n\n if (!isPrimaryRetry) {\n newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost!);\n }\n\n // Set the server-side timeout query parameter \"timeout=[seconds]\"\n if (this.retryOptions.tryTimeoutInMs) {\n newRequest.url = setURLParameter(\n newRequest.url,\n URLConstants.Parameters.TIMEOUT,\n Math.floor(this.retryOptions.tryTimeoutInMs! 
/ 1000).toString()\n );\n }\n\n let response: HttpOperationResponse | undefined;\n try {\n logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? \"Primary\" : \"Secondary\"}`);\n response = await this._nextPolicy.sendRequest(newRequest);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {\n return response;\n }\n\n secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);\n } catch (err: any) {\n logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) {\n throw err;\n }\n }\n\n await this.delay(isPrimaryRetry, attempt, request.abortSignal);\n return this.attemptSendRequest(request, secondaryHas404, ++attempt);\n }\n\n /**\n * Decide whether to retry according to last HTTP response and retry counters.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param response -\n * @param err -\n */\n protected shouldRetry(\n isPrimaryRetry: boolean,\n attempt: number,\n response?: HttpOperationResponse,\n err?: RestError\n ): boolean {\n if (attempt >= this.retryOptions.maxTries!) {\n logger.info(\n `RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions\n .maxTries!}, no further try.`\n );\n return false;\n }\n\n // Handle network failures, you may need to customize the list when you implement\n // your own http client\n const retriableErrors = [\n \"ETIMEDOUT\",\n \"ESOCKETTIMEDOUT\",\n \"ECONNREFUSED\",\n \"ECONNRESET\",\n \"ENOENT\",\n \"ENOTFOUND\",\n \"TIMEOUT\",\n \"EPIPE\",\n \"REQUEST_SEND_ERROR\", // For default xhr based http client provided in ms-rest-js\n ];\n if (err) {\n for (const retriableError of retriableErrors) {\n if (\n err.name.toUpperCase().includes(retriableError) ||\n err.message.toUpperCase().includes(retriableError) ||\n (err.code && err.code.toString().toUpperCase() === retriableError)\n ) {\n logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`);\n return true;\n }\n }\n }\n\n // If attempt was against the secondary & it returned a StatusNotFound (404), then\n // the resource was not found. This may be due to replication delay. So, in this\n // case, we'll never try the secondary again for this operation.\n if (response || err) {\n const statusCode = response ? response.status : err ? 
err.statusCode : 0;\n if (!isPrimaryRetry && statusCode === 404) {\n logger.info(`RetryPolicy: Secondary access with 404, will retry.`);\n return true;\n }\n\n // Server internal error or server timeout\n if (statusCode === 503 || statusCode === 500) {\n logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`);\n return true;\n }\n }\n\n if (err?.code === \"PARSE_ERROR\" && err?.message.startsWith(`Error \"Error: Unclosed root tag`)) {\n logger.info(\n \"RetryPolicy: Incomplete XML response likely due to service timeout, will retry.\"\n );\n return true;\n }\n\n return false;\n }\n\n /**\n * Delay a calculated time between retries.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param abortSignal -\n */\n private async delay(isPrimaryRetry: boolean, attempt: number, abortSignal?: AbortSignalLike) {\n let delayTimeInMs: number = 0;\n\n if (isPrimaryRetry) {\n switch (this.retryOptions.retryPolicyType) {\n case StorageRetryPolicyType.EXPONENTIAL:\n delayTimeInMs = Math.min(\n (Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs!,\n this.retryOptions.maxRetryDelayInMs!\n );\n break;\n case StorageRetryPolicyType.FIXED:\n delayTimeInMs = this.retryOptions.retryDelayInMs!;\n break;\n }\n } else {\n delayTimeInMs = Math.random() * 1000;\n }\n\n logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`);\n return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageRetryPolicy, StorageRetryPolicyType } from \"./policies/StorageRetryPolicy\";\n\nexport { StorageRetryPolicyType, StorageRetryPolicy };\n\n/**\n * Storage Blob retry options interface.\n */\nexport interface StorageRetryOptions {\n /**\n * Optional. StorageRetryPolicyType, default is exponential retry policy.\n */\n readonly retryPolicyType?: StorageRetryPolicyType;\n\n /**\n * Optional. Max try number of attempts, default is 4.\n * A value of 1 means 1 try and no retries.\n * A value smaller than 1 means default retry number of attempts.\n */\n readonly maxTries?: number;\n\n /**\n * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.\n * A value of zero or undefined means no default timeout on SDK client, Azure\n * Storage server's default timeout policy will be used.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations\n */\n readonly tryTimeoutInMs?: number;\n\n /**\n * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).\n * The delay increases (exponentially or linearly) with each retry up to a maximum specified by\n * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.\n */\n readonly retryDelayInMs?: number;\n\n /**\n * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).\n * If you specify 0, then you must also specify 0 for retryDelayInMs.\n */\n readonly maxRetryDelayInMs?: number;\n\n /**\n * If a secondaryHost is specified, retries will be tried against this host. 
If secondaryHost is undefined\n * (the default) then operations are not retried against another host.\n *\n * NOTE: Before setting this field, make sure you understand the issues around\n * reading stale and potentially-inconsistent data at\n * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}\n */\n readonly secondaryHost?: string;\n}\n\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n */\nexport class StorageRetryPolicyFactory implements RequestPolicyFactory {\n private retryOptions?: StorageRetryOptions;\n\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param retryOptions -\n */\n constructor(retryOptions?: StorageRetryOptions) {\n this.retryOptions = retryOptions;\n }\n\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BaseRequestPolicy, HttpOperationResponse, WebResource } from \"@azure/core-http\";\n\n/**\n * Credential policy used to sign HTTP(S) requests before sending. This is an\n * abstract class.\n */\nexport abstract class CredentialPolicy extends BaseRequestPolicy {\n /**\n * Sends out request.\n *\n * @param request -\n */\n public sendRequest(request: WebResource): Promise {\n return this._nextPolicy.sendRequest(this.signRequest(request));\n }\n\n /**\n * Child classes must implement this method with request signing. This method\n * will be executed in {@link sendRequest}.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n // Child classes must override this method with request signing. This method\n // will be executed in sendRequest().\n return request;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources\n * or for use with Shared Access Signatures (SAS).\n */\nexport class AnonymousCredentialPolicy extends CredentialPolicy {\n /**\n * Creates an instance of AnonymousCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { CredentialPolicy } from \"../policies/CredentialPolicy\";\n\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. 
This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n */\nexport abstract class Credential implements RequestPolicyFactory {\n /**\n * Creates a RequestPolicy object.\n *\n * @param _nextPolicy -\n * @param _options -\n */\n public create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy {\n throw new Error(\"Method should be implemented in children classes.\");\n }\n}\n\n/**\n * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.\n */\nexport type CredentialPolicyCreator = (\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n) => CredentialPolicy;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { AnonymousCredentialPolicy } from \"../policies/AnonymousCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n */\nexport class AnonymousCredential extends Credential {\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): AnonymousCredentialPolicy {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpHeaders,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants } from \"../utils/constants\";\n\n/**\n * TelemetryPolicy is a policy used to tag user-agent header for every requests.\n */\nexport class TelemetryPolicy extends BaseRequestPolicy {\n /**\n * Telemetry string.\n */\n public readonly telemetry: string;\n\n /**\n * Creates an instance of TelemetryPolicy.\n * @param nextPolicy -\n * @param options -\n * @param telemetry -\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, telemetry: string) {\n super(nextPolicy, options);\n this.telemetry = telemetry;\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n if (!request.headers.get(HeaderConstants.USER_AGENT)) {\n request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);\n }\n }\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n isNode,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n UserAgentOptions,\n} from \"@azure/core-http\";\nimport * as os from \"os\";\n\nimport { TelemetryPolicy } from \"./policies/TelemetryPolicy\";\nimport { SDK_VERSION } from \"./utils/constants\";\n\n/**\n * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.\n */\nexport class TelemetryPolicyFactory implements RequestPolicyFactory {\n /**\n * @internal\n */\n public readonly telemetryString: string;\n\n /**\n * Creates an instance of TelemetryPolicyFactory.\n * @param telemetry -\n */\n constructor(telemetry?: UserAgentOptions) {\n const 
userAgentInfo: string[] = [];\n\n if (isNode) {\n if (telemetry) {\n const telemetryString = telemetry.userAgentPrefix || \"\";\n if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {\n userAgentInfo.push(telemetryString);\n }\n }\n\n // e.g. azsdk-js-storageblob/10.0.0\n const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`;\n if (userAgentInfo.indexOf(libInfo) === -1) {\n userAgentInfo.push(libInfo);\n }\n\n // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299)\n let runtimeInfo = `(NODE-VERSION ${process.version})`;\n if (os) {\n runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`;\n }\n if (userAgentInfo.indexOf(runtimeInfo) === -1) {\n userAgentInfo.push(runtimeInfo);\n }\n }\n\n this.telemetryString = userAgentInfo.join(\" \");\n }\n\n /**\n * Creates a TelemetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): TelemetryPolicy {\n return new TelemetryPolicy(nextPolicy, options, this.telemetryString);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"@azure/core-http\";\nimport { IHttpClient } from \"../Pipeline\";\n\nconst _defaultHttpClient = new DefaultHttpClient();\n\nexport function getCachedDefaultHttpClient(): IHttpClient {\n return _defaultHttpClient;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential, URLBuilder } from \"@azure/core-http\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"@azure/core-http\";\nimport { HttpOperationResponse } from \"@azure/core-http\";\nimport { WebResourceLike } from \"@azure/core-http\";\nimport { delay } from \"@azure/core-http\";\n\n/**\n * A set of constants used internally when processing requests.\n */\nconst Constants = {\n DefaultScope: \"/.default\",\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n },\n};\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nconst DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the 
unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 
0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n/**\n * We will retrieve the challenge only if the response status code was 401,\n * and if the response contained the header \"WWW-Authenticate\" with a non-empty value.\n */\nfunction getChallenge(response: HttpOperationResponse): string | undefined {\n const challenge = response.headers.get(\"WWW-Authenticate\");\n if (response.status === 401 && challenge) {\n return challenge;\n }\n return;\n}\n\n/**\n * Challenge structure\n */\ninterface Challenge {\n authorization_uri: string;\n resource_id: string;\n}\n\n/**\n * Converts: `Bearer a=\"b\" c=\"d\"`.\n * Into: `[ { a: 'b', c: 'd' }]`.\n *\n * @internal\n */\nfunction parseChallenge(challenge: string): any {\n const bearerChallenge = challenge.slice(\"Bearer \".length);\n const challengeParts = `${bearerChallenge.trim()} `.split(\" \").filter((x) => x);\n const keyValuePairs = challengeParts.map((keyValue) =>\n (([key, value]) => ({ [key]: value }))(keyValue.trim().split(\"=\"))\n );\n // Key-value pairs to plain object:\n return keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {});\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\n\nexport function storageBearerTokenChallengeAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n let getToken = createTokenCycler(credential, 
scopes);\n\n class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const getTokenInternal = getToken;\n const token = (\n await getTokenInternal({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n })\n ).token;\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n\n const response = await this._nextPolicy.sendRequest(webResource);\n\n if (response?.status === 401) {\n const challenge = getChallenge(response);\n if (challenge) {\n const challengeInfo: Challenge = parseChallenge(challenge);\n const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope;\n const parsedAuthUri = URLBuilder.parse(challengeInfo.authorization_uri);\n const pathSegments = parsedAuthUri.getPath()!.split(\"/\");\n const tenantId = pathSegments[1];\n const getTokenForChallenge = createTokenCycler(credential, challengeScopes);\n\n const tokenForChallenge = (\n await getTokenForChallenge({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n tenantId: tenantId,\n })\n ).token;\n\n getToken = getTokenForChallenge;\n webResource.headers.set(\n Constants.HeaderConstants.AUTHORIZATION,\n `Bearer ${tokenForChallenge}`\n );\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return response;\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n disableResponseDecompressionPolicy,\n HttpClient as IHttpClient,\n HttpHeaders,\n HttpOperationResponse,\n HttpRequestBody,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n ServiceClientOptions,\n WebResource,\n proxyPolicy,\n isNode,\n TokenCredential,\n isTokenCredential,\n tracingPolicy,\n logPolicy,\n ProxyOptions,\n keepAlivePolicy,\n KeepAliveOptions,\n generateClientRequestIdPolicy,\n UserAgentOptions,\n} from \"@azure/core-http\";\n\nimport { logger } from \"./log\";\nimport { StorageBrowserPolicyFactory } from \"./StorageBrowserPolicyFactory\";\nimport { StorageRetryOptions, StorageRetryPolicyFactory } from \"./StorageRetryPolicyFactory\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport {\n StorageOAuthScopes,\n StorageBlobLoggingAllowedHeaderNames,\n StorageBlobLoggingAllowedQueryParameters,\n} from \"./utils/constants\";\nimport { TelemetryPolicyFactory } from \"./TelemetryPolicyFactory\";\nimport { getCachedDefaultHttpClient } from \"./utils/cache\";\nimport { attachCredential } from \"./utils/utils.common\";\nimport { storageBearerTokenChallengeAuthenticationPolicy } from \"./policies/StorageBearerTokenChallengeAuthenticationPolicy\";\n\n// Export following interfaces and types for customers who want to implement their\n// own RequestPolicy or HTTPClient\nexport {\n 
BaseRequestPolicy,\n StorageOAuthScopes,\n deserializationPolicy,\n IHttpClient,\n HttpHeaders,\n HttpRequestBody,\n HttpOperationResponse,\n WebResource,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptions,\n};\n\n/**\n * Option interface for Pipeline constructor.\n */\nexport interface PipelineOptions {\n /**\n * Optional. Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n}\n\n/**\n * An interface for the {@link Pipeline} class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport interface PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n readonly factories: RequestPolicyFactory[];\n /**\n * Configures pipeline logger and HTTP client.\n */\n readonly options: PipelineOptions;\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n toServiceClientOptions(): ServiceClientOptions;\n}\n\n/**\n * A helper to decide if a given argument satisfies the Pipeline contract\n * @param pipeline - An argument that may be a Pipeline\n * @returns true when the argument satisfies the Pipeline contract\n */\nexport function isPipelineLike(pipeline: unknown): pipeline is PipelineLike {\n if (!pipeline || typeof pipeline !== \"object\") {\n return false;\n }\n\n const castPipeline = pipeline as PipelineLike;\n\n return (\n Array.isArray(castPipeline.factories) &&\n typeof castPipeline.options === \"object\" &&\n typeof castPipeline.toServiceClientOptions === \"function\"\n );\n}\n\n/**\n * A Pipeline class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport class Pipeline implements PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n public readonly factories: RequestPolicyFactory[];\n /**\n * Configures pipeline logger and HTTP client.\n */\n public readonly options: PipelineOptions;\n\n /**\n * Creates an instance of Pipeline. 
Customize HTTPClient by implementing IHttpClient interface.\n *\n * @param factories -\n * @param options -\n */\n constructor(factories: RequestPolicyFactory[], options: PipelineOptions = {}) {\n this.factories = factories;\n // when options.httpClient is not specified, passing in a DefaultHttpClient instance to\n // avoid each client creating its own http client.\n this.options = {\n ...options,\n httpClient: options.httpClient || getCachedDefaultHttpClient(),\n };\n }\n\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n public toServiceClientOptions(): ServiceClientOptions {\n return {\n httpClient: this.options.httpClient,\n requestPolicyFactories: this.factories,\n };\n }\n}\n\n/**\n * Options interface for the {@link newPipeline} function.\n */\nexport interface StoragePipelineOptions {\n /**\n * Options to configure a proxy for outgoing requests.\n */\n proxyOptions?: ProxyOptions;\n /**\n * Options for adding user agent details to outgoing requests.\n */\n userAgentOptions?: UserAgentOptions;\n /**\n * Configures the built-in retry policy behavior.\n */\n retryOptions?: StorageRetryOptions;\n /**\n * Keep alive configurations. Default keep-alive is enabled.\n */\n keepAliveOptions?: KeepAliveOptions;\n /**\n * Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n /**\n * The audience used to retrieve an AAD token.\n * By default, audience 'https://storage.azure.com/.default' will be used.\n */\n audience?: string | string[];\n}\n\n/**\n * Creates a new Pipeline object with Credential provided.\n *\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param pipelineOptions - Optional. Options.\n * @returns A new Pipeline object.\n */\nexport function newPipeline(\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n pipelineOptions: StoragePipelineOptions = {}\n): Pipeline {\n if (credential === undefined) {\n credential = new AnonymousCredential();\n }\n\n // Order is important. 
Closer to the API at the top & closer to the network at the bottom.\n // The credential's policy factory must appear close to the wire so it can sign any\n // changes made by other factories (like UniqueRequestIDPolicyFactory)\n\n const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);\n const factories: RequestPolicyFactory[] = [\n tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),\n keepAlivePolicy(pipelineOptions.keepAliveOptions),\n telemetryPolicy,\n generateClientRequestIdPolicy(),\n new StorageBrowserPolicyFactory(),\n new StorageRetryPolicyFactory(pipelineOptions.retryOptions), // Retry policy should be above any policy that throws retryable errors\n // Default deserializationPolicy is provided by protocol layer\n // Use customized XML char key of \"#\" so we could deserialize metadata\n // with \"_\" key\n deserializationPolicy(undefined, { xmlCharKey: \"#\" }),\n logPolicy({\n logger: logger.info,\n allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,\n allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,\n }),\n ];\n\n if (isNode) {\n // policies only available in Node.js runtime, not in browsers\n factories.push(proxyPolicy(pipelineOptions.proxyOptions));\n factories.push(disableResponseDecompressionPolicy());\n }\n factories.push(\n isTokenCredential(credential)\n ? attachCredential(\n storageBearerTokenChallengeAuthenticationPolicy(\n credential,\n pipelineOptions.audience ?? StorageOAuthScopes\n ),\n credential\n )\n : credential\n );\n\n return new Pipeline(factories, pipelineOptions);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions, WebResource } from \"@azure/core-http\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { HeaderConstants } from \"../utils/constants\";\nimport { getURLPath, getURLQueries } from \"../utils/utils.common\";\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.\n */\nexport class StorageSharedKeyCredentialPolicy extends CredentialPolicy {\n /**\n * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy\n */\n private readonly factory: StorageSharedKeyCredential;\n\n /**\n * Creates an instance of StorageSharedKeyCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n * @param factory -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n factory: StorageSharedKeyCredential\n ) {\n super(nextPolicy, options);\n this.factory = factory;\n }\n\n /**\n * Signs request.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());\n\n if (\n request.body &&\n (typeof request.body === \"string\" || (request.body as Buffer) !== undefined) &&\n request.body.length > 0\n ) {\n request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));\n }\n\n const stringToSign: string =\n [\n request.method.toUpperCase(),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),\n 
this.getHeaderValueToSign(request, HeaderConstants.DATE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.RANGE),\n ].join(\"\\n\") +\n \"\\n\" +\n this.getCanonicalizedHeadersString(request) +\n this.getCanonicalizedResourceString(request);\n\n const signature: string = this.factory.computeHMACSHA256(stringToSign);\n request.headers.set(\n HeaderConstants.AUTHORIZATION,\n `SharedKey ${this.factory.accountName}:${signature}`\n );\n\n // console.log(`[URL]:${request.url}`);\n // console.log(`[HEADERS]:${request.headers.toString()}`);\n // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);\n // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);\n return request;\n }\n\n /**\n * Retrieve header value according to shared key sign rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n *\n * @param request -\n * @param headerName -\n */\n private getHeaderValueToSign(request: WebResource, headerName: string): string {\n const value = request.headers.get(headerName);\n\n if (!value) {\n return \"\";\n }\n\n // When using version 2015-02-21 or later, if Content-Length is zero, then\n // set the Content-Length part of the StringToSign to an empty string.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n if (headerName === HeaderConstants.CONTENT_LENGTH && value === \"0\") {\n return \"\";\n }\n\n return value;\n }\n\n /**\n * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:\n * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.\n * 2. Convert each HTTP header name to lowercase.\n * 3. Sort the headers lexicographically by header name, in ascending order.\n * Each header may appear only once in the string.\n * 4. Replace any linear whitespace in the header value with a single space.\n * 5. Trim any whitespace around the colon in the header.\n * 6. 
Finally, append a new-line character to each canonicalized header in the resulting list.\n * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.\n *\n * @param request -\n */\n private getCanonicalizedHeadersString(request: WebResource): string {\n let headersArray = request.headers.headersArray().filter((value) => {\n return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);\n });\n\n headersArray.sort((a, b): number => {\n return a.name.toLowerCase().localeCompare(b.name.toLowerCase());\n });\n\n // Remove duplicate headers\n headersArray = headersArray.filter((value, index, array) => {\n if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {\n return false;\n }\n return true;\n });\n\n let canonicalizedHeadersStringToSign: string = \"\";\n headersArray.forEach((header) => {\n canonicalizedHeadersStringToSign += `${header.name\n .toLowerCase()\n .trimRight()}:${header.value.trimLeft()}\\n`;\n });\n\n return canonicalizedHeadersStringToSign;\n }\n\n /**\n * Retrieves the webResource canonicalized resource string.\n *\n * @param request -\n */\n private getCanonicalizedResourceString(request: WebResource): string {\n const path = getURLPath(request.url) || \"/\";\n\n let canonicalizedResourceString: string = \"\";\n canonicalizedResourceString += `/${this.factory.accountName}${path}`;\n\n const queries = getURLQueries(request.url);\n const lowercaseQueries: { [key: string]: string } = {};\n if (queries) {\n const queryKeys: string[] = [];\n for (const key in queries) {\n if (Object.prototype.hasOwnProperty.call(queries, key)) {\n const lowercaseKey = key.toLowerCase();\n lowercaseQueries[lowercaseKey] = queries[key];\n queryKeys.push(lowercaseKey);\n }\n }\n\n queryKeys.sort();\n for (const key of queryKeys) {\n canonicalizedResourceString += `\\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;\n }\n }\n\n return canonicalizedResourceString;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { StorageSharedKeyCredentialPolicy } from \"../policies/StorageSharedKeyCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nexport class StorageSharedKeyCredential extends Credential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage account key; readonly.\n */\n private readonly accountKey: Buffer;\n\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n constructor(accountName: string, accountKey: string) {\n super();\n this.accountName = accountName;\n this.accountKey = Buffer.from(accountKey, \"base64\");\n }\n\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): StorageSharedKeyCredentialPolicy {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n return createHmac(\"sha256\", 
this.accountKey).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nconst packageName = \"azure-storage-blob\";\nconst packageVersion = \"12.17.0\";\n\nexport class StorageClientContext extends coreHttp.ServiceClient {\n url: string;\n version: string;\n\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n if (url === undefined) {\n throw new Error(\"'url' cannot be null\");\n }\n\n // Initializing default values for options\n if (!options) {\n options = {};\n }\n\n if (!options.userAgent) {\n const defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;\n }\n\n super(undefined, options);\n\n this.requestContentType = \"application/json; charset=utf-8\";\n\n this.baseUri = options.endpoint || \"{url}\";\n\n // Parameter assignments\n this.url = url;\n\n // Assigning values to Constant parameters\n this.version = options.version || \"2023-11-03\";\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike } from \"./Pipeline\";\nimport { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from \"./utils/utils.common\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { TokenCredential, isTokenCredential, isNode } from \"@azure/core-http\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * An interface for options common to every remote operation.\n */\nexport interface CommonOptions {\n /**\n * Options to configure spans created when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nexport abstract class StorageClient {\n /**\n * Encoded URL string value.\n */\n public readonly url: string;\n public readonly accountName: string;\n /**\n * Request policy pipeline.\n *\n * @internal\n */\n protected readonly pipeline: PipelineLike;\n /**\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n public readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n /**\n * StorageClient is a reference to protocol layer operations entry, which is\n * generated by AutoRest generator.\n */\n protected readonly storageClientContext: StorageClientContext;\n /**\n */\n protected readonly isHttps: boolean;\n\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n protected constructor(url: string, pipeline: PipelineLike) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(\n this.url,\n pipeline.toServiceClientOptions()\n );\n\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n\n this.credential = new AnonymousCredential();\n for (const factory of this.pipeline.factories) {\n if (\n (isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential\n ) {\n this.credential = factory;\n } else if (isTokenCredential((factory as any).credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = (factory as any).credential;\n }\n }\n\n // Override protocol layer's default content-type\n const storageClientContext = this.storageClientContext as any;\n storageClientContext.requestContentType = undefined;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { OperationOptions, RequestOptionsBase } from \"@azure/core-http\";\nimport { createSpanFunction } from \"@azure/core-tracing\";\n\n/**\n * Creates a span using the global tracer.\n * @internal\n */\nexport const createSpan = createSpanFunction({\n packagePrefix: \"Azure.Storage.Blob\",\n namespace: \"Microsoft.Storage\",\n});\n\n/**\n * @internal\n *\n * Adapt the tracing options from OperationOptions to what they need to be for\n * RequestOptionsBase (when we update to later OpenTelemetry versions this is now\n * two separate fields, not just one).\n */\nexport function convertTracingToRequestOptionsBase(\n options?: OperationOptions\n): Pick {\n return {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n spanOptions: (options?.tracingOptions as any)?.spanOptions,\n tracingContext: options?.tracingOptions?.tracingContext,\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting\n * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all\n * the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class BlobSASPermissions {\n /**\n * Creates a {@link BlobSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n blobSASPermissions.read = true;\n break;\n case \"a\":\n blobSASPermissions.add = true;\n break;\n case \"c\":\n blobSASPermissions.create = true;\n break;\n case \"w\":\n blobSASPermissions.write = true;\n break;\n case \"d\":\n blobSASPermissions.delete = true;\n break;\n case \"x\":\n blobSASPermissions.deleteVersion = true;\n break;\n case \"t\":\n blobSASPermissions.tag = true;\n break;\n case \"m\":\n blobSASPermissions.move = true;\n break;\n case \"e\":\n blobSASPermissions.execute = true;\n break;\n case \"i\":\n blobSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n blobSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission: ${char}`);\n }\n }\n\n return blobSASPermissions;\n }\n\n /**\n * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n if (permissionLike.read) {\n blobSASPermissions.read = true;\n }\n if (permissionLike.add) {\n blobSASPermissions.add = true;\n }\n if (permissionLike.create) {\n blobSASPermissions.create = true;\n }\n if (permissionLike.write) {\n blobSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n blobSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n blobSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n blobSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n blobSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n blobSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n blobSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n blobSASPermissions.permanentDelete = true;\n }\n return blobSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * @returns A string which represents the BlobSASPermissions\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Blob SAS permission.\n * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface BlobSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.\n * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.\n * Once all the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class ContainerSASPermissions {\n /**\n * Creates an {@link ContainerSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n containerSASPermissions.read = true;\n break;\n case \"a\":\n containerSASPermissions.add = true;\n break;\n case \"c\":\n containerSASPermissions.create = true;\n break;\n case \"w\":\n containerSASPermissions.write = true;\n break;\n case \"d\":\n containerSASPermissions.delete = true;\n break;\n case \"l\":\n containerSASPermissions.list = true;\n break;\n case \"t\":\n containerSASPermissions.tag = true;\n break;\n case \"x\":\n containerSASPermissions.deleteVersion = true;\n break;\n case \"m\":\n containerSASPermissions.move = true;\n break;\n case \"e\":\n containerSASPermissions.execute = true;\n break;\n case \"i\":\n containerSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n containerSASPermissions.permanentDelete = true;\n break;\n case \"f\":\n containerSASPermissions.filterByTags = true;\n break;\n default:\n throw new RangeError(`Invalid permission ${char}`);\n }\n }\n\n return containerSASPermissions;\n }\n\n /**\n * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n if (permissionLike.read) {\n containerSASPermissions.read = true;\n }\n if (permissionLike.add) {\n containerSASPermissions.add = true;\n }\n if (permissionLike.create) {\n containerSASPermissions.create = true;\n }\n if (permissionLike.write) {\n containerSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n containerSASPermissions.delete = true;\n }\n if (permissionLike.list) {\n containerSASPermissions.list = true;\n }\n if (permissionLike.deleteVersion) {\n containerSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n containerSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n containerSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n containerSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n containerSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n containerSASPermissions.permanentDelete = true;\n }\n if (permissionLike.filterByTags) {\n containerSASPermissions.filterByTags = true;\n }\n return containerSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specifies List access granted.\n */\n public list: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n 
* Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n public filterByTags: boolean = false;\n\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * The order of the characters should be as specified here to ensure correctness.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n if (this.filterByTags) {\n permissions.push(\"f\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Container SAS permission.\n * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface ContainerSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specifies List access granted.\n */\n list?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n filterByTags?: boolean;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nexport class UserDelegationKeyCredential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage user delegation key; readonly.\n */\n public readonly userDelegationKey: UserDelegationKey;\n\n /**\n * Key value in Buffer type.\n */\n private readonly key: Buffer;\n\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n constructor(accountName: string, 
userDelegationKey: UserDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n\n return createHmac(\"sha256\", this.key).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allowed IP range for a SAS.\n */\nexport interface SasIPRange {\n /**\n * Starting IP address in the IP range.\n * If end IP doesn't provide, start IP will the only IP allowed.\n */\n start: string;\n /**\n * Optional. IP address that ends the IP range.\n * If not provided, start IP will the only IP allowed.\n */\n end?: string;\n}\n\n/**\n * Generate SasIPRange format string. For example:\n *\n * \"8.8.8.8\" or \"1.1.1.1-255.255.255.255\"\n *\n * @param ipRange -\n */\nexport function ipRangeToString(ipRange: SasIPRange): string {\n return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * Protocols for generated SAS.\n */\nexport enum SASProtocol {\n /**\n * Protocol that allows HTTPS only\n */\n Https = \"https\",\n\n /**\n * Protocol that allows both HTTPS and HTTP\n */\n HttpsAndHttp = \"https,http\",\n}\n\n/**\n * Options to construct {@link SASQueryParameters}.\n */\nexport interface SASQueryParametersOptions {\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n permissions?: string;\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n services?: string;\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n resourceTypes?: string;\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n protocol?: SASProtocol;\n /**\n * Optional. The start time for this SAS token.\n */\n startsOn?: Date;\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n expiresOn?: Date;\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n /**\n * Optional. 
Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n resource?: string;\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n cacheControl?: string;\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n contentDisposition?: string;\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n contentEncoding?: string;\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n contentLanguage?: string;\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n contentType?: string;\n /**\n * User delegation key properties.\n */\n userDelegationKey?: UserDelegationKey;\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}.\n * This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly\n * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}\n * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should\n * be taken here in case there are existing query parameters, which might affect the appropriate means of appending\n * these query parameters).\n *\n * NOTE: Instances of this class are immutable.\n */\nexport class SASQueryParameters {\n /**\n * The storage API version.\n */\n public readonly version: string;\n\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n public readonly protocol?: SASProtocol;\n\n /**\n * Optional. The start time for this SAS token.\n */\n public readonly startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n public readonly expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n public readonly permissions?: string;\n\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n public readonly services?: string;\n\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n public readonly resourceTypes?: string;\n\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n public readonly identifier?: string;\n\n /**\n * Optional. 
Encryption scope to use when sending requests authorized with this SAS URI.\n */\n public readonly encryptionScope?: string;\n\n /**\n * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n public readonly resource?: string;\n\n /**\n * The signature for the SAS token.\n */\n public readonly signature: string;\n\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n public readonly cacheControl?: string;\n\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n public readonly contentDisposition?: string;\n\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n public readonly contentEncoding?: string;\n\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n public readonly contentLanguage?: string;\n\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n public readonly contentType?: string;\n\n /**\n * Inner value of getter ipRange.\n */\n private readonly ipRangeInner?: SasIPRange;\n\n /**\n * The Azure Active Directory object ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedOid?: string;\n\n /**\n * The Azure Active Directory tenant ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedTenantId?: string;\n\n /**\n * The date-time the key is active.\n * Property of user delegation key.\n */\n private readonly signedStartsOn?: Date;\n\n /**\n * The date-time the key expires.\n * Property of user delegation key.\n */\n private readonly signedExpiresOn?: Date;\n\n /**\n * Abbreviation of the Azure Storage service that accepts the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedService?: string;\n\n /**\n * The service version that created the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedVersion?: string;\n\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This is only used for User Delegation SAS.\n */\n public readonly preauthorizedAgentObjectId?: string;\n\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n public readonly correlationId?: string;\n\n /**\n * Optional. 
IP range allowed for this SAS.\n *\n * @readonly\n */\n public get ipRange(): SasIPRange | undefined {\n if (this.ipRangeInner) {\n return {\n end: this.ipRangeInner.end,\n start: this.ipRangeInner.start,\n };\n }\n return undefined;\n }\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param permissions - Representing the storage permissions\n * @param services - Representing the storage services being accessed (only for Account SAS)\n * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS)\n * @param protocol - Representing the allowed HTTP protocol(s)\n * @param startsOn - Representing the start time for this SAS token\n * @param expiresOn - Representing the expiry time for this SAS token\n * @param ipRange - Representing the range of valid IP addresses for this SAS token\n * @param identifier - Representing the signed identifier (only for Service SAS)\n * @param resource - Representing the storage container or blob (only for Service SAS)\n * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS)\n * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS)\n * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS)\n * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS)\n * @param contentType - Representing the content-type header (only for Blob/File Service SAS)\n * @param userDelegationKey - Representing the user delegation key properties\n * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS)\n * @param correlationId - Representing the correlation ID (only for User Delegation SAS)\n * @param encryptionScope -\n */\n constructor(\n version: string,\n signature: string,\n permissions?: string,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n );\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param options - Optional. 
Options to construct the SASQueryParameters.\n */\n constructor(version: string, signature: string, options?: SASQueryParametersOptions);\n\n constructor(\n version: string,\n signature: string,\n permissionsOrOptions?: string | SASQueryParametersOptions,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n ) {\n this.version = version;\n this.signature = signature;\n\n if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== \"string\") {\n // SASQueryParametersOptions\n this.permissions = permissionsOrOptions.permissions;\n this.services = permissionsOrOptions.services;\n this.resourceTypes = permissionsOrOptions.resourceTypes;\n this.protocol = permissionsOrOptions.protocol;\n this.startsOn = permissionsOrOptions.startsOn;\n this.expiresOn = permissionsOrOptions.expiresOn;\n this.ipRangeInner = permissionsOrOptions.ipRange;\n this.identifier = permissionsOrOptions.identifier;\n this.encryptionScope = permissionsOrOptions.encryptionScope;\n this.resource = permissionsOrOptions.resource;\n this.cacheControl = permissionsOrOptions.cacheControl;\n this.contentDisposition = permissionsOrOptions.contentDisposition;\n this.contentEncoding = permissionsOrOptions.contentEncoding;\n this.contentLanguage = permissionsOrOptions.contentLanguage;\n this.contentType = permissionsOrOptions.contentType;\n\n if (permissionsOrOptions.userDelegationKey) {\n this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;\n this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;\n this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;\n this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;\n this.signedService = permissionsOrOptions.userDelegationKey.signedService;\n this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;\n this.correlationId = permissionsOrOptions.correlationId;\n }\n } else {\n this.services = services;\n this.resourceTypes = resourceTypes;\n this.expiresOn = expiresOn;\n this.permissions = permissionsOrOptions;\n this.protocol = protocol;\n this.startsOn = startsOn;\n this.ipRangeInner = ipRange;\n this.encryptionScope = encryptionScope;\n this.identifier = identifier;\n this.resource = resource;\n this.cacheControl = cacheControl;\n this.contentDisposition = contentDisposition;\n this.contentEncoding = contentEncoding;\n this.contentLanguage = contentLanguage;\n this.contentType = contentType;\n\n if (userDelegationKey) {\n this.signedOid = userDelegationKey.signedObjectId;\n this.signedTenantId = userDelegationKey.signedTenantId;\n this.signedStartsOn = userDelegationKey.signedStartsOn;\n this.signedExpiresOn = userDelegationKey.signedExpiresOn;\n this.signedService = userDelegationKey.signedService;\n this.signedVersion = userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;\n this.correlationId = correlationId;\n }\n }\n }\n\n /**\n * Encodes all SAS query parameters into a string that can be appended to a URL.\n *\n */\n public toString(): string {\n 
const params: string[] = [\n \"sv\",\n \"ss\",\n \"srt\",\n \"spr\",\n \"st\",\n \"se\",\n \"sip\",\n \"si\",\n \"ses\",\n \"skoid\", // Signed object ID\n \"sktid\", // Signed tenant ID\n \"skt\", // Signed key start time\n \"ske\", // Signed key expiry time\n \"sks\", // Signed key service\n \"skv\", // Signed key version\n \"sr\",\n \"sp\",\n \"sig\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"saoid\",\n \"scid\",\n ];\n const queries: string[] = [];\n\n for (const param of params) {\n switch (param) {\n case \"sv\":\n this.tryAppendQueryParameter(queries, param, this.version);\n break;\n case \"ss\":\n this.tryAppendQueryParameter(queries, param, this.services);\n break;\n case \"srt\":\n this.tryAppendQueryParameter(queries, param, this.resourceTypes);\n break;\n case \"spr\":\n this.tryAppendQueryParameter(queries, param, this.protocol);\n break;\n case \"st\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined\n );\n break;\n case \"se\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined\n );\n break;\n case \"sip\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.ipRange ? ipRangeToString(this.ipRange) : undefined\n );\n break;\n case \"si\":\n this.tryAppendQueryParameter(queries, param, this.identifier);\n break;\n case \"ses\":\n this.tryAppendQueryParameter(queries, param, this.encryptionScope);\n break;\n case \"skoid\": // Signed object ID\n this.tryAppendQueryParameter(queries, param, this.signedOid);\n break;\n case \"sktid\": // Signed tenant ID\n this.tryAppendQueryParameter(queries, param, this.signedTenantId);\n break;\n case \"skt\": // Signed key start time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined\n );\n break;\n case \"ske\": // Signed key expiry time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined\n );\n break;\n case \"sks\": // Signed key service\n this.tryAppendQueryParameter(queries, param, this.signedService);\n break;\n case \"skv\": // Signed key version\n this.tryAppendQueryParameter(queries, param, this.signedVersion);\n break;\n case \"sr\":\n this.tryAppendQueryParameter(queries, param, this.resource);\n break;\n case \"sp\":\n this.tryAppendQueryParameter(queries, param, this.permissions);\n break;\n case \"sig\":\n this.tryAppendQueryParameter(queries, param, this.signature);\n break;\n case \"rscc\":\n this.tryAppendQueryParameter(queries, param, this.cacheControl);\n break;\n case \"rscd\":\n this.tryAppendQueryParameter(queries, param, this.contentDisposition);\n break;\n case \"rsce\":\n this.tryAppendQueryParameter(queries, param, this.contentEncoding);\n break;\n case \"rscl\":\n this.tryAppendQueryParameter(queries, param, this.contentLanguage);\n break;\n case \"rsct\":\n this.tryAppendQueryParameter(queries, param, this.contentType);\n break;\n case \"saoid\":\n this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);\n break;\n case \"scid\":\n this.tryAppendQueryParameter(queries, param, this.correlationId);\n break;\n }\n }\n return queries.join(\"&\");\n }\n\n /**\n * A private helper method used to filter and append query key/value pairs into an array.\n *\n * @param queries -\n * @param key -\n * @param value -\n */\n private tryAppendQueryParameter(queries: string[], key: string, value?: string): void {\n if (!value) {\n return;\n }\n\n key = encodeURIComponent(key);\n value = encodeURIComponent(value);\n if (key.length > 0 && value.length > 0) {\n queries.push(`${key}=${value}`);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { BlobSASPermissions } from \"./BlobSASPermissions\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\nimport { ContainerSASPermissions } from \"./ContainerSASPermissions\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { UserDelegationKeyCredential } from \"../credentials/UserDelegationKeyCredential\";\nimport { ipRangeToString, SasIPRange } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs.\n */\nexport interface BlobSASSignatureValues {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource\n * being accessed for help constructing the permissions string.\n */\n permissions?: BlobSASPermissions | ContainerSASPermissions;\n\n /**\n * Optional. 
IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * The name of the container the SAS user may access.\n */\n containerName: string;\n\n /**\n * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided.\n */\n blobName?: string;\n\n /**\n * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09.\n */\n snapshotTime?: string;\n\n /**\n * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10.\n */\n versionId?: string;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user\n * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will\n * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission\n * check for the user specified in this value will be performed. This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to\n * correlate SAS generation with storage resource access. This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * Fill in the required details before running the following snippets.\n *\n * Example usage:\n *\n * ```js\n * // Generate service level SAS for a container\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using an identifier:\n *\n * ```js\n * // Generate service level SAS for a container with identifier\n * // startsOn & permissions are optional when identifier is provided\n * const identifier = \"unique-id\";\n * await containerClient.setAccessPolicy(undefined, [\n * {\n * accessPolicy: {\n * expiresOn: new Date(new Date().valueOf() + 86400), // Date type\n * permissions: ContainerSASPermissions.parse(\"racwdl\").toString(),\n * startsOn: new Date() // Date type\n * },\n * id: identifier\n * }\n * ]);\n *\n * const containerSAS = generateBlobSASQueryParameters(\n * {\n * containerName, // Required\n * identifier // Required\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using a blob name:\n *\n * ```js\n * // Generate service level SAS for a blob\n * const blobSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * blobName, // Required\n * permissions: BlobSASPermissions.parse(\"racwd\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type\n * cacheControl: \"cache-control-override\", // Optional\n * contentDisposition: \"content-disposition-override\", // Optional\n * contentEncoding: \"content-encoding-override\", // Optional\n * contentLanguage: \"content-language-override\", // Optional\n * contentType: \"content-type-override\", // Optional\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters;\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required.\n *\n * Example usage:\n *\n * ```js\n * // Generate user delegation SAS for a container\n * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn, // Optional. Date type\n * expiresOn, // Required. 
Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2018-11-09\" // Must greater than or equal to 2018-11-09 to generate user delegation SAS\n * },\n * userDelegationKey, // UserDelegationKey\n * accountName\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`\n * @param accountName -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKey: UserDelegationKey,\n accountName: string\n): SASQueryParameters;\n\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredentialOrUserDelegationKey: StorageSharedKeyCredential | UserDelegationKey,\n accountName?: string\n): SASQueryParameters {\n const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n\n const sharedKeyCredential =\n sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential\n ? sharedKeyCredentialOrUserDelegationKey\n : undefined;\n let userDelegationKeyCredential: UserDelegationKeyCredential | undefined;\n\n if (sharedKeyCredential === undefined && accountName !== undefined) {\n userDelegationKeyCredential = new UserDelegationKeyCredential(\n accountName,\n sharedKeyCredentialOrUserDelegationKey as UserDelegationKey\n );\n }\n\n if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {\n throw TypeError(\"Invalid sharedKeyCredential, userDelegationKey or accountName.\");\n }\n\n // Version 2020-12-06 adds support for encryptionscope in SAS.\n if (version >= \"2020-12-06\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential);\n } else {\n return generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n\n // Version 2019-12-12 adds support for the blob tags permission.\n // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string\n if (version >= \"2018-11-09\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);\n } else {\n // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.\n if (version >= \"2020-02-10\") {\n return generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n } else {\n return generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n }\n\n if (version >= \"2015-04-05\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);\n } else {\n throw new RangeError(\n \"'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.\"\n );\n }\n }\n\n throw new RangeError(\"'version' must be >= '2015-04-05'.\");\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a 
SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20150405(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl ? 
blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n undefined,\n undefined,\n undefined,\n blobSASSignatureValues.encryptionScope\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.encryptionScope\n );\n}\n\nfunction getCanonicalName(accountName: string, containerName: string, blobName?: string): string {\n // Container: \"/blob/account/containerName\"\n // Blob: \"/blob/account/containerName/blobName\"\n const elements: string[] = [`/blob/${accountName}/${containerName}`];\n if (blobName) {\n elements.push(`/${blobName}`);\n }\n return elements.join(\"\");\n}\n\nfunction SASSignatureValuesSanityCheckAndAutofill(\n blobSASSignatureValues: BlobSASSignatureValues\n): BlobSASSignatureValues {\n const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION;\n if (blobSASSignatureValues.snapshotTime && version < \"2018-11-09\") {\n throw RangeError(\"'version' must be >= '2018-11-09' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {\n throw RangeError(\"Must provide 'blobName' when providing 'snapshotTime'.\");\n }\n\n if (blobSASSignatureValues.versionId && version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {\n throw RangeError(\"Must provide 'blobName' when providing 'versionId'.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'x' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'y' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when providing 't' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)\n ) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.\");\n }\n\n if (\n version < \"2021-04-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions as ContainerSASPermissions).filterByTags\n ) {\n throw RangeError(\"'version' must be >= '2021-04-10' when providing the 'f' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)\n ) {\n throw RangeError(\n \"'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.\"\n );\n }\n\n if (blobSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n blobSASSignatureValues.version = version;\n return blobSASSignatureValues;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { generateUuid, HttpResponse } from \"@azure/core-http\";\nimport { StorageClientContext } from \"./generated/src/index\";\nimport { ContainerBreakLeaseOptionalParams } from \"./generatedModels\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Blob as StorageBlob, Container } from \"./generated/src/operations\";\nimport { ModifiedAccessConditions } from \"./models\";\nimport { CommonOptions } from \"./StorageClient\";\nimport { ETagNone } from \"./utils/constants\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobClient } from \"./Clients\";\nimport { ContainerClient } from 
\"./ContainerClient\";\n\n/**\n * The details for a specific lease.\n */\nexport interface Lease {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally. If the request version is 2011-08-18 or\n * newer, the ETag value will be in quotes.\n */\n etag?: string;\n /**\n * Returns the date and time the container was\n * last modified. Any operation that modifies the blob, including an update\n * of the blob's metadata or properties, changes the last-modified time of\n * the blob.\n */\n lastModified?: Date;\n /**\n * Uniquely identifies a container's lease\n */\n leaseId?: string;\n /**\n * Approximate time remaining in the lease\n * period, in seconds.\n */\n leaseTime?: number;\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n */\n requestId?: string;\n /**\n * Indicates the version of the Blob service used\n * to execute the request. This header is returned for requests made against\n * version 2009-09-19 and above.\n */\n version?: string;\n /**\n * UTC date/time value generated by the service that\n * indicates the time at which the response was initiated\n */\n date?: Date;\n /**\n * Error code if any associated with the response that returned\n * the Lease information.\n */\n errorCode?: string;\n}\n\n/**\n * Contains the response data for operations that create, modify, or delete a lease.\n *\n * See {@link BlobLeaseClient}.\n */\nexport type LeaseOperationResponse = Lease & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: Lease;\n };\n};\n\n/**\n * Configures lease operations.\n */\nexport interface LeaseOperationOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.\n */\nexport class BlobLeaseClient {\n private _leaseId: string;\n private _url: string;\n private _containerOrBlobOperation: Container | StorageBlob;\n private _isContainer: boolean;\n\n /**\n * Gets the lease Id.\n *\n * @readonly\n */\n public get leaseId(): string {\n return this._leaseId;\n }\n\n /**\n * Gets the url.\n *\n * @readonly\n */\n public get url(): string {\n return this._url;\n }\n\n /**\n * Creates an instance of BlobLeaseClient.\n * @param client - The client to make the lease operation requests.\n * @param leaseId - Initial proposed lease id.\n */\n constructor(client: ContainerClient | BlobClient, leaseId?: string) {\n const clientContext = new StorageClientContext(\n client.url,\n (client as any).pipeline.toServiceClientOptions()\n );\n this._url = client.url;\n\n if ((client as BlobClient).name === undefined) {\n this._isContainer = true;\n this._containerOrBlobOperation = new Container(clientContext);\n } else {\n this._isContainer = false;\n this._containerOrBlobOperation = new StorageBlob(clientContext);\n }\n\n if (!leaseId) {\n leaseId = generateUuid();\n }\n this._leaseId = leaseId;\n }\n\n /**\n * Establishes and manages a lock on a container for delete operations, or on a blob\n * for write and delete operations.\n * The lock duration can be 15 to 60 seconds, or can be infinite.\n * @see 
https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param duration - Must be between 15 to 60 seconds, or infinite (-1)\n * @param options - option to configure lease management operations.\n * @returns Response data for acquire lease operation.\n */\n public async acquireLease(\n duration: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-acquireLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.acquireLease({\n abortSignal: options.abortSignal,\n duration,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n proposedLeaseId: this._leaseId,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To change the ID of the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param proposedLeaseId - the proposed new lease Id.\n * @param options - option to configure lease management operations.\n * @returns Response data for change lease operation.\n */\n public async changeLease(\n proposedLeaseId: string,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-changeLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const response = await this._containerOrBlobOperation.changeLease(\n this._leaseId,\n proposedLeaseId,\n {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n this._leaseId = proposedLeaseId;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To free the lease if it is no longer needed so that another client may\n * immediately acquire a lease against the container or the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - option to configure lease management operations.\n * @returns Response data for release lease operation.\n */\n public async releaseLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-releaseLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.releaseLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To renew the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - Optional option to configure lease management operations.\n * @returns Response data for renew lease operation.\n */\n public async renewLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-renewLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.renewLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To end the lease but ensure that another client cannot acquire a new lease\n * until the current lease period has expired.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param breakPeriod - Break period\n * @param options - Optional options to configure lease management operations.\n * @returns Response data for break lease operation.\n */\n public async breakLease(\n breakPeriod: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-breakLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const operationOptions: ContainerBreakLeaseOptionalParams = {\n abortSignal: options.abortSignal,\n breakPeriod,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n };\n return await this._containerOrBlobOperation.breakLease(operationOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { TransferProgressEvent } from \"@azure/core-http\";\nimport { Readable } from \"stream\";\n\nexport type ReadableStreamGetter = (offset: number) => Promise;\n\nexport interface RetriableReadableStreamOptions {\n /**\n * Max retry count (greater than or equal to 0), undefined or invalid value means no retry\n */\n maxRetryRequests?: number;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Debug purpose only. Used to inject an unexpected end to existing internal stream,\n * to test stream retry works well or not.\n *\n * When assign it to true, for next incoming \"data\" event of internal stream,\n * RetriableReadableStream will try to emit an \"end\" event to existing internal\n * stream to force it end and start retry from the breaking point.\n * The value will then update to \"undefined\", once the injection works.\n */\n doInjectErrorOnce?: boolean;\n\n /**\n * A threshold, not a limit. 
Dictates the amount of data that a stream buffers before it stops asking for more data.\n */\n highWaterMark?: number;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.\n */\nexport class RetriableReadableStream extends Readable {\n private start: number;\n private offset: number;\n private end: number;\n private getter: ReadableStreamGetter;\n private source: NodeJS.ReadableStream;\n private retries: number = 0;\n private maxRetryRequests: number;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private options: RetriableReadableStreamOptions;\n\n /**\n * Creates an instance of RetriableReadableStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param getter - A method calling downloading request returning\n * a new ReadableStream from specified offset\n * @param offset - Offset position in original data source to read\n * @param count - How much data in original data source to read\n * @param options -\n */\n public constructor(\n source: NodeJS.ReadableStream,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n super({ highWaterMark: options.highWaterMark });\n this.getter = getter;\n this.source = source;\n this.start = offset;\n this.offset = offset;\n this.end = offset + count - 1;\n this.maxRetryRequests =\n options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0;\n this.onProgress = options.onProgress;\n this.options = options;\n\n this.setSourceEventHandlers();\n }\n\n public _read(): void {\n this.source.resume();\n }\n\n private setSourceEventHandlers() {\n this.source.on(\"data\", this.sourceDataHandler);\n this.source.on(\"end\", this.sourceErrorOrEndHandler);\n this.source.on(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private removeSourceEventHandlers() {\n this.source.removeListener(\"data\", this.sourceDataHandler);\n this.source.removeListener(\"end\", this.sourceErrorOrEndHandler);\n this.source.removeListener(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private sourceDataHandler = (data: Buffer) => {\n if (this.options.doInjectErrorOnce) {\n this.options.doInjectErrorOnce = undefined;\n this.source.pause();\n this.source.removeAllListeners(\"data\");\n this.source.emit(\"end\");\n return;\n }\n\n // console.log(\n // `Offset: ${this.offset}, Received ${data.length} from internal stream`\n // );\n this.offset += data.length;\n if (this.onProgress) {\n this.onProgress({ loadedBytes: this.offset - this.start });\n }\n if (!this.push(data)) {\n this.source.pause();\n }\n };\n\n private sourceErrorOrEndHandler = (err?: Error) => {\n if (err && err.name === \"AbortError\") {\n this.destroy(err);\n return;\n }\n\n // console.log(\n // `Source stream emits end or error, offset: ${\n // this.offset\n // }, dest end : ${this.end}`\n // );\n this.removeSourceEventHandlers();\n if (this.offset - 1 === this.end) {\n this.push(null);\n } else if (this.offset <= this.end) {\n // console.log(\n // `retries: ${this.retries}, max retries: ${this.maxRetries}`\n // );\n if (this.retries < this.maxRetryRequests) {\n this.retries += 1;\n this.getter(this.offset)\n .then((newSource) => {\n this.source = newSource;\n this.setSourceEventHandlers();\n return;\n })\n .catch((error) => {\n this.destroy(error);\n });\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: received less data than required and reached 
maxRetires limitation. Received data offset: ${\n this.offset - 1\n }, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${\n this.maxRetryRequests\n }`\n )\n );\n }\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: Received more data than original request, data needed offset is ${\n this.end\n }, received offset: ${this.offset - 1}`\n )\n );\n }\n };\n\n _destroy(error: Error | null, callback: (error?: Error) => void): void {\n // remove listener from source and release source\n this.removeSourceEventHandlers();\n (this.source as Readable).destroy();\n\n callback(error === null ? undefined : error);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { HttpResponse, isNode } from \"@azure/core-http\";\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\n\nimport {\n BlobDownloadHeaders,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n} from \"./generatedModels\";\nimport { BlobDownloadResponseParsed, Metadata, ObjectReplicationPolicy } from \"./models\";\nimport {\n ReadableStreamGetter,\n RetriableReadableStream,\n RetriableReadableStreamOptions,\n} from \"./utils/RetriableReadableStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will\n * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot\n * trigger retries defined in pipeline retry policy.)\n *\n * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js\n * Readable stream.\n */\nexport class BlobDownloadResponse implements BlobDownloadResponseParsed {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. 
Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return this.originalResponse.copyCompletedOn;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. 
Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The number of tags associated with the blob\n *\n * @readonly\n */\n public get tagCount(): number | undefined {\n return this.originalResponse.tagCount;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * Returns the UTC date and time generated by the service that indicates the time at which the blob was\n * last read or written to.\n *\n * @readonly\n */\n public get lastAccessed(): Date | undefined {\n return this.originalResponse.lastAccessed;\n }\n\n /**\n * Returns the date and time the blob was created.\n *\n * @readonly\n */\n public get createdOn(): Date | undefined {\n return this.originalResponse.createdOn;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the Blob service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * Indicates the versionId of the downloaded blob version.\n *\n * @readonly\n */\n public get versionId(): string | undefined {\n return this.originalResponse.versionId;\n }\n\n /**\n * Indicates whether version of this blob is a current version.\n *\n * @readonly\n */\n public get isCurrentVersion(): boolean | undefined {\n return this.originalResponse.isCurrentVersion;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. 
If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * Object Replication Policy Id of the destination blob.\n *\n * @readonly\n */\n public get objectReplicationDestinationPolicyId(): string | undefined {\n return this.originalResponse.objectReplicationDestinationPolicyId;\n }\n\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n *\n * @readonly\n */\n public get objectReplicationSourceProperties(): ObjectReplicationPolicy[] | undefined {\n return this.originalResponse.objectReplicationSourceProperties;\n }\n\n /**\n * If this blob has been sealed.\n *\n * @readonly\n */\n public get isSealed(): boolean | undefined {\n return this.originalResponse.isSealed;\n }\n\n /**\n * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire.\n *\n * @readonly\n */\n public get immutabilityPolicyExpiresOn(): Date | undefined {\n return this.originalResponse.immutabilityPolicyExpiresOn;\n }\n\n /**\n * Indicates immutability policy mode.\n *\n * @readonly\n */\n public get immutabilityPolicyMode(): BlobImmutabilityPolicyMode | undefined {\n return this.originalResponse.immutabilityPolicyMode;\n }\n\n /**\n * Indicates if a legal hold is present on the blob.\n *\n * @readonly\n */\n public get legalHold(): boolean | undefined {\n return this.originalResponse.legalHold;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get contentAsBlob(): Promise | undefined {\n return this.originalResponse.blobBody;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will automatically retry when internal read stream unexpected ends.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobDownloadResponseParsed;\n private blobDownloadStream?: RetriableReadableStream;\n\n /**\n * Creates an instance of BlobDownloadResponse.\n *\n * @param originalResponse -\n * @param getter -\n * @param offset -\n * @param count -\n * @param options -\n */\n public constructor(\n originalResponse: BlobDownloadResponseParsed,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new RetriableReadableStream(\n this.originalResponse.readableStreamBody!,\n getter,\n offset,\n count,\n options\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const AVRO_SYNC_MARKER_SIZE: number = 16;\nexport const AVRO_INIT_BYTES: Uint8Array = new Uint8Array([79, 98, 106, 1]);\nexport const AVRO_CODEC_KEY: string = \"avro.codec\";\nexport const AVRO_SCHEMA_KEY: string = \"avro.schema\";\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of the Object usage and non-interfaces\n/* eslint-disable @typescript-eslint/ban-types, @azure/azure-sdk/ts-use-interface-parameters */\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { KeyValuePair } from \"./utils/utils.common\";\n\n/**\n * Options to configure the AvroParser read methods.\n * See {@link AvroParser.readFixedBytes}, {@link AvroParser.readMap} and etc.\n */\ninterface AvroParserReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroParser {\n /**\n * Reads a fixed number of bytes from the stream.\n *\n * @param stream -\n * @param length -\n * @param options -\n */\n public static async readFixedBytes(\n stream: AvroReadable,\n length: number,\n options: AvroParserReadOptions = {}\n ): Promise {\n const bytes = await stream.read(length, { abortSignal: options.abortSignal });\n if (bytes.length !== length) {\n throw new Error(\"Hit stream end.\");\n }\n return bytes;\n }\n\n /**\n * Reads a single byte from the stream.\n *\n * @param stream -\n * @param options -\n */\n private static async readByte(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const buf = await AvroParser.readFixedBytes(stream, 1, options);\n return buf[0];\n }\n\n // int and long are stored in variable-length zig-zag coding.\n // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt\n // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types\n private static async readZigZagLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n let zigZagEncoded = 0;\n let significanceInBit = 0;\n let byte, haveMoreByte, significanceInFloat;\n\n do {\n byte = await AvroParser.readByte(stream, options);\n haveMoreByte = byte & 0x80;\n zigZagEncoded |= (byte & 0x7f) << significanceInBit;\n significanceInBit += 7;\n } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers\n\n if (haveMoreByte) {\n // Switch 
to float arithmetic\n // eslint-disable-next-line no-self-assign\n zigZagEncoded = zigZagEncoded;\n significanceInFloat = 268435456; // 2 ** 28.\n do {\n byte = await AvroParser.readByte(stream, options);\n zigZagEncoded += (byte & 0x7f) * significanceInFloat;\n significanceInFloat *= 128; // 2 ** 7\n } while (byte & 0x80);\n\n const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2;\n if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {\n throw new Error(\"Integer overflow.\");\n }\n return res;\n }\n\n return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1);\n }\n\n public static async readLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readInt(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readNull(): Promise {\n return null;\n }\n\n public static async readBoolean(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const b = await AvroParser.readByte(stream, options);\n if (b === 1) {\n return true;\n } else if (b === 0) {\n return false;\n } else {\n throw new Error(\"Byte was not a boolean.\");\n }\n }\n\n public static async readFloat(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 4, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat32(0, true); // littleEndian = true\n }\n\n public static async readDouble(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 8, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat64(0, true); // littleEndian = true\n }\n\n public static async readBytes(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const size = await AvroParser.readLong(stream, options);\n if (size < 0) {\n throw new Error(\"Bytes size was negative.\");\n }\n\n return stream.read(size, { abortSignal: options.abortSignal });\n }\n\n public static async readString(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readBytes(stream, options);\n const utf8decoder = new TextDecoder();\n return utf8decoder.decode(u8arr);\n }\n\n private static async readMapPair(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const key = await AvroParser.readString(stream, options);\n // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter.\n const value = await readItemMethod(stream, options);\n return { key, value };\n }\n\n public static async readMap(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const readPairMethod = (\n s: AvroReadable,\n opts: AvroParserReadOptions = {}\n ): Promise> => {\n return AvroParser.readMapPair(s, readItemMethod, opts);\n };\n\n const pairs: KeyValuePair[] = await AvroParser.readArray(stream, readPairMethod, options);\n\n const dict: Record = {};\n for (const pair of pairs) {\n dict[pair.key] = pair.value;\n }\n return dict;\n }\n\n private 
static async readArray(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise {\n const items: T[] = [];\n for (\n let count = await AvroParser.readLong(stream, options);\n count !== 0;\n count = await AvroParser.readLong(stream, options)\n ) {\n if (count < 0) {\n // Ignore block sizes\n await AvroParser.readLong(stream, options);\n count = -count;\n }\n\n while (count--) {\n const item: T = await readItemMethod(stream, options);\n items.push(item);\n }\n }\n return items;\n }\n}\n\ninterface RecordField {\n name: string;\n type: string | ObjectSchema | (string | ObjectSchema)[]; // Unions may not immediately contain other unions.\n}\n\nenum AvroComplex {\n RECORD = \"record\",\n ENUM = \"enum\",\n ARRAY = \"array\",\n MAP = \"map\",\n UNION = \"union\",\n FIXED = \"fixed\",\n}\n\ninterface ObjectSchema {\n type: Exclude;\n name?: string;\n aliases?: string;\n fields?: RecordField[];\n symbols?: string[];\n values?: string;\n size?: number;\n}\n\nenum AvroPrimitive {\n NULL = \"null\",\n BOOLEAN = \"boolean\",\n INT = \"int\",\n LONG = \"long\",\n FLOAT = \"float\",\n DOUBLE = \"double\",\n BYTES = \"bytes\",\n STRING = \"string\",\n}\n\nexport abstract class AvroType {\n /**\n * Reads an object from the stream.\n */\n public abstract read(\n stream: AvroReadable,\n options?: AvroParserReadOptions\n ): Promise; // eslint-disable-line @typescript-eslint/ban-types\n\n /**\n * Determines the AvroType from the Avro Schema.\n */\n public static fromSchema(schema: string | Object): AvroType {\n if (typeof schema === \"string\") {\n return AvroType.fromStringSchema(schema);\n } else if (Array.isArray(schema)) {\n return AvroType.fromArraySchema(schema);\n } else {\n return AvroType.fromObjectSchema(schema as ObjectSchema);\n }\n }\n\n private static fromStringSchema(schema: string): AvroType {\n switch (schema) {\n case AvroPrimitive.NULL:\n case AvroPrimitive.BOOLEAN:\n case AvroPrimitive.INT:\n case AvroPrimitive.LONG:\n case AvroPrimitive.FLOAT:\n case AvroPrimitive.DOUBLE:\n case AvroPrimitive.BYTES:\n case AvroPrimitive.STRING:\n return new AvroPrimitiveType(schema as AvroPrimitive);\n default:\n throw new Error(`Unexpected Avro type ${schema}`);\n }\n }\n\n private static fromArraySchema(schema: any[]): AvroType {\n return new AvroUnionType(schema.map(AvroType.fromSchema));\n }\n\n private static fromObjectSchema(schema: ObjectSchema): AvroType {\n const type = schema.type;\n // Primitives can be defined as strings or objects\n try {\n return AvroType.fromStringSchema(type);\n } catch (err: any) {\n // eslint-disable-line no-empty\n }\n\n switch (type) {\n case AvroComplex.RECORD:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.name) {\n throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`);\n }\n\n // eslint-disable-next-line no-case-declarations\n const fields: Record = {};\n if (!schema.fields) {\n throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`);\n }\n for (const field of schema.fields) {\n fields[field.name] = AvroType.fromSchema(field.type);\n }\n return new AvroRecordType(fields, schema.name);\n case AvroComplex.ENUM:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.symbols) {\n throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`);\n }\n return new 
AvroEnumType(schema.symbols);\n case AvroComplex.MAP:\n if (!schema.values) {\n throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`);\n }\n return new AvroMapType(AvroType.fromSchema(schema.values));\n case AvroComplex.ARRAY: // Unused today\n case AvroComplex.FIXED: // Unused today\n default:\n throw new Error(`Unexpected Avro type ${type} in ${schema}`);\n }\n }\n}\n\nclass AvroPrimitiveType extends AvroType {\n private _primitive: AvroPrimitive;\n\n constructor(primitive: AvroPrimitive) {\n super();\n this._primitive = primitive;\n }\n\n public read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n switch (this._primitive) {\n case AvroPrimitive.NULL:\n return AvroParser.readNull();\n case AvroPrimitive.BOOLEAN:\n return AvroParser.readBoolean(stream, options);\n case AvroPrimitive.INT:\n return AvroParser.readInt(stream, options);\n case AvroPrimitive.LONG:\n return AvroParser.readLong(stream, options);\n case AvroPrimitive.FLOAT:\n return AvroParser.readFloat(stream, options);\n case AvroPrimitive.DOUBLE:\n return AvroParser.readDouble(stream, options);\n case AvroPrimitive.BYTES:\n return AvroParser.readBytes(stream, options);\n case AvroPrimitive.STRING:\n return AvroParser.readString(stream, options);\n default:\n throw new Error(\"Unknown Avro Primitive\");\n }\n }\n}\n\nclass AvroEnumType extends AvroType {\n private readonly _symbols: string[];\n\n constructor(symbols: string[]) {\n super();\n this._symbols = symbols;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const value = await AvroParser.readInt(stream, options);\n return this._symbols[value];\n }\n}\n\nclass AvroUnionType extends AvroType {\n private readonly _types: AvroType[];\n\n constructor(types: AvroType[]) {\n super();\n this._types = types;\n }\n\n public async read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n // eslint-disable-line @typescript-eslint/ban-types\n const typeIndex = await AvroParser.readInt(stream, options);\n return this._types[typeIndex].read(stream, options);\n }\n}\n\nclass AvroMapType extends AvroType {\n private readonly _itemType: AvroType;\n\n constructor(itemType: AvroType) {\n super();\n this._itemType = itemType;\n }\n\n public read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const readItemMethod = (\n s: AvroReadable,\n opts?: AvroParserReadOptions\n ): Promise => {\n return this._itemType.read(s, opts);\n };\n return AvroParser.readMap(stream, readItemMethod, options);\n }\n}\n\nclass AvroRecordType extends AvroType {\n private readonly _name: string;\n private readonly _fields: Record;\n\n constructor(fields: Record, name: string) {\n super();\n this._fields = fields;\n this._name = name;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const record: Record = {};\n record[\"$schema\"] = this._name;\n for (const key in this._fields) {\n if (Object.prototype.hasOwnProperty.call(this._fields, key)) {\n record[key] = await this._fields[key].read(stream, options);\n }\n }\n return record;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport interface KeyValuePair {\n key: string;\n value: T;\n}\n\nexport function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {\n if (a === b) return true;\n // eslint-disable-next-line eqeqeq\n if (a == null || b == null) return false;\n if (a.length !== b.length) return false;\n\n for (let i = 0; i 
< a.length; ++i) {\n if (a[i] !== b[i]) return false;\n }\n return true;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of non-interfaces\n/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */\n\nimport \"@azure/core-paging\";\nimport {\n AVRO_CODEC_KEY,\n AVRO_INIT_BYTES,\n AVRO_SCHEMA_KEY,\n AVRO_SYNC_MARKER_SIZE,\n} from \"./AvroConstants\";\nimport { AvroParser, AvroType } from \"./AvroParser\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { arraysEqual } from \"./utils/utils.common\";\n\n/**\n * Options to configure the {@link AvroReader.parseObjects} operation.\n */\nexport interface AvroParseOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroReader {\n private readonly _dataStream: AvroReadable;\n\n private readonly _headerStream: AvroReadable;\n\n private _syncMarker?: Uint8Array;\n\n private _metadata?: Record;\n\n private _itemType?: AvroType;\n\n private _itemsRemainingInBlock?: number;\n\n // Remembers where we started if partial data stream was provided.\n private readonly _initialBlockOffset: number;\n\n /// The byte offset within the Avro file (both header and data)\n /// of the start of the current block.\n private _blockOffset: number;\n public get blockOffset(): number {\n return this._blockOffset;\n }\n\n private _objectIndex: number;\n public get objectIndex(): number {\n return this._objectIndex;\n }\n\n private _initialized: boolean;\n\n constructor(dataStream: AvroReadable);\n\n constructor(\n dataStream: AvroReadable,\n headerStream: AvroReadable,\n currentBlockOffset: number,\n indexWithinCurrentBlock: number\n );\n\n constructor(\n dataStream: AvroReadable,\n headerStream?: AvroReadable,\n currentBlockOffset?: number,\n indexWithinCurrentBlock?: number\n ) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n\n private async initialize(options: AvroParseOptions = {}): Promise {\n const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal,\n });\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal,\n });\n\n // Validate codec\n const codec = this._metadata![AVRO_CODEC_KEY];\n if (!(codec === undefined || codec === null || codec === \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n\n // The 16-byte, randomly-generated sync marker for this file.\n this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n // Parse the schema\n const schema = JSON.parse(this._metadata![AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n\n if (this._blockOffset === 0) {\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n 
}\n\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n // skip block length\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n\n this._initialized = true;\n if (this._objectIndex && this._objectIndex > 0) {\n for (let i = 0; i < this._objectIndex; i++) {\n await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });\n this._itemsRemainingInBlock!--;\n }\n }\n }\n\n public hasNext(): boolean {\n return !this._initialized || this._itemsRemainingInBlock! > 0;\n }\n\n public async *parseObjects(\n options: AvroParseOptions = {}\n ): AsyncIterableIterator | null> {\n if (!this._initialized) {\n await this.initialize(options);\n }\n\n while (this.hasNext()) {\n const result = await this._itemType!.read(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n\n this._itemsRemainingInBlock!--;\n this._objectIndex!++;\n\n if (this._itemsRemainingInBlock === 0) {\n const marker = await AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n\n if (!arraysEqual(this._syncMarker!, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n\n try {\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n } catch (err: any) {\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n }\n\n if (this._itemsRemainingInBlock! > 0) {\n // Ignore block size\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n }\n }\n yield result;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options to configure the {@link AvroReadable.read} operation.\n */\nexport interface AvroReadableReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport abstract class AvroReadable {\n public abstract get position(): number;\n public abstract read(size: number, options?: AvroReadableReadOptions): Promise;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro stream was aborted.\");\n\nexport class AvroReadableFromStream extends AvroReadable {\n private _position: number;\n private _readable: NodeJS.ReadableStream;\n\n private toUint8Array(data: string | Buffer): Uint8Array {\n if (typeof data === \"string\") {\n return Buffer.from(data);\n }\n return data;\n }\n\n constructor(readable: NodeJS.ReadableStream) {\n super();\n this._readable = readable;\n this._position = 0;\n }\n public get position(): number {\n return this._position;\n }\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n if (options.abortSignal?.aborted) {\n throw ABORT_ERROR;\n }\n\n if (size < 0) {\n throw new Error(`size parameter should be positive: ${size}`);\n }\n\n if (size === 0) {\n return new Uint8Array();\n }\n\n if (!this._readable.readable) {\n throw new Error(\"Stream no longer 
readable.\");\n }\n // See if there is already enough data.\n const chunk = this._readable.read(size);\n if (chunk) {\n this._position += chunk.length;\n // chunk.length maybe less than desired size if the stream ends.\n return this.toUint8Array(chunk);\n } else {\n // register callback to wait for enough data to read\n return new Promise((resolve, reject) => {\n /* eslint-disable @typescript-eslint/no-use-before-define */\n const cleanUp: () => void = () => {\n this._readable.removeListener(\"readable\", readableCallback);\n this._readable.removeListener(\"error\", rejectCallback);\n this._readable.removeListener(\"end\", rejectCallback);\n this._readable.removeListener(\"close\", rejectCallback);\n\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n };\n\n const readableCallback: () => void = () => {\n const callbackChunk = this._readable.read(size);\n if (callbackChunk) {\n this._position += callbackChunk.length;\n cleanUp();\n // callbackChunk.length maybe less than desired size if the stream ends.\n resolve(this.toUint8Array(callbackChunk));\n }\n };\n\n const rejectCallback: () => void = () => {\n cleanUp();\n reject();\n };\n\n const abortHandler: () => void = () => {\n cleanUp();\n reject(ABORT_ERROR);\n };\n\n this._readable.on(\"readable\", readableCallback);\n this._readable.once(\"error\", rejectCallback);\n this._readable.once(\"end\", rejectCallback);\n this._readable.once(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal!.addEventListener(\"abort\", abortHandler);\n }\n /* eslint-enable @typescript-eslint/no-use-before-define */\n });\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable } from \"stream\";\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TransferProgressEvent } from \"@azure/core-http\";\n\nimport { AvroReadableFromStream, AvroReader } from \"../../../storage-internal-avro/src\";\nimport { BlobQueryError } from \"../Clients\";\n\nexport interface BlobQuickQueryStreamOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.\n */\nexport class BlobQuickQueryStream extends Readable {\n private source: NodeJS.ReadableStream;\n private avroReader: AvroReader;\n private avroIter: AsyncIterableIterator;\n private avroPaused: boolean = true;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private onError?: (error: BlobQueryError) => void;\n\n /**\n * Creates an instance of BlobQuickQueryStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param options -\n */\n public constructor(source: NodeJS.ReadableStream, options: BlobQuickQueryStreamOptions = {}) {\n super();\n this.source = source;\n this.onProgress = options.onProgress;\n this.onError = options.onError;\n this.avroReader = new AvroReader(new AvroReadableFromStream(this.source));\n this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal 
});\n }\n\n public _read(): void {\n if (this.avroPaused) {\n this.readInternal().catch((err) => {\n this.emit(\"error\", err);\n });\n }\n }\n\n private async readInternal() {\n this.avroPaused = false;\n let avroNext;\n do {\n avroNext = await this.avroIter.next();\n if (avroNext.done) {\n break;\n }\n const obj = avroNext.value;\n const schema = (obj as any).$schema;\n if (typeof schema !== \"string\") {\n throw Error(\"Missing schema in avro record.\");\n }\n\n switch (schema) {\n case \"com.microsoft.azure.storage.queryBlobContents.resultData\":\n {\n const data = (obj as any).data;\n if (data instanceof Uint8Array === false) {\n throw Error(\"Invalid data in avro result record.\");\n }\n if (!this.push(Buffer.from(data))) {\n this.avroPaused = true;\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.progress\":\n {\n const bytesScanned = (obj as any).bytesScanned;\n if (typeof bytesScanned !== \"number\") {\n throw Error(\"Invalid bytesScanned in avro progress record.\");\n }\n if (this.onProgress) {\n this.onProgress({ loadedBytes: bytesScanned });\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.end\":\n if (this.onProgress) {\n const totalBytes = (obj as any).totalBytes;\n if (typeof totalBytes !== \"number\") {\n throw Error(\"Invalid totalBytes in avro end record.\");\n }\n this.onProgress({ loadedBytes: totalBytes });\n }\n this.push(null);\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.error\":\n if (this.onError) {\n const fatal = (obj as any).fatal;\n if (typeof fatal !== \"boolean\") {\n throw Error(\"Invalid fatal in avro error record.\");\n }\n const name = (obj as any).name;\n if (typeof name !== \"string\") {\n throw Error(\"Invalid name in avro error record.\");\n }\n const description = (obj as any).description;\n if (typeof description !== \"string\") {\n throw Error(\"Invalid description in avro error record.\");\n }\n const position = (obj as any).position;\n if (typeof position !== \"number\") {\n throw Error(\"Invalid position in avro error record.\");\n }\n this.onError({\n position,\n name,\n isFatal: fatal,\n description,\n });\n }\n break;\n default:\n throw Error(`Unknown schema ${schema} in avro progress record.`);\n }\n } while (!avroNext.done && !this.avroPaused);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse, isNode } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStream, BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' 
header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. 
Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. 
This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n return undefined;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will parse avor data returned by blob query.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n private blobDownloadStream?: BlobQuickQueryStream;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new BlobQuickQueryStream(\n this.originalResponse.readableStreamBody!,\n options\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { CancelOnProgress, PollOperationState } from \"@azure/core-lro\";\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\nimport {\n LeaseAccessConditions,\n SequenceNumberAccessConditions,\n AppendPositionAccessConditions,\n AccessTier,\n CpkInfo,\n BlobDownloadResponseModel,\n} from \"./generatedModels\";\nimport { EncryptionAlgorithmAES25 } from \"./utils/constants\";\n\n/**\n * Blob tags.\n */\nexport type Tags = Record;\n\n/**\n * A map of name-value pairs to associate with the resource.\n */\nexport interface Metadata {\n /**\n * A name-value pair.\n */\n [propertyName: string]: string;\n}\n\n/**\n * standard HTTP conditional headers and tags condition.\n */\nexport interface ModifiedAccessConditions\n extends MatchConditions,\n ModificationConditions,\n TagConditions {}\n\n/**\n * standard HTTP conditional headers, tags condition and lease condition\n */\nexport interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions {}\n\n/**\n * Conditions to add to the creation of this page blob.\n */\nexport interface PageBlobRequestConditions\n extends BlobRequestConditions,\n SequenceNumberAccessConditions {}\n\n/**\n * Conditions to add to the creation of this append blob.\n */\nexport interface AppendBlobRequestConditions\n extends BlobRequestConditions,\n AppendPositionAccessConditions {}\n\n/**\n * Specifies HTTP options for conditional requests based on modification time.\n */\nexport interface ModificationConditions {\n /**\n * Specify this header value to operate only on a blob if it has been modified since the\n * specified date/time.\n */\n ifModifiedSince?: Date;\n /**\n * Specify this header value to operate only on a blob if it has not been modified since the\n * specified date/time.\n */\n ifUnmodifiedSince?: Date;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on ETag matching.\n */\nexport interface MatchConditions {\n /**\n * Specify an ETag value to operate only on blobs with a matching value.\n */\n ifMatch?: string;\n /**\n * Specify an ETag value to operate only on blobs without a matching value.\n */\n ifNoneMatch?: string;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on blob tags.\n */\nexport interface TagConditions {\n /**\n * Optional SQL statement to apply to the tags of the blob.\n */\n tagConditions?: string;\n}\n\n/**\n * Conditions to meet for the container.\n */\nexport interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions {}\n\n/**\n * Represents the access tier on a blob.\n * For detailed information about block blob level tiering see {@link 
https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.}\n */\nexport enum BlockBlobTier {\n /**\n * Optimized for storing data that is accessed frequently.\n */\n Hot = \"Hot\",\n /**\n * Optimized for storing data that is infrequently accessed and stored for at least 30 days.\n */\n Cool = \"Cool\",\n /**\n * Optimized for storing data that is rarely accessed.\n */\n Cold = \"Cold\",\n /**\n * Optimized for storing data that is rarely accessed and stored for at least 180 days\n * with flexible latency requirements (on the order of hours).\n */\n Archive = \"Archive\",\n}\n\n/**\n * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts.\n * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here}\n * for detailed information on the corresponding IOPS and throughput per PageBlobTier.\n */\nexport enum PremiumPageBlobTier {\n /**\n * P4 Tier.\n */\n P4 = \"P4\",\n /**\n * P6 Tier.\n */\n P6 = \"P6\",\n /**\n * P10 Tier.\n */\n P10 = \"P10\",\n /**\n * P15 Tier.\n */\n P15 = \"P15\",\n /**\n * P20 Tier.\n */\n P20 = \"P20\",\n /**\n * P30 Tier.\n */\n P30 = \"P30\",\n /**\n * P40 Tier.\n */\n P40 = \"P40\",\n /**\n * P50 Tier.\n */\n P50 = \"P50\",\n /**\n * P60 Tier.\n */\n P60 = \"P60\",\n /**\n * P70 Tier.\n */\n P70 = \"P70\",\n /**\n * P80 Tier.\n */\n P80 = \"P80\",\n}\n\nexport function toAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string | undefined\n): AccessTier | undefined {\n if (tier === undefined) {\n return undefined;\n }\n\n return tier as AccessTier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).\n}\n\nexport function ensureCpkIfSpecified(cpk: CpkInfo | undefined, isHttps: boolean): void {\n if (cpk && !isHttps) {\n throw new RangeError(\"Customer-provided encryption key must be used over HTTPS.\");\n }\n\n if (cpk && !cpk.encryptionAlgorithm) {\n cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;\n }\n}\n\n/**\n * Specifies the Replication Status of a blob. This is used when a storage account has\n * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}.\n */\nexport type ObjectReplicationStatus = \"complete\" | \"failed\";\n\n/**\n * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob.\n * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}.\n */\nexport interface ObjectReplicationRule {\n /**\n * The Object Replication Rule ID.\n */\n ruleId: string;\n\n /**\n * The Replication Status\n */\n replicationStatus: ObjectReplicationStatus;\n}\n\n/**\n * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}.\n * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the\n * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses\n * (e.g. 
{@link BlobProperties.ObjectReplicationDestinationPolicyId}.\n */\nexport interface ObjectReplicationPolicy {\n /**\n * The Object Replication Policy ID.\n */\n policyId: string;\n\n /**\n * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID.\n */\n rules: ObjectReplicationRule[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadResponseParsed extends BlobDownloadResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * The type of a {@link BlobQueryArrowField}.\n */\nexport type BlobQueryArrowFieldType =\n | \"int64\"\n | \"bool\"\n | \"timestamp[ms]\"\n | \"string\"\n | \"double\"\n | \"decimal\";\n\n/**\n * Describe a field in {@link BlobQueryArrowConfiguration}.\n */\nexport interface BlobQueryArrowField {\n /**\n * The type of the field.\n */\n type: BlobQueryArrowFieldType;\n\n /**\n * The name of the field.\n */\n name?: string;\n\n /**\n * The precision of the field. Required if type is \"decimal\".\n */\n precision?: number;\n\n /**\n * The scale of the field. Required if type is is \"decimal\".\n */\n scale?: number;\n}\n\n/**\n * Describe immutable policy for blob.\n */\nexport interface BlobImmutabilityPolicy {\n /**\n * Specifies the date time when the blobs immutability policy is set to expire.\n */\n expiriesOn?: Date;\n /**\n * Specifies the immutability policy mode to set on the blob.\n */\n policyMode?: BlobImmutabilityPolicyMode;\n}\n\n/**\n * Represents authentication information in Authorization, ProxyAuthorization,\n * WWW-Authenticate, and Proxy-Authenticate header values.\n */\nexport interface HttpAuthorization {\n /**\n * The scheme to use for authorization.\n */\n scheme: string;\n\n /**\n * the credentials containing the authentication information of the user agent for the resource being requested.\n */\n value: string;\n}\n\n/**\n * Defines the known cloud audiences for Storage.\n */\nexport enum StorageBlobAudience {\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Storage.\n */\n StorageOAuthScopes = \"https://storage.azure.com/.default\",\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Disk.\n */\n DiskComputeOAuthScopes = \"https://disk.compute.azure.com/.default\",\n}\n\nexport function getBlobServiceAccountAudience(storageAccountName: string): string {\n return `https://${storageAccountName}.blob.core.windows.net/.default`;\n}\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\nexport interface PollerLikeWithCancellation, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true 
if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\nimport {\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffResponseModel,\n} from \"./generatedModels\";\nimport { Range } from \"./Range\";\n\n/**\n * List of page ranges for a blob.\n */\nexport interface PageList {\n /**\n * Valid non-overlapping page ranges.\n */\n pageRange?: Range[];\n /**\n * Present if the prevSnapshot parameter was specified and there were cleared\n * pages between the previous snapshot and the target snapshot.\n */\n clearRange?: Range[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffResponse\n extends PageList,\n PageBlobGetPageRangesDiffHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nexport function rangeResponseFromModel(\n response: PageBlobGetPageRangesResponseModel | PageBlobGetPageRangesDiffResponseModel\n): PageBlobGetPageRangesResponse | PageBlobGetPageRangesDiffResponse {\n const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n return {\n ...response,\n pageRange,\n clearRange,\n _response: {\n 
...response._response,\n parsedBody: {\n pageRange,\n clearRange,\n },\n },\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { delay } from \"@azure/core-http\";\nimport { PollOperation, PollOperationState, Poller } from \"@azure/core-lro\";\nimport { BlobClient, BlobStartCopyFromURLOptions, BlobBeginCopyFromURLResponse } from \"../Clients\";\n\n/**\n * Defines the operations from a {@link BlobClient} that are needed for the poller\n * returned by {@link BlobClient.beginCopyFromURL} to work.\n */\nexport type CopyPollerBlobClient = Pick & {\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptions\n ): Promise;\n};\n\n/**\n * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}.\n *\n * This state is passed into the user-specified `onProgress` callback\n * whenever copy progress is detected.\n */\nexport interface BlobBeginCopyFromUrlPollState\n extends PollOperationState {\n /**\n * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}.\n */\n readonly blobClient: CopyPollerBlobClient;\n /**\n * The copyId that identifies the in-progress blob copy.\n */\n copyId?: string;\n /**\n * the progress of the blob copy as reported by the service.\n */\n copyProgress?: string;\n /**\n * The source URL provided in {@link BlobClient.beginCopyFromURL}.\n */\n copySource: string;\n /**\n * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call.\n * This is exposed for the poller and should not be modified directly.\n */\n readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * The PollOperation responsible for:\n * - performing the initial startCopyFromURL\n * - checking the copy status via getProperties\n * - cancellation via abortCopyFromURL\n * @hidden\n */\nexport interface BlobBeginCopyFromURLPollOperation\n extends PollOperation {}\n\n/**\n * The set of options used to configure the poller.\n * This is an internal interface populated by {@link BlobClient.beginCopyFromURL}.\n *\n * @hidden\n */\nexport interface BlobBeginCopyFromUrlPollerOptions {\n blobClient: CopyPollerBlobClient;\n copySource: string;\n intervalInMs?: number;\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n resumeFrom?: string;\n startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * This is the poller returned by {@link BlobClient.beginCopyFromURL}.\n * This can not be instantiated directly outside of this package.\n *\n * @hidden\n */\nexport class BlobBeginCopyFromUrlPoller extends Poller<\n BlobBeginCopyFromUrlPollState,\n BlobBeginCopyFromURLResponse\n> {\n public intervalInMs: number;\n\n constructor(options: BlobBeginCopyFromUrlPollerOptions) {\n const {\n blobClient,\n copySource,\n intervalInMs = 15000,\n onProgress,\n resumeFrom,\n startCopyFromURLOptions,\n } = options;\n\n let state: BlobBeginCopyFromUrlPollState | undefined;\n\n if (resumeFrom) {\n state = JSON.parse(resumeFrom).state;\n }\n\n const operation = makeBlobBeginCopyFromURLPollOperation({\n ...state,\n blobClient,\n copySource,\n startCopyFromURLOptions,\n });\n\n super(operation);\n\n if (typeof onProgress === \"function\") {\n this.onProgress(onProgress);\n }\n\n this.intervalInMs = intervalInMs;\n }\n\n public delay(): Promise {\n return delay(this.intervalInMs);\n }\n}\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * 
This affects what `this` refers to.\n * @hidden\n */\nconst cancel: BlobBeginCopyFromURLPollOperation[\"cancel\"] = async function cancel(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n) {\n const state = this.state;\n const { copyId } = state;\n if (state.isCompleted) {\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n if (!copyId) {\n state.isCancelled = true;\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n await state.blobClient.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n });\n state.isCancelled = true;\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst update: BlobBeginCopyFromURLPollOperation[\"update\"] = async function update(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n): Promise {\n const state = this.state;\n const { blobClient, copySource, startCopyFromURLOptions } = state;\n\n if (!state.isStarted) {\n state.isStarted = true;\n const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions);\n\n // copyId is needed to abort\n state.copyId = result.copyId;\n if (result.copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n } else if (!state.isCompleted) {\n try {\n const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal });\n const { copyStatus, copyProgress } = result;\n const prevCopyProgress = state.copyProgress;\n if (copyProgress) {\n state.copyProgress = copyProgress;\n }\n if (\n copyStatus === \"pending\" &&\n copyProgress !== prevCopyProgress &&\n typeof options.fireProgress === \"function\"\n ) {\n // trigger in setTimeout, or swallow error?\n options.fireProgress(state);\n } else if (copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n } else if (copyStatus === \"failed\") {\n state.error = new Error(\n `Blob copy failed with reason: \"${result.copyStatusDescription || \"unknown\"}\"`\n );\n state.isCompleted = true;\n }\n } catch (err: any) {\n state.error = err;\n state.isCompleted = true;\n }\n }\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst toString: BlobBeginCopyFromURLPollOperation[\"toString\"] = function toString(\n this: BlobBeginCopyFromURLPollOperation\n) {\n return JSON.stringify({ state: this.state }, (key, value) => {\n // remove blobClient from serialized state since a client can't be hydrated from this info.\n if (key === \"blobClient\") {\n return undefined;\n }\n return value;\n });\n};\n\n/**\n * Creates a poll operation given the provided state.\n * @hidden\n */\nfunction makeBlobBeginCopyFromURLPollOperation(\n state: BlobBeginCopyFromUrlPollState\n): BlobBeginCopyFromURLPollOperation {\n return {\n state: { ...state },\n cancel,\n toString,\n update,\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Range for Blob Service Operations.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations\n */\nexport 
interface Range {\n /**\n * StartByte, larger than or equal 0.\n */\n offset: number;\n /**\n * Optional. Count of bytes, larger than 0.\n * If not provided, will return bytes from offset to the end.\n */\n count?: number;\n}\n\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nexport function rangeToString(iRange: Range): string {\n if (iRange.offset < 0) {\n throw new RangeError(`Range.offset cannot be smaller than 0.`);\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\n `Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`\n );\n }\n return iRange.count\n ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`\n : `bytes=${iRange.offset}-`;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// In browser, during webpack or browserify bundling, this module will be replaced by 'events'\n// https://github.com/Gozala/events\nimport { EventEmitter } from \"events\";\n\n/**\n * Operation is an async function to be executed and managed by Batch.\n */\nexport declare type Operation = () => Promise;\n\n/**\n * States for Batch.\n */\nenum BatchStates {\n Good,\n Error,\n}\n\n/**\n * Batch provides basic parallel execution with concurrency limits.\n * Will stop execute left operations when one of the executed operation throws an error.\n * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.\n */\nexport class Batch {\n /**\n * Concurrency. Must be lager than 0.\n */\n private concurrency: number;\n\n /**\n * Number of active operations under execution.\n */\n private actives: number = 0;\n\n /**\n * Number of completed operations under execution.\n */\n private completed: number = 0;\n\n /**\n * Offset of next operation to be executed.\n */\n private offset: number = 0;\n\n /**\n * Operation array to be executed.\n */\n private operations: Operation[] = [];\n\n /**\n * States of Batch. When an error happens, state will turn into error.\n * Batch will stop execute left operations.\n */\n private state: BatchStates = BatchStates.Good;\n\n /**\n * A private emitter used to pass events inside this class.\n */\n private emitter: EventEmitter;\n\n /**\n * Creates an instance of Batch.\n * @param concurrency -\n */\n public constructor(concurrency: number = 5) {\n if (concurrency < 1) {\n throw new RangeError(\"concurrency must be larger than 0\");\n }\n this.concurrency = concurrency;\n this.emitter = new EventEmitter();\n }\n\n /**\n * Add a operation into queue.\n *\n * @param operation -\n */\n public addOperation(operation: Operation): void {\n this.operations.push(async () => {\n try {\n this.actives++;\n await operation();\n this.actives--;\n this.completed++;\n this.parallelExecute();\n } catch (error: any) {\n this.emitter.emit(\"error\", error);\n }\n });\n }\n\n /**\n * Start execute operations in the queue.\n *\n */\n public async do(): Promise {\n if (this.operations.length === 0) {\n return Promise.resolve();\n }\n\n this.parallelExecute();\n\n return new Promise((resolve, reject) => {\n this.emitter.on(\"finish\", resolve);\n\n this.emitter.on(\"error\", (error) => {\n this.state = BatchStates.Error;\n reject(error);\n });\n });\n }\n\n /**\n * Get next operation to be executed. 
Return null when reaching ends.\n *\n */\n private nextOperation(): Operation | null {\n if (this.offset < this.operations.length) {\n return this.operations[this.offset++];\n }\n return null;\n }\n\n /**\n * Start execute operations. One one the most important difference between\n * this method with do() is that do() wraps as an sync method.\n *\n */\n private parallelExecute(): void {\n if (this.state === BatchStates.Error) {\n return;\n }\n\n if (this.completed >= this.operations.length) {\n this.emitter.emit(\"finish\");\n return;\n }\n\n while (this.actives < this.concurrency) {\n const operation = this.nextOperation();\n if (operation) {\n operation();\n } else {\n return;\n }\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable, ReadableOptions } from \"stream\";\n\n/**\n * Options to configure the BuffersStream.\n */\nexport interface BuffersStreamOptions extends ReadableOptions {}\n\n/**\n * This class generates a readable stream from the data in an array of buffers.\n */\nexport class BuffersStream extends Readable {\n /**\n * The offset of data to be read in the current buffer.\n */\n private byteOffsetInCurrentBuffer: number;\n\n /**\n * The index of buffer to be read in the array of buffers.\n */\n private bufferIndex: number;\n\n /**\n * The total length of data already read.\n */\n private pushedBytesLength: number;\n\n /**\n * Creates an instance of BuffersStream that will emit the data\n * contained in the array of buffers.\n *\n * @param buffers - Array of buffers containing the data\n * @param byteLength - The total length of data contained in the buffers\n */\n constructor(\n private buffers: Buffer[],\n private byteLength: number,\n options?: BuffersStreamOptions\n ) {\n super(options);\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex = 0;\n this.pushedBytesLength = 0;\n\n // check byteLength is no larger than buffers[] total length\n let buffersLength = 0;\n for (const buf of this.buffers) {\n buffersLength += buf.byteLength;\n }\n if (buffersLength < this.byteLength) {\n throw new Error(\"Data size shouldn't be larger than the total length of buffers.\");\n }\n }\n\n /**\n * Internal _read() that will be called when the stream wants to pull more data in.\n *\n * @param size - Optional. 
The size of data to be read\n */\n public _read(size?: number) {\n if (this.pushedBytesLength >= this.byteLength) {\n this.push(null);\n }\n\n if (!size) {\n size = this.readableHighWaterMark;\n }\n\n const outBuffers: Buffer[] = [];\n let i = 0;\n while (i < size && this.pushedBytesLength < this.byteLength) {\n // The last buffer may be longer than the data it contains.\n const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;\n const remainingCapacityInThisBuffer =\n this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;\n const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);\n if (remaining > size - i) {\n // chunkSize = size - i\n const end = this.byteOffsetInCurrentBuffer + size - i;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n this.pushedBytesLength += size - i;\n this.byteOffsetInCurrentBuffer = end;\n i = size;\n break;\n } else {\n // chunkSize = remaining\n const end = this.byteOffsetInCurrentBuffer + remaining;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n if (remaining === remainingCapacityInThisBuffer) {\n // this.buffers[this.bufferIndex] used up, shift to next one\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex++;\n } else {\n this.byteOffsetInCurrentBuffer = end;\n }\n this.pushedBytesLength += remaining;\n i += remaining;\n }\n }\n\n if (outBuffers.length > 1) {\n this.push(Buffer.concat(outBuffers));\n } else if (outBuffers.length === 1) {\n this.push(outBuffers[0]);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BuffersStream } from \"./BuffersStream\";\nimport { Readable } from \"stream\";\n\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n// Can't use import as Typescript doesn't recognize \"buffer\".\nconst maxBufferLength = require(\"buffer\").constants.MAX_LENGTH;\n\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n */\nexport class PooledBuffer {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n */\n private buffers: Buffer[] = [];\n\n /**\n * The total size of internal buffers.\n */\n private readonly capacity: number;\n\n /**\n * The total size of data contained in internal buffers.\n */\n private _size: number;\n\n /**\n * The size of the data contained in the pooled buffers.\n */\n public get size(): number {\n return this._size;\n }\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated but contains no data.\n * Users may call the {@link PooledBuffer.fill} method to fill this\n * pooled buffer with data.\n *\n * @param capacity - Total capacity of the internal buffers\n */\n constructor(capacity: number);\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated and filled with data in the input buffers serially\n * with respect to the total length.\n *\n * @param capacity - Total capacity 
of the internal buffers\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n */\n constructor(capacity: number, buffers: Buffer[], totalLength: number);\n constructor(capacity: number, buffers?: Buffer[], totalLength?: number) {\n this.capacity = capacity;\n this._size = 0;\n\n // allocate\n const bufferNum = Math.ceil(capacity / maxBufferLength);\n for (let i = 0; i < bufferNum; i++) {\n let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n\n if (buffers) {\n this.fill(buffers, totalLength!);\n }\n }\n\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n *\n */\n public fill(buffers: Buffer[], totalLength: number) {\n this._size = Math.min(this.capacity, totalLength);\n\n let i = 0,\n j = 0,\n targetOffset = 0,\n sourceOffset = 0,\n totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n const source = buffers[i];\n const target = this.buffers[j];\n const copiedNum = source.copy(target, targetOffset, sourceOffset);\n\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n }\n\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n */\n public getReadableStream(): Readable {\n return new BuffersStream(this.buffers, this.size);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EventEmitter } from \"events\";\nimport { Readable } from \"stream\";\nimport { PooledBuffer } from \"./PooledBuffer\";\n\n/**\n * OutgoingHandler is an async function triggered by BufferScheduler.\n */\nexport declare type OutgoingHandler = (\n body: () => NodeJS.ReadableStream,\n length: number,\n offset?: number\n) => Promise;\n\n/**\n * This class accepts a Node.js Readable stream as input, and keeps reading data\n * from the stream into the internal buffer structure, until it reaches maxBuffers.\n * Every available buffer will try to trigger outgoingHandler.\n *\n * The internal buffer structure includes an incoming buffer array, and a outgoing\n * buffer array. The incoming buffer array includes the \"empty\" buffers can be filled\n * with new incoming data. The outgoing array includes the filled buffers to be\n * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.\n *\n * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING\n *\n * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * 1. Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n * 2. 
concurrency should set a smaller value than maxBuffers, which is helpful to\n * reduce the possibility when a outgoing handler waits for the stream data.\n * in this situation, outgoing handlers are blocked.\n * Outgoing queue shouldn't be empty.\n */\nexport class BufferScheduler {\n /**\n * Size of buffers in incoming and outgoing queues. This class will try to align\n * data read from Readable stream into buffer chunks with bufferSize defined.\n */\n private readonly bufferSize: number;\n\n /**\n * How many buffers can be created or maintained.\n */\n private readonly maxBuffers: number;\n\n /**\n * A Node.js Readable stream.\n */\n private readonly readable: Readable;\n\n /**\n * OutgoingHandler is an async function triggered by BufferScheduler when there\n * are available buffers in outgoing array.\n */\n private readonly outgoingHandler: OutgoingHandler;\n\n /**\n * An internal event emitter.\n */\n private readonly emitter: EventEmitter = new EventEmitter();\n\n /**\n * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers)\n */\n private readonly concurrency: number;\n\n /**\n * An internal offset marker to track data offset in bytes of next outgoingHandler.\n */\n private offset: number = 0;\n\n /**\n * An internal marker to track whether stream is end.\n */\n private isStreamEnd: boolean = false;\n\n /**\n * An internal marker to track whether stream or outgoingHandler returns error.\n */\n private isError: boolean = false;\n\n /**\n * How many handlers are executing.\n */\n private executingOutgoingHandlers: number = 0;\n\n /**\n * Encoding of the input Readable stream which has string data type instead of Buffer.\n */\n private encoding?: BufferEncoding;\n\n /**\n * How many buffers have been allocated.\n */\n private numBuffers: number = 0;\n\n /**\n * Because this class doesn't know how much data every time stream pops, which\n * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache\n * data received from the stream, when data in unresolvedDataArray exceeds the\n * blockSize defined, it will try to concat a blockSize of buffer, fill into available\n * buffers from incoming and push to outgoing array.\n */\n private unresolvedDataArray: Buffer[] = [];\n\n /**\n * How much data consisted in unresolvedDataArray.\n */\n private unresolvedLength: number = 0;\n\n /**\n * The array includes all the available buffers can be used to fill data from stream.\n */\n private incoming: PooledBuffer[] = [];\n\n /**\n * The array (queue) includes all the buffers filled from stream data.\n */\n private outgoing: PooledBuffer[] = [];\n\n /**\n * Creates an instance of BufferScheduler.\n *\n * @param readable - A Node.js Readable stream\n * @param bufferSize - Buffer size of every maintained buffer\n * @param maxBuffers - How many buffers can be allocated\n * @param outgoingHandler - An async function scheduled to be\n * triggered when a buffer fully filled\n * with stream data\n * @param concurrency - Concurrency of executing outgoingHandlers (>0)\n * @param encoding - [Optional] Encoding of Readable stream when it's a string stream\n */\n constructor(\n readable: Readable,\n bufferSize: number,\n maxBuffers: number,\n outgoingHandler: OutgoingHandler,\n concurrency: number,\n encoding?: BufferEncoding\n ) {\n if (bufferSize <= 0) {\n throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);\n }\n\n if (maxBuffers <= 0) {\n throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);\n }\n\n if (concurrency <= 0) {\n throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);\n }\n\n this.bufferSize = bufferSize;\n this.maxBuffers = maxBuffers;\n this.readable = readable;\n this.outgoingHandler = outgoingHandler;\n this.concurrency = concurrency;\n this.encoding = encoding;\n }\n\n /**\n * Start the scheduler, will return error when stream of any of the outgoingHandlers\n * returns error.\n *\n */\n public async do(): Promise {\n return new Promise((resolve, reject) => {\n this.readable.on(\"data\", (data) => {\n data = typeof data === \"string\" ? Buffer.from(data, this.encoding) : data;\n this.appendUnresolvedData(data);\n\n if (!this.resolveData()) {\n this.readable.pause();\n }\n });\n\n this.readable.on(\"error\", (err) => {\n this.emitter.emit(\"error\", err);\n });\n\n this.readable.on(\"end\", () => {\n this.isStreamEnd = true;\n this.emitter.emit(\"checkEnd\");\n });\n\n this.emitter.on(\"error\", (err) => {\n this.isError = true;\n this.readable.pause();\n reject(err);\n });\n\n this.emitter.on(\"checkEnd\", () => {\n if (this.outgoing.length > 0) {\n this.triggerOutgoingHandlers();\n return;\n }\n\n if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {\n if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {\n const buffer = this.shiftBufferFromUnresolvedDataArray();\n this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)\n .then(resolve)\n .catch(reject);\n } else if (this.unresolvedLength >= this.bufferSize) {\n return;\n } else {\n resolve();\n }\n }\n });\n });\n }\n\n /**\n * Insert a new data into unresolved array.\n *\n * @param data -\n */\n private appendUnresolvedData(data: Buffer) {\n this.unresolvedDataArray.push(data);\n this.unresolvedLength += data.length;\n }\n\n /**\n * Try to shift a buffer with size in blockSize. 
The buffer returned may be less\n * than blockSize when data in unresolvedDataArray is less than bufferSize.\n *\n */\n private shiftBufferFromUnresolvedDataArray(buffer?: PooledBuffer): PooledBuffer {\n if (!buffer) {\n buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);\n } else {\n buffer.fill(this.unresolvedDataArray, this.unresolvedLength);\n }\n\n this.unresolvedLength -= buffer.size;\n return buffer;\n }\n\n /**\n * Resolve data in unresolvedDataArray. For every buffer with size in blockSize\n * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,\n * then push it into outgoing to be handled by outgoing handler.\n *\n * Return false when available buffers in incoming are not enough, else true.\n *\n * @returns Return false when buffers in incoming are not enough, else true.\n */\n private resolveData(): boolean {\n while (this.unresolvedLength >= this.bufferSize) {\n let buffer: PooledBuffer;\n\n if (this.incoming.length > 0) {\n buffer = this.incoming.shift()!;\n this.shiftBufferFromUnresolvedDataArray(buffer);\n } else {\n if (this.numBuffers < this.maxBuffers) {\n buffer = this.shiftBufferFromUnresolvedDataArray();\n this.numBuffers++;\n } else {\n // No available buffer, wait for buffer returned\n return false;\n }\n }\n\n this.outgoing.push(buffer);\n this.triggerOutgoingHandlers();\n }\n return true;\n }\n\n /**\n * Try to trigger a outgoing handler for every buffer in outgoing. Stop when\n * concurrency reaches.\n */\n private async triggerOutgoingHandlers() {\n let buffer: PooledBuffer | undefined;\n do {\n if (this.executingOutgoingHandlers >= this.concurrency) {\n return;\n }\n\n buffer = this.outgoing.shift();\n if (buffer) {\n this.triggerOutgoingHandler(buffer);\n }\n } while (buffer);\n }\n\n /**\n * Trigger a outgoing handler for a buffer shifted from outgoing.\n *\n * @param buffer -\n */\n private async triggerOutgoingHandler(buffer: PooledBuffer): Promise {\n const bufferLength = buffer.size;\n\n this.executingOutgoingHandlers++;\n this.offset += bufferLength;\n\n try {\n await this.outgoingHandler(\n () => buffer.getReadableStream(),\n bufferLength,\n this.offset - bufferLength\n );\n } catch (err: any) {\n this.emitter.emit(\"error\", err);\n return;\n }\n\n this.executingOutgoingHandlers--;\n this.reuseBuffer(buffer);\n this.emitter.emit(\"checkEnd\");\n }\n\n /**\n * Return buffer used by outgoing handler into incoming.\n *\n * @param buffer -\n */\n private reuseBuffer(buffer: PooledBuffer) {\n this.incoming.push(buffer);\n if (!this.isError && this.resolveData() && !this.isStreamEnd) {\n this.readable.resume();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as fs from \"fs\";\nimport * as util from \"util\";\nimport { REQUEST_TIMEOUT } from \"./constants\";\n\n/**\n * Reads a readable stream into buffer. 
Fill the buffer from offset to end.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param offset - From which position in the buffer to be filled, inclusive\n * @param end - To which position in the buffer to be filled, exclusive\n * @param encoding - Encoding of the Readable stream\n */\nexport async function streamToBuffer(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n offset: number,\n end: number,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const count = end - offset; // Total amount of data needed in stream\n\n return new Promise((resolve, reject) => {\n const timeout = setTimeout(\n () => reject(new Error(`The operation cannot be completed in timeout.`)),\n REQUEST_TIMEOUT\n );\n\n stream.on(\"readable\", () => {\n if (pos >= count) {\n clearTimeout(timeout);\n resolve();\n return;\n }\n\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n // How much data needed in this chunk\n const chunkLength = pos + chunk.length > count ? count - pos : chunk.length;\n\n buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);\n pos += chunkLength;\n });\n\n stream.on(\"end\", () => {\n clearTimeout(timeout);\n if (pos < count) {\n reject(\n new Error(\n `Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`\n )\n );\n }\n resolve();\n });\n\n stream.on(\"error\", (msg) => {\n clearTimeout(timeout);\n reject(msg);\n });\n });\n}\n\n/**\n * Reads a readable stream into buffer entirely.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n * @throws `RangeError` If buffer size is not big enough.\n */\nexport async function streamToBuffer2(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const bufferSize = buffer.length;\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n if (pos + chunk.length > bufferSize) {\n reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`));\n return;\n }\n\n buffer.fill(chunk, pos, pos + chunk.length);\n pos += chunk.length;\n });\n\n stream.on(\"end\", () => {\n resolve(pos);\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into a buffer.\n *\n * @param stream - A Node.js Readable stream\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n */\nexport async function streamToBuffer3(\n readableStream: NodeJS.ReadableStream,\n encoding?: BufferEncoding\n): Promise {\n return new Promise((resolve, reject) => {\n const chunks: Buffer[] = [];\n readableStream.on(\"data\", (data: Buffer | string) => {\n chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding));\n });\n readableStream.on(\"end\", () => {\n resolve(Buffer.concat(chunks));\n });\n readableStream.on(\"error\", reject);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Writes the content of a readstream to a local file. 
Returns a Promise which is completed after the file handle is closed.\n *\n * @param rs - The read stream.\n * @param file - Destination file path.\n */\nexport async function readStreamToLocalFile(\n rs: NodeJS.ReadableStream,\n file: string\n): Promise {\n return new Promise((resolve, reject) => {\n const ws = fs.createWriteStream(file);\n\n rs.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"close\", resolve);\n\n rs.pipe(ws);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Promisified version of fs.stat().\n */\nexport const fsStat = util.promisify(fs.stat);\n\nexport const fsCreateReadStream = fs.createReadStream;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n generateUuid,\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n TransferProgressEvent,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PollOperationState } from \"@azure/core-lro\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Readable } from \"stream\";\n\nimport { BlobDownloadResponse } from \"./BlobDownloadResponse\";\nimport { BlobQueryResponse } from \"./BlobQueryResponse\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from \"./generated/src/operations\";\nimport {\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobCreateResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobCreateSnapshotResponse,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobDownloadResponseModel,\n BlobGetPropertiesResponseModel,\n BlobGetTagsHeaders,\n BlobSetHTTPHeadersResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobSetTierResponse,\n BlobStartCopyFromURLResponse,\n BlobTags,\n BlobUndeleteResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobStageBlockResponse,\n BlockBlobUploadHeaders,\n BlockBlobUploadResponse,\n BlockListType,\n CpkInfo,\n DeleteSnapshotsOptionType,\n LeaseAccessConditions,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalResponse,\n PageBlobCreateResponse,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobUploadPagesResponse,\n RehydratePriority,\n SequenceNumberActionType,\n BlockBlobPutBlobFromUrlResponse,\n BlobHTTPHeaders,\n PageBlobGetPageRangesResponseModel,\n PageRangeInfo,\n PageBlobGetPageRangesDiffResponseModel,\n BlobCopySourceTags,\n} from \"./generatedModels\";\nimport {\n AppendBlobRequestConditions,\n BlobDownloadResponseParsed,\n BlobRequestConditions,\n BlockBlobTier,\n ensureCpkIfSpecified,\n Metadata,\n ObjectReplicationPolicy,\n PageBlobRequestConditions,\n PremiumPageBlobTier,\n Tags,\n toAccessTier,\n TagConditions,\n MatchConditions,\n ModificationConditions,\n ModifiedAccessConditions,\n BlobQueryArrowField,\n BlobImmutabilityPolicy,\n HttpAuthorization,\n PollerLikeWithCancellation,\n} from \"./models\";\nimport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n rangeResponseFromModel,\n} from \"./PageBlobRangeResponse\";\nimport { newPipeline, PipelineLike, isPipelineLike, 
StoragePipelineOptions } from \"./Pipeline\";\nimport {\n BlobBeginCopyFromUrlPoller,\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\nimport { Range, rangeToString } from \"./Range\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { Batch } from \"./utils/Batch\";\nimport { BufferScheduler } from \"../../storage-common/src\";\nimport {\n BlobDoesNotUseCustomerSpecifiedEncryption,\n BlobUsesCustomerSpecifiedEncryptionMsg,\n BLOCK_BLOB_MAX_BLOCKS,\n BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES,\n BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES,\n DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES,\n DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS,\n ETagAny,\n URLConstants,\n} from \"./utils/constants\";\nimport { createSpan, convertTracingToRequestOptionsBase } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n ExtractPageRangeInfoItems,\n generateBlockID,\n getURLParameter,\n httpAuthorizationToString,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n setURLParameter,\n toBlobTags,\n toBlobTagsString,\n toQuerySerialization,\n toTags,\n} from \"./utils/utils.common\";\nimport {\n fsCreateReadStream,\n fsStat,\n readStreamToLocalFile,\n streamToBuffer,\n} from \"./utils/utils.node\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobSASPermissions } from \"./sas/BlobSASPermissions\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldResponse,\n} from \"./generatedModels\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions {\n /**\n * The amount of time in milliseconds the poller should wait between\n * calls to the service to determine the status of the Blob copy.\n * Defaults to 15 seconds.\n */\n intervalInMs?: number;\n /**\n * Callback to receive the state of the copy progress.\n */\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n /**\n * Serialized poller state that can be used to resume polling from.\n * This may be useful when starting a copy on one process or thread\n * and you wish to continue polling on another process or thread.\n *\n * To get serialized poller state, call `poller.toString()` on an existing\n * poller.\n */\n resumeFrom?: string;\n}\n\n/**\n * Contains response data for the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse {}\n\n/**\n * Options to configure the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve.\n */\n snapshot?: string;\n /**\n * When this is set to true and download range of blob, the service returns the MD5 hash for the range,\n * as long as the range is less than or equal to 4 MB 
in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentMD5?: boolean;\n /**\n * When this is set to true and download range of blob, the service returns the CRC64 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentCrc64?: boolean;\n /**\n * Conditions to meet when downloading blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Call back to receive events on the progress of download operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original body download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional `FileClient.download()` request will be made\n * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached.\n *\n * Default value is 5, please set a larger value when loading large files in poor network.\n */\n maxRetryRequests?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.exists} operation.\n */\nexport interface BlobExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Conditions to meet.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting blob properties.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.delete} operation.\n */\nexport interface BlobDeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies options to delete blobs that have associated snapshots.\n * - `include`: Delete the base blob and all of its snapshots.\n * - `only`: Delete only the blob's snapshots and not the blob itself.\n */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.undelete} operation.\n */\nexport interface BlobUndeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an 
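As a rough illustration of the BlobDownloadOptions above in Node.js; the client URL is a placeholder and would normally carry a SAS:

```ts
import { BlobClient } from "@azure/storage-blob";

const blobClient = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString" // placeholder URL
);

async function downloadFirstKilobyte(): Promise<Buffer> {
  const controller = new AbortController();
  // Download bytes [0, 1024) with cancellation, progress reporting and
  // up to 3 extra retries if the body stream breaks mid-transfer (Node.js only).
  const res = await blobClient.download(0, 1024, {
    abortSignal: controller.signal,
    maxRetryRequests: 3,
    onProgress: (p) => console.log(`loaded ${p.loadedBytes} bytes`),
  });

  // Accumulate the Node.js readable body, mirroring the streamToBuffer3 helper above.
  const body = res.readableStreamBody!;
  const chunks: Buffer[] = [];
  await new Promise<void>((resolve, reject) => {
    body.on("data", (d: Buffer | string) => chunks.push(d instanceof Buffer ? d : Buffer.from(d)));
    body.on("end", resolve);
    body.on("error", reject);
  });
  return Buffer.concat(chunks);
}
```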
`AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setHTTPHeaders} operation.\n */\nexport interface BlobSetHTTPHeadersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob HTTP headers.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setMetadata} operation.\n */\nexport interface BlobSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob metadata.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.setTags} operation.\n */\nexport interface BlobSetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getTags} operation.\n */\nexport interface BlobGetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getTags} operation.\n */\nexport type BlobGetTagsResponse = { tags: Tags } & BlobGetTagsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: BlobGetTagsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: BlobTags;\n };\n };\n\n/**\n * Options to configure Blob - Acquire Lease operation.\n */\nexport interface BlobAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease of a blob.\n */\n conditions?: 
ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Release Lease operation.\n */\nexport interface BlobReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Renew Lease operation.\n */\nexport interface BlobRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Change Lease operation.\n */\nexport interface BlobChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Break Lease operation.\n */\nexport interface BlobBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.createSnapshot} operation.\n */\nexport interface BlobCreateSnapshotOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet when creating blob snapshots.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobStartCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the blob that are being copied.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | PremiumPageBlobTier | string;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Overrides the sealed state of the destination blob. 
Default true.\n */\n sealBlob?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobClient.abortCopyFromURL} operation.\n */\nexport interface BlobAbortCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.syncCopyFromURL} operation.\n */\nexport interface BlobSyncCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | PremiumPageBlobTier | string;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Default 'REPLACE'. 
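A hedged sketch of a synchronous copy using the options above; both URLs are placeholders, and the source must be publicly readable or carry its own SAS:

```ts
import { BlobClient } from "@azure/storage-blob";

const destBlob = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/dest?sasString" // placeholder URL
);

async function copyNow(): Promise<void> {
  // Unlike beginCopyFromURL, syncCopyFromURL does not return until the copy has finished.
  const result = await destBlob.syncCopyFromURL("https://example.com/source.bin", {
    metadata: { origin: "example" },
    tags: { project: "demo" },
  });
  console.log(result.copyId, result.copyStatus);
}
```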
Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Options to configure the {@link BlobClient.setAccessTier} operation.\n */\nexport interface BlobSetTierOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n}\n\n/**\n * Option interface for the {@link BlobClient.downloadToBuffer} operation.\n */\nexport interface BlobDownloadToBufferOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * blockSize is the data every request trying to download.\n * Must be greater than or equal to 0.\n * If set to 0 or undefined, blockSize will automatically calculated according to the blob size.\n */\n blockSize?: number;\n\n /**\n * Optional. ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original block download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional FileClient.download() request will be made\n * from the broken point, until the requested block has been successfully downloaded or\n * maxRetryRequestsPerBlock is reached.\n *\n * Default value is 5, please set a larger value when in poor network.\n */\n maxRetryRequestsPerBlock?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel download.\n */\n concurrency?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Contains response data for the {@link BlobClient.deleteIfExists} operation.\n */\nexport interface BlobDeleteIfExistsResponse extends BlobDeleteResponse {\n /**\n * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}.\n */\nexport interface CommonGenerateSasUrlOptions {\n /**\n * The version of the service this SAS will target. 
If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n}\n\n/**\n * Options to configure {@link BlobClient.generateSasUrl} operation.\n */\nexport interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: BlobSASPermissions;\n}\n\n/**\n * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation.\n */\nexport interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation.\n */\nexport interface BlobSetImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n modifiedAccessCondition?: ModificationConditions;\n}\n\n/**\n * Options for setting legal hold {@link BlobClient.setLegalHold} operation.\n */\nexport interface BlobSetLegalHoldOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,\n * append blob, or page blob.\n */\nexport class BlobClient extends StorageClient {\n /**\n * blobContext provided by protocol layer.\n */\n private blobContext: StorageBlob;\n\n private _name: string;\n private _containerName: string;\n\n private _versionId?: string;\n private _snapshot?: string;\n\n /**\n * The name of the blob.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * The name of the storage container the blob is associated with.\n */\n public get containerName(): string {\n return 
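To illustrate BlobGenerateSasUrlOptions, a sketch of generating a short-lived read-only URL; it assumes the client was built with a StorageSharedKeyCredential, and the account name and key are placeholders:

```ts
import { BlobClient, BlobSASPermissions, StorageSharedKeyCredential } from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("myaccount", "<account-key>"); // placeholders
const blobClient = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/blob",
  credential
);

async function readOnlyLink(): Promise<string> {
  return blobClient.generateSasUrl({
    permissions: BlobSASPermissions.parse("r"),       // read only
    expiresOn: new Date(Date.now() + 60 * 60 * 1000), // valid for one hour
  });
}
```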
this._containerName;\n }\n\n /**\n *\n * Creates an instance of BlobClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n options = options || {};\n let pipeline: PipelineLike;\n let url: string;\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n if (blobNameOrOptions && typeof blobNameOrOptions !== \"string\") {\n options = blobNameOrOptions as StoragePipelineOptions;\n }\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n\n super(url, pipeline);\n ({ blobName: this._name, containerName: this._containerName } =\n this.getBlobAndContainerNamesFromUrl());\n this.blobContext = new StorageBlob(this.storageClientContext);\n\n this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT) as string;\n this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID) as string;\n }\n\n /**\n * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp\n */\n public withSnapshot(snapshot: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a new BlobClient object pointing to a version of this blob.\n * Provide \"\" will remove the versionId and return a Client to the base blob.\n *\n * @param versionId - The versionId.\n * @returns A new BlobClient object pointing to the version of this blob.\n */\n public withVersion(versionId: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.VERSIONID,\n versionId.length === 0 ? 
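The three constructor overloads above, plus withSnapshot, in one sketch; account name, key, SAS and blob names are placeholders:

```ts
import { AnonymousCredential, BlobClient, newPipeline } from "@azure/storage-blob";

// 1) Account connection string, container name, blob name (Node.js only).
const fromConnString = new BlobClient(
  "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<key>;EndpointSuffix=core.windows.net",
  "mycontainer",
  "myblob.txt"
);

// 2) Blob URL plus a credential; with AnonymousCredential the URL would normally carry a SAS.
const fromUrl = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/myblob.txt?sasString",
  new AnonymousCredential()
);

// 3) Blob URL plus an explicitly constructed pipeline.
const fromPipeline = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/myblob.txt?sasString",
  newPipeline(new AnonymousCredential())
);

// A client pinned to a specific snapshot; passing "" returns a client for the base blob again.
const snapshotClient = fromUrl.withSnapshot("2023-01-01T00:00:00.0000000Z");
```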
undefined : versionId\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a AppendBlobClient object.\n *\n */\n public getAppendBlobClient(): AppendBlobClient {\n return new AppendBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a BlockBlobClient object.\n *\n */\n public getBlockBlobClient(): BlockBlobClient {\n return new BlockBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a PageBlobClient object.\n *\n */\n public getPageBlobClient(): PageBlobClient {\n return new PageBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Reads or downloads a blob from the system, including its metadata and properties.\n * You can also call Get Blob to read a snapshot.\n *\n * * In Node.js, data returns in a Readable stream readableStreamBody\n * * In browsers, data returns in a promise blobBody\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob\n *\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Optional options to Blob Download operation.\n *\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);\n * console.log(\"Downloaded blob content:\", downloaded.toString());\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * Example usage (browser):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);\n * console.log(\n * \"Downloaded blob content\",\n * downloaded\n * );\n *\n * async function blobToString(blob: Blob): Promise {\n * const fileReader = new FileReader();\n * return new Promise((resolve, reject) => {\n * fileReader.onloadend = (ev: any) => {\n * resolve(ev.target!.result);\n * };\n * fileReader.onerror = reject;\n * fileReader.readAsText(blob);\n * });\n * }\n * ```\n */\n public async download(\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlobClient-download\", options);\n\n try {\n const res = await this.blobContext.download({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream\n },\n range: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedRes = {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n // Return browser response immediately\n if (!isNode) {\n return wrappedRes;\n }\n\n // We support retrying when download stream unexpected ends in Node.js runtime\n // Following code shouldn't be bundled into browser build, however some\n // bundlers may try to bundle following code and \"FileReadResponse.ts\".\n // In this case, \"FileDownloadResponse.browser.ts\" will be used as a shim of \"FileDownloadResponse.ts\"\n // The config is in package.json \"browser\" field\n if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {\n // TODO: Default value or make it a required parameter?\n options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;\n }\n\n if (res.contentLength === undefined) {\n throw new RangeError(`File download response doesn't contain valid content length header`);\n }\n\n if (!res.etag) {\n throw new RangeError(`File download response doesn't contain valid etag header`);\n }\n\n return new BlobDownloadResponse(\n wrappedRes,\n async (start: number): Promise => {\n const updatedDownloadOptions: BlobDownloadOptionalParams = {\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ifMatch: options.conditions!.ifMatch || res.etag,\n ifModifiedSince: options.conditions!.ifModifiedSince,\n ifNoneMatch: options.conditions!.ifNoneMatch,\n ifUnmodifiedSince: options.conditions!.ifUnmodifiedSince,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({\n count: offset + res.contentLength! - start,\n offset: start,\n }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n };\n\n // Debug purpose only\n // console.log(\n // `Read from internal stream, range: ${\n // updatedOptions.range\n // }, options: ${JSON.stringify(updatedOptions)}`\n // );\n\n return (\n await this.blobContext.download({\n abortSignal: options.abortSignal,\n ...updatedDownloadOptions,\n })\n ).readableStreamBody!;\n },\n offset,\n res.contentLength!,\n {\n maxRetryRequests: options.maxRetryRequests,\n onProgress: options.onProgress,\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure blob resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing blob might be deleted by other clients or\n * applications. 
Vice versa new blobs might be added by other clients or applications after this\n * function completes.\n *\n * @param options - options to Exists operation.\n */\n public async exists(options: BlobExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-exists\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n await this.getProperties({\n abortSignal: options.abortSignal,\n customerProvidedKey: options.customerProvidedKey,\n conditions: options.conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n // Expected exception when checking blob existence\n return false;\n } else if (\n e.statusCode === 409 &&\n (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg ||\n e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)\n ) {\n // Expected exception when checking blob existence\n return true;\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns all user-defined metadata, standard HTTP properties, and system properties\n * for the blob. It does not return the content of the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Optional options to Get Properties operation.\n */\n public async getProperties(\n options: BlobGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getProperties\", options);\n try {\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const res = await this.blobContext.getProperties({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n return {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
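A small sketch of the exists/getProperties pair described above; the client is assumed to be an already-constructed BlobClient:

```ts
import { BlobClient } from "@azure/storage-blob";

// exists() maps the expected 404 (and the customer-provided-key 409 cases) to a boolean instead of throwing.
async function describeIfPresent(blobClient: BlobClient): Promise<void> {
  if (await blobClient.exists()) {
    const props = await blobClient.getProperties();
    console.log(`content length: ${props.contentLength}, etag: ${props.etag}`);
  } else {
    console.log("blob does not exist");
  }
}
```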
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async delete(options: BlobDeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-delete\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.blobContext.delete({\n abortSignal: options.abortSignal,\n deleteSnapshots: options.deleteSnapshots,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async deleteIfExists(\n options: BlobDeleteOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteIfExists\", options);\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a blob or snapshot only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restores the contents and metadata of soft deleted blob and any associated\n * soft deleted snapshots. 
Undelete Blob is supported only on version 2017-07-29\n * or later.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob\n *\n * @param options - Optional options to Blob Undelete operation.\n */\n public async undelete(options: BlobUndeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-undelete\", options);\n try {\n return await this.blobContext.undelete({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets system properties on the blob.\n *\n * If no value provided, or no value provided for the specified blob HTTP headers,\n * these blob HTTP headers without a value will be cleared.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param blobHTTPHeaders - If no value provided, or no value provided for\n * the specified blob HTTP headers, these blob HTTP\n * headers without a value will be cleared.\n * A common header to set is `blobContentType`\n * enabling the browser to provide functionality\n * based on file type.\n * @param options - Optional options to Blob Set HTTP Headers operation.\n */\n public async setHTTPHeaders(\n blobHTTPHeaders?: BlobHTTPHeaders,\n options: BlobSetHTTPHeadersOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setHTTPHeaders\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setHttpHeaders({\n abortSignal: options.abortSignal,\n blobHttpHeaders: blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger.\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets user-defined metadata for the specified blob as one or more name-value pairs.\n *\n * If no option provided, or no metadata defined in the parameter, the blob\n * metadata will be removed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Optional options to Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: BlobSetMetadataOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setMetadata\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: 
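A sketch of deleteIfExists, which wraps delete and turns the BlobNotFound error into a succeeded flag:

```ts
import { BlobClient } from "@azure/storage-blob";

async function removeBlob(blobClient: BlobClient): Promise<void> {
  // "include" deletes the base blob together with any snapshots it still has.
  const res = await blobClient.deleteIfExists({ deleteSnapshots: "include" });
  console.log(res.succeeded ? "deleted" : "nothing to delete");
}
```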
SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets tags on the underlying blob.\n * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.\n * Valid tag key and value characters include lower and upper case letters, digits (0-9),\n * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').\n *\n * @param tags -\n * @param options -\n */\n public async setTags(tags: Tags, options: BlobSetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setTags\", options);\n try {\n return await this.blobContext.setTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n tags: toBlobTags(tags),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the tags associated with the underlying blob.\n *\n * @param options -\n */\n public async getTags(options: BlobGetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getTags\", options);\n try {\n const response = await this.blobContext.getTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n const wrappedResponse: BlobGetTagsResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n tags: toTags({ blobTagSet: response.blobTagSet }) || {},\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the blob.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the blob.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a read-only snapshot of a blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob\n *\n * @param options - Optional options to the Blob Create Snapshot operation.\n */\n public async createSnapshot(\n options: BlobCreateSnapshotOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-createSnapshot\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.createSnapshot({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Asynchronously copies a blob to a 
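A sketch of the tag round trip described above; the tag names and values are arbitrary examples:

```ts
import { BlobClient } from "@azure/storage-blob";

async function tagBlob(blobClient: BlobClient): Promise<void> {
  await blobClient.setTags({ project: "demo", stage: "test" });
  const { tags } = await blobClient.getTags();
  console.log(tags); // { project: "demo", stage: "test" }
}
```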
destination within the storage account.\n * This method returns a long running operation poller that allows you to wait\n * indefinitely until the copy is completed.\n * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.\n * Note that the onProgress callback will not be invoked if the operation completes in the first\n * request, and attempting to cancel a completed copy will result in an error being thrown.\n *\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * Example using automatic polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using manual polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * while (!poller.isDone()) {\n * await poller.poll();\n * }\n * const result = copyPoller.getResult();\n * ```\n *\n * Example using progress updates:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * onProgress(state) {\n * console.log(`Progress: ${state.copyProgress}`);\n * }\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using a changing polling interval (default 15 seconds):\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * intervalInMs: 1000 // poll blob every 1 second for copy progress\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using copy cancellation:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * // cancel operation after starting it.\n * try {\n * await copyPoller.cancelOperation();\n * // calls to get the result now throw PollerCancelledError\n * await copyPoller.getResult();\n * } catch (err) {\n * if (err.name === 'PollerCancelledError') {\n * console.log('The copy was cancelled.');\n * }\n * }\n * ```\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n public async beginCopyFromURL(\n copySource: string,\n options: BlobBeginCopyFromURLOptions = {}\n ): Promise<\n PollerLikeWithCancellation<\n PollOperationState,\n BlobBeginCopyFromURLResponse\n >\n > {\n const client: CopyPollerBlobClient = {\n abortCopyFromURL: (...args) => this.abortCopyFromURL(...args),\n getProperties: (...args) => this.getProperties(...args),\n startCopyFromURL: (...args) => this.startCopyFromURL(...args),\n };\n const poller = new BlobBeginCopyFromUrlPoller({\n blobClient: client,\n copySource,\n intervalInMs: options.intervalInMs,\n onProgress: options.onProgress,\n resumeFrom: options.resumeFrom,\n startCopyFromURLOptions: options,\n });\n\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n await poller.poll();\n\n return poller;\n }\n\n /**\n * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero\n * length and full metadata. 
Version 2012-02-12 and newer.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob\n *\n * @param copyId - Id of the Copy From URL operation.\n * @param options - Optional options to the Blob Abort Copy From URL operation.\n */\n public async abortCopyFromURL(\n copyId: string,\n options: BlobAbortCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-abortCopyFromURL\", options);\n try {\n return await this.blobContext.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not\n * return a response until the copy is complete.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url\n *\n * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication\n * @param options -\n */\n public async syncCopyFromURL(\n copySource: string,\n options: BlobSyncCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-syncCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.copyFromURL(copySource, {\n abortSignal: options.abortSignal,\n metadata: options.metadata,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n sourceContentMD5: options.sourceContentMD5,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n encryptionScope: options.encryptionScope,\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier\n *\n * @param tier - The tier to be set on the blob. 
Valid values are Hot, Cool, or Archive.\n * @param options - Optional options to the Blob Set Tier operation.\n */\n public async setAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string,\n options: BlobSetTierOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setAccessTier\", options);\n try {\n return await this.blobContext.setTier(toAccessTier(tier)!, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n rehydratePriority: options.rehydratePriority,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level function\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param buffer - Buffer to be fill, must have length larger than count\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n buffer: Buffer,\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n public async downloadToBuffer(\n param1?: Buffer | number,\n param2?: number,\n param3?: BlobDownloadToBufferOptions | number,\n param4: BlobDownloadToBufferOptions = {}\n ): Promise {\n let buffer: Buffer | undefined;\n let offset = 0;\n let count = 0;\n let options = param4;\n if (param1 instanceof Buffer) {\n buffer = param1;\n offset = param2 || 0;\n count = typeof param3 === \"number\" ? param3 : 0;\n } else {\n offset = typeof param1 === \"number\" ? param1 : 0;\n count = typeof param2 === \"number\" ? 
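A minimal sketch of the tier operation just shown, assuming a constructed `blobClient`:

```ts
// Move the blob to the Cool tier; other valid values include "Hot" and "Archive".
// Note that an archived blob must be rehydrated (set back to Hot or Cool) before it can be read.
await blobClient.setAccessTier("Cool");
```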
param2 : 0;\n options = (param3 as BlobDownloadToBufferOptions) || {};\n }\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToBuffer\", options);\n\n try {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0) {\n throw new RangeError(\"blockSize option must be >= 0\");\n }\n if (options.blockSize === 0) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n\n if (offset < 0) {\n throw new RangeError(\"offset option must be >= 0\");\n }\n\n if (count && count <= 0) {\n throw new RangeError(\"count option must be greater than 0\");\n }\n\n if (!options.conditions) {\n options.conditions = {};\n }\n\n // Customer doesn't specify length, get it\n if (!count) {\n const response = await this.getProperties({\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n count = response.contentLength! - offset;\n if (count < 0) {\n throw new RangeError(\n `offset ${offset} shouldn't be larger than blob size ${response.contentLength!}`\n );\n }\n }\n\n // Allocate the buffer of size = count if the buffer is not provided\n if (!buffer) {\n try {\n buffer = Buffer.alloc(count);\n } catch (error: any) {\n throw new Error(\n `Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\\t ${error.message}`\n );\n }\n }\n\n if (buffer.length < count) {\n throw new RangeError(\n `The buffer's size should be equal to or larger than the request count of bytes: ${count}`\n );\n }\n\n let transferProgress: number = 0;\n const batch = new Batch(options.concurrency);\n for (let off = offset; off < offset + count; off = off + options.blockSize) {\n batch.addOperation(async () => {\n // Exclusive chunk end position\n let chunkEnd = offset + count!;\n if (off + options.blockSize! < chunkEnd) {\n chunkEnd = off + options.blockSize!;\n }\n const response = await this.download(off, chunkEnd - off, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n maxRetryRequests: options.maxRetryRequestsPerBlock,\n customerProvidedKey: options.customerProvidedKey,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n const stream = response.readableStreamBody!;\n await streamToBuffer(stream, buffer!, off - offset, chunkEnd - offset);\n // Update progress after block is downloaded, in case of block trying\n // Could provide finer grained progress updating inside HTTP requests,\n // only if convenience layer download try is enabled\n transferProgress += chunkEnd - off;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n });\n }\n await batch.do();\n return buffer;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob to a local file.\n * Fails if the the given file path already exits.\n * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.\n *\n * @param filePath -\n * @param offset - From which position of the block blob to download.\n * @param count - How much data to be downloaded. 
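A minimal sketch of the parallel buffer download implemented above (Node.js runtime only), assuming a constructed `blobClient` and a blob at least as large as the pre-allocated buffer; the block size and concurrency values are illustrative:

```ts
// Pre-allocate a buffer at least as large as the requested byte count.
const buffer = Buffer.alloc(10 * 1024 * 1024);

await blobClient.downloadToBuffer(buffer, 0, buffer.length, {
  blockSize: 4 * 1024 * 1024, // download in 4 MiB chunks
  concurrency: 5,             // up to 5 chunks in flight at once
  onProgress: (ev) => console.log(`downloaded ${ev.loadedBytes} bytes`),
});
```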
Will download to the end when passing undefined.\n * @param options - Options to Blob download options.\n * @returns The response data for blob download operation,\n * but with readableStreamBody set to undefined since its\n * content is already read and written into a local file\n * at the specified path.\n */\n public async downloadToFile(\n filePath: string,\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToFile\", options);\n try {\n const response = await this.download(offset, count, {\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n if (response.readableStreamBody) {\n await readStreamToLocalFile(response.readableStreamBody, filePath);\n }\n\n // The stream is no longer accessible so setting it to undefined.\n (response as any).blobDownloadStream = undefined;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n private getBlobAndContainerNamesFromUrl(): { blobName: string; containerName: string } {\n let containerName;\n let blobName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`\n // http://localhost:10001/devstoreaccount1/containername/blob\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob\n // Single word domain without a [dot] in the endpoint... 
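A minimal sketch of downloading an entire blob to disk (Node.js runtime only), assuming a constructed `blobClient` and an illustrative local path:

```ts
// Per the doc comment above, this fails if the target path already exists, and the
// returned response has its stream set to undefined because the content was written to disk.
const response = await blobClient.downloadToFile("./blob-copy.bin");
console.log(`content type: ${response.contentType}`);
```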
Example - http://localhost:10001/devstoreaccount1/containername/blob\n // .getPath() -> /devstoreaccount1/containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)/([^/]*)(/(.*))?\");\n containerName = pathComponents![2];\n blobName = pathComponents![4];\n } else {\n // \"https://customdomain.com/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n }\n\n // decode the encoded blobName, containerName - to get all the special characters that might be present in them\n containerName = decodeURIComponent(containerName);\n blobName = decodeURIComponent(blobName);\n\n // Azure Storage Server will replace \"\\\" with \"/\" in the blob names\n // doing the same in the SDK side so that the user doesn't have to replace \"\\\" instances in the blobName\n blobName = blobName.replace(/\\\\/g, \"/\");\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return { blobName, containerName };\n } catch (error: any) {\n throw new Error(\"Unable to extract blobName and containerName with provided information.\");\n }\n }\n\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n private async startCopyFromURL(\n copySource: string,\n options: BlobStartCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-startCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.startCopyFromURL(copySource, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions.tagConditions,\n },\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n rehydratePriority: options.rehydratePriority,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n sealBlob: options.sealBlob,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Only available for BlobClient constructed with a shared key credential.\n *\n * Generates a Blob Service Shared 
Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: BlobGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n blobName: this._name,\n snapshotTime: this._snapshot,\n versionId: this._versionId,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Delete the immutablility policy on the blob.\n *\n * @param options - Optional options to delete immutability policy on the blob.\n */\n public async deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteImmutabilityPolicy\", options);\n try {\n return await this.blobContext.deleteImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set immutablility policy on the blob.\n *\n * @param options - Optional options to set immutability policy on the blob.\n */\n public async setImmutabilityPolicy(\n immutabilityPolicy: BlobImmutabilityPolicy,\n options?: BlobSetImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setImmutabilityPolicy\", options);\n try {\n return await this.blobContext.setImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn,\n immutabilityPolicyMode: immutabilityPolicy.policyMode,\n modifiedAccessConditions: options?.modifiedAccessCondition,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set legal hold on the blob.\n *\n * @param options - Optional options to set legal hold on the blob.\n */\n public async setLegalHold(\n legalHoldEnabled: boolean,\n options?: BlobSetLegalHoldOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setLegalHold\", options);\n try {\n return await this.blobContext.setLegalHold(legalHoldEnabled, {\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link AppendBlobClient.create} operation.\n */\nexport interface AppendBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * 
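A minimal sketch of generating a read-only SAS URL, assuming `blobClient` was constructed with a `StorageSharedKeyCredential` (the method throws a `RangeError` otherwise):

```ts
import { BlobSASPermissions } from "@azure/storage-blob";

const sasUrl = await blobClient.generateSasUrl({
  permissions: BlobSASPermissions.parse("r"),        // read-only
  expiresOn: new Date(Date.now() + 60 * 60 * 1000),  // valid for one hour
});
```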
Conditions to meet when creating append blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when creating append blobs. A common header\n * to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * HTTP headers to set when creating append blobs. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.seal} operation.\n */\nexport interface AppendBlobSealOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet.\n */\n conditions?: AppendBlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlock} operation.\n */\nexport interface AppendBlobAppendBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Callback to receive events on the progress of append block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation.\n */\nexport interface AppendBlobAppendBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link appendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * AppendBlobClient defines a set of operations applicable to append blobs.\n */\nexport class AppendBlobClient extends BlobClient {\n /**\n * appendBlobsContext provided by protocol layer.\n */\n private appendBlobContext: AppendBlob;\n\n /**\n *\n * Creates an instance of AppendBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n // The second parameter is undefined. 
Use anonymous credential.\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.appendBlobContext = new AppendBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new AppendBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): AppendBlobClient {\n return new AppendBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a 0-length append blob. 
Call AppendBlock to append data to an append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options - Options to the Append Block Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const appendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await appendBlobClient.create();\n * ```\n */\n public async create(options: AppendBlobCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-create\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.create(0, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.\n * If the blob with the same name already exists, the content of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options -\n */\n public async createIfNotExists(\n options: AppendBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-createIfNotExists\", options);\n const conditions = { ifNoneMatch: ETagAny };\n try {\n const res = await this.create({\n ...updatedOptions,\n conditions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Seals the append blob, making it read only.\n *\n * @param options -\n */\n public async seal(options: AppendBlobSealOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-seal\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.appendBlobContext.seal({\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Commits a new block of data to the end of the existing append blob.\n * @see 
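A minimal sketch of the append-blob lifecycle covered above (create if missing, append, then seal), assuming a constructed `containerClient` and an illustrative blob name:

```ts
const appendBlobClient = containerClient.getAppendBlobClient("app.log");

// Create the blob only if it does not exist yet; existing content is left untouched.
await appendBlobClient.createIfNotExists();

const line = "service started\n";
await appendBlobClient.appendBlock(line, Buffer.byteLength(line));

// Seal the append blob so no further blocks can be appended.
await appendBlobClient.seal();
```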
https://docs.microsoft.com/rest/api/storageservices/append-block\n *\n * @param body - Data to be appended.\n * @param contentLength - Length of the body in bytes.\n * @param options - Options to the Append Block operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello World!\";\n *\n * // Create a new append blob and append data to the blob.\n * const newAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await newAppendBlobClient.create();\n * await newAppendBlobClient.appendBlock(content, content.length);\n *\n * // Append data to an existing append blob.\n * const existingAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await existingAppendBlobClient.appendBlock(content, content.length);\n * ```\n */\n public async appendBlock(\n body: HttpRequestBody,\n contentLength: number,\n options: AppendBlobAppendBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlock\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlock(contentLength, body, {\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob\n * where the contents are read from a source url.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url\n *\n * @param sourceURL -\n * The url to the blob that will be the source of the copy. A source blob in the same storage account can\n * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob\n * must either be public or must be authenticated via a shared access signature. 
If the source blob is\n * public, no authentication is required to perform the operation.\n * @param sourceOffset - Offset in source to be appended\n * @param count - Number of bytes to be appended as a block\n * @param options -\n */\n public async appendBlockFromURL(\n sourceURL: string,\n sourceOffset: number,\n count: number,\n options: AppendBlobAppendBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlockFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {\n abortSignal: options.abortSignal,\n sourceRange: rangeToString({ offset: sourceOffset, count }),\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n appendPositionAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link BlockBlobClient.upload} operation.\n */\nexport interface BlockBlobUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when uploading to a block blob. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when uploading to a block blob.\n */\n metadata?: Metadata;\n /**\n * Callback to receive events on the progress of upload operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Optional. 
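A minimal sketch of `appendBlockFromURL`, assuming a constructed `appendBlobClient` and a hypothetical `sourceBlobUrl` variable holding the source URL (with a SAS token if the source blob is not public):

```ts
// Append the first 1024 bytes of the source blob to the destination append blob.
await appendBlobClient.appendBlockFromURL(sourceBlobUrl, 0, 1024);
```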
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation.\n */\nexport interface BlockBlobSyncUploadFromURLOptions extends CommonOptions {\n /**\n * Server timeout in seconds.\n * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations\n */\n timeoutInSeconds?: number;\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value\n * pairs are specified, the operation will copy the metadata from the source blob or file to the\n * destination blob. If one or more name-value pairs are specified, the destination blob is\n * created with the specified metadata, and metadata is not copied from the source blob or file.\n * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules\n * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more\n * information.\n */\n metadata?: Metadata;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Optional, default is true. Indicates if properties from the source blob should be copied.\n */\n copySourceBlobProperties?: boolean;\n /**\n * HTTP headers to set when uploading to a block blob.\n *\n * A common header to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * Conditions to meet for the destination Azure Blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Conditions to meet for the source Azure Blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Blob query error type.\n */\nexport interface BlobQueryError {\n /**\n * Whether error is fatal. 
Fatal error will stop query.\n */\n isFatal: boolean;\n /**\n * Error name.\n */\n name: string;\n /**\n * Position in bytes of the query.\n */\n position: number;\n /**\n * Error description.\n */\n description: string;\n}\n\n/**\n * Options to query blob with JSON format.\n */\nexport interface BlobQueryJsonTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a JSON format blob.\n */\n kind: \"json\";\n}\n\n/**\n * Options to query blob with CSV format.\n */\nexport interface BlobQueryCsvTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a CSV format blob.\n */\n kind: \"csv\";\n /**\n * Column separator. Default is \",\".\n */\n columnSeparator?: string;\n /**\n * Field quote.\n */\n fieldQuote?: string;\n /**\n * Escape character.\n */\n escapeCharacter?: string;\n /**\n * Has headers. Default is false.\n */\n hasHeaders?: boolean;\n}\n\n/**\n * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}.\n */\nexport interface BlobQueryArrowConfiguration {\n /**\n * Kind.\n */\n kind: \"arrow\";\n\n /**\n * List of {@link BlobQueryArrowField} describing the schema of the data.\n */\n schema: BlobQueryArrowField[];\n}\n\n/**\n * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}.\n */\nexport interface BlobQueryParquetConfiguration {\n /**\n * Kind.\n */\n kind: \"parquet\";\n}\n\n/**\n * Options to configure {@link BlockBlobClient.query} operation.\n */\nexport interface BlockBlobQueryOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Configurations for the query input.\n */\n inputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryParquetConfiguration;\n /**\n * Configurations for the query output.\n */\n outputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration;\n /**\n * Callback to receive events on the progress of query operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlock} operation.\n */\nexport interface BlockBlobStageBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * Callback to receive events on the progress of stage block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. 
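A minimal sketch of a blob query using the input/output configurations described above (Node.js runtime only), assuming a constructed `blockBlobClient` pointing at a CSV-formatted blob:

```ts
const response = await blockBlobClient.query("SELECT * from BlobStorage", {
  inputTextConfiguration: { kind: "csv", recordSeparator: "\n", hasHeaders: true },
  outputTextConfiguration: { kind: "json", recordSeparator: "\n" },
  onError: (err) => console.error(`query error at byte ${err.position}: ${err.description}`),
});
// response.readableStreamBody is a Node.js Readable containing the filtered records.
```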
This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n\n /**\n * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation.\n */\nexport interface BlockBlobStageBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the bytes of the source Blob/File to upload.\n * If not specified, the entire content is uploaded as a single block.\n */\n range?: Range;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. 
Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.commitBlockList} operation.\n */\nexport interface BlockBlobCommitBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when committing the block list.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when committing block list.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when committing block list.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.getBlockList} operation.\n */\nexport interface BlockBlobGetBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n}\n\n/**\n * Option interface for the {@link BlockBlobClient.uploadStream} operation.\n */\nexport interface BlockBlobUploadStreamOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Blob HTTP Headers.\n *\n * A common header to set is `blobContentType`, enabling the\n * browser to provide functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
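A minimal sketch of staging blocks and committing them, assuming a constructed `blockBlobClient`; block ids must be base64-encoded strings of equal length:

```ts
const blockIds = ["block-000001", "block-000002"].map((id) =>
  Buffer.from(id).toString("base64")
);

await blockBlobClient.stageBlock(blockIds[0], "hello ", 6);
await blockBlobClient.stageBlock(blockIds[1], "world", 5);

// Committing the list makes the staged blocks the new blob content.
await blockBlobClient.commitBlockList(blockIds);

const blockList = await blockBlobClient.getBlockList("committed");
console.log(`committed blocks: ${blockList.committedBlocks?.length}`);
```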
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n/**\n * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}.\n */\nexport interface BlockBlobParallelUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Destination block blob size in bytes.\n */\n blockSize?: number;\n\n /**\n * Blob size threshold in bytes to start concurrency uploading.\n * Default value is 256MB, blob size less than this option will\n * be uploaded via one I/O operation without concurrency.\n * You can customize a value less equal than the default value.\n */\n maxSingleShotSize?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Blob HTTP Headers. A common header to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel uploading. Must be greater than or equal to 0.\n */\n concurrency?: number;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n\n/**\n * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and\n * {@link BlockBlobClient.uploadBrowserDate}.\n */\nexport type BlobUploadCommonResponse = BlockBlobUploadHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse;\n};\n\n/**\n * BlockBlobClient defines a set of operations applicable to block blobs.\n */\nexport class BlockBlobClient extends BlobClient {\n /**\n * blobContext provided by protocol layer.\n *\n * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API\n * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient.\n */\n private _blobContext: StorageBlob;\n\n /**\n * blockBlobContext provided by protocol layer.\n */\n private blockBlobContext: BlockBlob;\n\n /**\n *\n * Creates an instance of BlockBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
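A minimal sketch of the high-level parallel upload options above (Node.js runtime only), assuming a constructed `blockBlobClient` and an illustrative local file path:

```ts
await blockBlobClient.uploadFile("./data.bin", {
  blockSize: 4 * 1024 * 1024, // stage 4 MiB blocks
  concurrency: 20,            // up to 20 blocks uploaded in parallel
  onProgress: (ev) => console.log(`${ev.loadedBytes} bytes sent`),
});
```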
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n if (blobNameOrOptions && typeof blobNameOrOptions !== \"string\") {\n options = blobNameOrOptions as StoragePipelineOptions;\n }\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.blockBlobContext = new BlockBlob(this.storageClientContext);\n this._blobContext = new StorageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new BlockBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a URL to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): BlockBlobClient {\n return new BlockBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Quick query for a JSON or CSV formatted blob.\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Query and convert a blob to a string\n * const queryBlockBlobResponse = await blockBlobClient.query(\"select * from BlobStorage\");\n * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();\n * console.log(\"Query blob content:\", downloaded);\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * @param query -\n * @param options -\n */\n public async query(\n query: string,\n options: BlockBlobQueryOptions = {}\n ): Promise {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-query\", options);\n\n try {\n if (!isNode) {\n throw new Error(\"This operation currently is only supported in Node.js.\");\n }\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const response = await this._blobContext.query({\n abortSignal: options.abortSignal,\n queryRequest: {\n queryType: \"SQL\",\n expression: query,\n inputSerialization: toQuerySerialization(options.inputTextConfiguration),\n outputSerialization: toQuerySerialization(options.outputTextConfiguration),\n },\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n return new BlobQueryResponse(response, {\n abortSignal: options.abortSignal,\n onProgress: options.onProgress,\n onError: options.onError,\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link stageBlock} and {@link commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link uploadFile},\n * {@link uploadStream} or {@link uploadBrowserData} for better performance\n * with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. 
Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to the Block Blob Upload operation.\n * @returns Response data for the Block Blob Upload operation.\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public async upload(\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-upload\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.upload(contentLength, body, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new Block Blob where the contents of the blob are read from a given URL.\n * This API is supported beginning with the 2020-04-08 version. Partial updates\n * are not supported with Put Blob from URL; the content of an existing blob is overwritten with\n * the content of the new blob. To perform partial updates to a block blob’s contents using a\n * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.\n *\n * @param sourceURL - Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. 
Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Optional parameters.\n */\n\n public async syncUploadFromURL(\n sourceURL: string,\n options: BlockBlobSyncUploadFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-syncUploadFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, {\n ...options,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions?.ifMatch,\n sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Uploads the specified block to the block blob's \"staging area\" to be later\n * committed by a call to commitBlockList.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param body - Data to upload to the staging area.\n * @param contentLength - Number of bytes to upload.\n * @param options - Options to the Block Blob Stage Block operation.\n * @returns Response data for the Block Blob Stage Block operation.\n */\n public async stageBlock(\n blockId: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobStageBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlock\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlock(blockId, contentLength, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Stage Block From URL operation creates a new block to be committed as part\n * of a blob where the contents are read from a URL.\n * This API is available starting in version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param sourceURL - Specifies the URL of the blob. 
The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Options to the Block Blob Stage Block From URL operation.\n * @returns Response data for the Block Blob Stage Block From URL operation.\n */\n public async stageBlockFromURL(\n blockId: string,\n sourceURL: string,\n offset: number = 0,\n count?: number,\n options: BlockBlobStageBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlockFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes a blob by specifying the list of block IDs that make up the blob.\n * In order to be written as part of a blob, a block must have been successfully written\n * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to\n * update a blob by uploading only those blocks that have changed, then committing the new and existing\n * blocks together. 
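For reference, a minimal sketch of the stageBlock / commitBlockList flow described above, assuming a connection string in an environment variable and placeholder container, blob, and block contents. Block IDs must be base64-encoded strings of equal length; only blocks named in the committed list become part of the blob.

```js
const { BlockBlobClient } = require("@azure/storage-blob");

async function main() {
  const connStr = process.env.AZURE_STORAGE_CONNECTION_STRING;
  const blockBlobClient = new BlockBlobClient(connStr, "mycontainer", "myblob.txt");

  // Block IDs must be base64-encoded and all of the same length.
  const blockIds = [
    Buffer.from("block-000001").toString("base64"),
    Buffer.from("block-000002").toString("base64"),
  ];

  // Stage each block into the blob's "staging area".
  await blockBlobClient.stageBlock(blockIds[0], "Hello ", 6);
  await blockBlobClient.stageBlock(blockIds[1], "world!", 6);

  // Commit the block list; blocks omitted here are discarded.
  await blockBlobClient.commitBlockList(blockIds, {
    blobHTTPHeaders: { blobContentType: "text/plain" },
  });
}

main().catch(console.error);
```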
Any blocks not specified in the block list and permanently deleted.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list\n *\n * @param blocks - Array of 64-byte value that is base64-encoded\n * @param options - Options to the Block Blob Commit Block List operation.\n * @returns Response data for the Block Blob Commit Block List operation.\n */\n public async commitBlockList(\n blocks: string[],\n options: BlockBlobCommitBlockListOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-commitBlockList\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.commitBlockList(\n { latest: blocks },\n {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of blocks that have been uploaded as part of a block blob\n * using the specified block list filter.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list\n *\n * @param listType - Specifies whether to return the list of committed blocks,\n * the list of uncommitted blocks, or both lists together.\n * @param options - Options to the Block Blob Get Block List operation.\n * @returns Response data for the Block Blob Get Block List operation.\n */\n public async getBlockList(\n listType: BlockListType,\n options: BlockBlobGetBlockListOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-getBlockList\", options);\n try {\n const res = await this.blockBlobContext.getBlockList(listType, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n if (!res.committedBlocks) {\n res.committedBlocks = [];\n }\n\n if (!res.uncommittedBlocks) {\n res.uncommittedBlocks = [];\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level functions\n\n /**\n * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the 
browser to provide\n * functionality based on file type.\n *\n * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView\n * @param options -\n */\n public async uploadData(\n data: Buffer | Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadData\", options);\n try {\n if (isNode) {\n let buffer: Buffer;\n if (data instanceof Buffer) {\n buffer = data;\n } else if (data instanceof ArrayBuffer) {\n buffer = Buffer.from(data);\n } else {\n data = data as ArrayBufferView;\n buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n\n return this.uploadSeekableInternal(\n (offset: number, size: number): Buffer => buffer.slice(offset, offset + size),\n buffer.byteLength,\n updatedOptions\n );\n } else {\n const browserBlob = new Blob([data]);\n return this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n }\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN BROWSERS.\n *\n * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.\n *\n * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call\n * {@link commitBlockList} to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n * @deprecated Use {@link uploadData} instead.\n *\n * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView\n * @param options - Options to upload browser data.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadBrowserData(\n browserData: Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadBrowserData\", options);\n try {\n const browserBlob = new Blob([browserData]);\n return await this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n *\n * Uploads data to block blob. 
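A minimal sketch of uploadData with the parallel-upload options described above (blockSize, maxSingleShotSize, concurrency, onProgress, tags). The connection string, names, and sizes are placeholders; data above maxSingleShotSize is split into blockSize-sized stageBlock calls and then committed.

```js
const { BlockBlobClient } = require("@azure/storage-blob");

async function main() {
  const connStr = process.env.AZURE_STORAGE_CONNECTION_STRING;
  const blockBlobClient = new BlockBlobClient(connStr, "mycontainer", "large.bin");

  const data = Buffer.alloc(16 * 1024 * 1024); // 16 MiB of zeroes, for illustration

  await blockBlobClient.uploadData(data, {
    maxSingleShotSize: 4 * 1024 * 1024, // anything larger goes through stageBlock/commitBlockList
    blockSize: 4 * 1024 * 1024,
    concurrency: 4,
    onProgress: (ev) => console.log(`uploaded ${ev.loadedBytes} bytes`),
    tags: { project: "demo" },
  });
}

main().catch(console.error);
```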
Requires a bodyFactory as the data source,\n * which need to return a {@link HttpRequestBody} object with the offset and size provided.\n *\n * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @param bodyFactory -\n * @param size - size of the data to upload.\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n private async uploadSeekableInternal(\n bodyFactory: (offset: number, size: number) => HttpRequestBody,\n size: number,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {\n throw new RangeError(\n `blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`\n );\n }\n\n if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {\n options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;\n }\n if (\n options.maxSingleShotSize < 0 ||\n options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES\n ) {\n throw new RangeError(\n `maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`\n );\n }\n\n if (options.blockSize === 0) {\n if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(`${size} is too larger to upload to a block blob.`);\n }\n if (size > options.maxSingleShotSize) {\n options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);\n if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n }\n }\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadSeekableInternal\", options);\n\n try {\n if (size <= options.maxSingleShotSize) {\n return await this.upload(bodyFactory(0, size), size, updatedOptions);\n }\n\n const numBlocks: number = Math.floor((size - 1) / options.blockSize) + 1;\n if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(\n `The buffer's size is too big or the BlockSize is too small;` +\n `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`\n );\n }\n\n const blockList: string[] = [];\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n\n const batch = new Batch(options.concurrency);\n for (let i = 0; i < numBlocks; i++) {\n batch.addOperation(async (): Promise => {\n const blockID = generateBlockID(blockIDPrefix, i);\n const start = options.blockSize! * i;\n const end = i === numBlocks - 1 ? 
size : start + options.blockSize!;\n const contentLength = end - start;\n blockList.push(blockID);\n await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n // Update progress after block is successfully uploaded to server, in case of block trying\n // TODO: Hook with convenience layer progress event in finer level\n transferProgress += contentLength;\n if (options.onProgress) {\n options.onProgress!({\n loadedBytes: transferProgress,\n });\n }\n });\n }\n await batch.do();\n\n return this.commitBlockList(blockList, updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a local file in blocks to a block blob.\n *\n * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList\n * to commit the block list.\n *\n * @param filePath - Full path of local file\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadFile(\n filePath: string,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadFile\", options);\n try {\n const size = (await fsStat(filePath)).size;\n return await this.uploadSeekableInternal(\n (offset, count) => {\n return () =>\n fsCreateReadStream(filePath, {\n autoClose: true,\n end: count ? offset + count - 1 : Infinity,\n start: offset,\n });\n },\n size,\n {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a Node.js Readable stream into block blob.\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * * Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n *\n * @param stream - Node.js Readable stream\n * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB\n * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,\n * positive correlation with max uploading concurrency. 
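A minimal Node.js-only sketch of the two high-level upload helpers covered here, uploadFile and uploadStream. File paths, names, and sizes are placeholders; matching the stream's highWaterMark to bufferSize follows the performance tip in the uploadStream documentation.

```js
const fs = require("fs");
const { BlockBlobClient } = require("@azure/storage-blob");

async function main() {
  const connStr = process.env.AZURE_STORAGE_CONNECTION_STRING;
  const blockBlobClient = new BlockBlobClient(connStr, "mycontainer", "backup.tar");

  // Upload a local file in blocks.
  await blockBlobClient.uploadFile("./backup.tar");

  // Upload a Readable stream: 8 MiB buffers, up to 5 buffers in flight.
  const bufferSize = 8 * 1024 * 1024;
  const stream = fs.createReadStream("./backup.tar", { highWaterMark: bufferSize });
  await blockBlobClient.uploadStream(stream, bufferSize, 5, {
    onProgress: (ev) => console.log(`streamed ${ev.loadedBytes} bytes`),
  });
}

main().catch(console.error);
```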
Default value is 5\n * @param options - Options to Upload Stream to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadStream(\n stream: Readable,\n bufferSize: number = DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n maxConcurrency: number = 5,\n options: BlockBlobUploadStreamOptions = {}\n ): Promise {\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadStream\", options);\n\n try {\n let blockNum = 0;\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n const blockList: string[] = [];\n\n const scheduler = new BufferScheduler(\n stream,\n bufferSize,\n maxConcurrency,\n async (body, length) => {\n const blockID = generateBlockID(blockIDPrefix, blockNum);\n blockList.push(blockID);\n blockNum++;\n\n await this.stageBlock(blockID, body, length, {\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n\n // Update progress after block is successfully uploaded to server, in case of block trying\n transferProgress += length;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n },\n // concurrency should set a smaller value than maxConcurrency, which is helpful to\n // reduce the possibility when a outgoing handler waits for stream data, in\n // this situation, outgoing handlers are blocked.\n // Outgoing queue shouldn't be empty.\n Math.ceil((maxConcurrency / 4) * 3)\n );\n await scheduler.do();\n\n return await this.commitBlockList(blockList, {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure the {@link PageBlobClient.create} operation.\n */\nexport interface PageBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when creating a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.uploadPages} operation.\n */\nexport interface PageBlobUploadPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Callback to receive events on the progress of upload pages operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content. 
This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.clearPages} operation.\n */\nexport interface PageBlobClearPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when clearing pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure page blob - get page ranges segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesSegment}\n * - {@link PageBlobClient.listPageRangeItemSegments}\n * - {@link PageBlobClient.listPageRangeItems}\n */\ninterface PageBlobListPageRangesSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRanges} operation.\n */\nexport interface PageBlobListPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n /**\n * (unused)\n */\n range?: string;\n}\n\n/**\n * Options to configure page blob - get page ranges diff segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesDiffSegment}\n * - {@link PageBlobClient.listPageRangeDiffItemSegments}\n * - {@link PageBlobClient.listPageRangeDiffItems}\n */\ninterface PageBlobListPageRangesDiffSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation.\n */\nexport interface PageBlobListPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.resize} operation.\n */\nexport interface PageBlobResizeOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when resizing a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
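A small sketch of the resize operation that PageBlobResizeOptions configures. The resize(size, options) signature itself is not shown in this excerpt, so treat it as an assumption; the connection string and names are placeholders, and page blob sizes must be 512-byte aligned.

```js
const { PageBlobClient } = require("@azure/storage-blob");

async function main() {
  const connStr = process.env.AZURE_STORAGE_CONNECTION_STRING;
  const pageBlobClient = new PageBlobClient(connStr, "mycontainer", "disk.vhd");

  // Grow (or shrink) the blob to 1 MiB; the size must be a multiple of 512.
  await pageBlobClient.resize(1024 * 1024);
}

main().catch(console.error);
```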
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link PageBlobClient.updateSequenceNumber} operation.\n */\nexport interface PageBlobUpdateSequenceNumberOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.startCopyIncremental} operation.\n */\nexport interface PageBlobStartCopyIncrementalOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when starting a copy incremental operation.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation.\n */\nexport interface PageBlobUploadPagesFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. 
Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * PageBlobClient defines a set of operations applicable to page blobs.\n */\nexport class PageBlobClient extends BlobClient {\n /**\n * pageBlobsContext provided by protocol layer.\n */\n private pageBlobContext: PageBlob;\n\n /**\n *\n * Creates an instance of PageBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n *\n * @param url - A URL string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.pageBlobContext = new PageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new PageBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): PageBlobClient {\n return new PageBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a page blob of the specified length. 
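A minimal sketch of constructing a PageBlobClient from a connection string and re-targeting it at a snapshot with withSnapshot, as documented above. The connection string, container, blob name, and snapshot timestamp are placeholders; account connection strings work only in Node.js, and passing "" removes the snapshot and returns a client for the base blob.

```js
const { PageBlobClient } = require("@azure/storage-blob");

const connStr = process.env.AZURE_STORAGE_CONNECTION_STRING;

// Account or SAS connection string, container name, blob name.
const pageBlobClient = new PageBlobClient(connStr, "mycontainer", "disk.vhd");

// Same blob, but read from a specific snapshot (timestamp is a placeholder value).
const snapshotClient = pageBlobClient.withSnapshot("2024-01-01T00:00:00.0000000Z");

// Back to the base blob.
const baseClient = snapshotClient.withSnapshot("");
```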
Call uploadPages to upload data\n * data to a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options - Options to the Page Blob Create operation.\n * @returns Response data for the Page Blob Create operation.\n */\n public async create(\n size: number,\n options: PageBlobCreateOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-create\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.create(0, size, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n blobSequenceNumber: options.blobSequenceNumber,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a page blob of the specified length. Call uploadPages to upload data\n * data to a page blob. If the blob with the same name already exists, the content\n * of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options -\n */\n public async createIfNotExists(\n size: number,\n options: PageBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-createIfNotExists\", options);\n try {\n const conditions = { ifNoneMatch: ETagAny };\n const res = await this.create(size, {\n ...options,\n conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes 1 or more pages to the page blob. 
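A minimal sketch of create and createIfNotExists as described above, assuming placeholder names and sizes. Page blob sizes must be a multiple of 512, and createIfNotExists reports whether the blob was actually created.

```js
const { PageBlobClient } = require("@azure/storage-blob");

async function main() {
  const connStr = process.env.AZURE_STORAGE_CONNECTION_STRING;
  const pageBlobClient = new PageBlobClient(connStr, "mycontainer", "disk.vhd");

  // Allocate a 1 MiB page blob (size must be 512-byte aligned).
  await pageBlobClient.create(1024 * 1024, { blobSequenceNumber: 0 });

  // Subsequent calls leave the existing blob untouched.
  const res = await pageBlobClient.createIfNotExists(1024 * 1024);
  console.log(res.succeeded ? "created" : "already existed");
}

main().catch(console.error);
```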
The start and end offsets must be a multiple of 512.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param body - Data to upload\n * @param offset - Offset of destination page blob\n * @param count - Content length of the body, also number of bytes to be uploaded\n * @param options - Options to the Page Blob Upload Pages operation.\n * @returns Response data for the Page Blob Upload Pages operation.\n */\n public async uploadPages(\n body: HttpRequestBody,\n offset: number,\n count: number,\n options: PageBlobUploadPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPages\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPages(count, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the\n * contents are read from a URL.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url\n *\n * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication\n * @param sourceOffset - The source offset to copy from. 
Pass 0 to copy from the beginning of source page blob\n * @param destOffset - Offset of destination page blob\n * @param count - Number of bytes to be uploaded from source page blob\n * @param options -\n */\n public async uploadPagesFromURL(\n sourceURL: string,\n sourceOffset: number,\n destOffset: number,\n count: number,\n options: PageBlobUploadPagesFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPagesFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPagesFromURL(\n sourceURL,\n rangeToString({ offset: sourceOffset, count }),\n 0,\n rangeToString({ offset: destOffset, count }),\n {\n abortSignal: options.abortSignal,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n sequenceNumberAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Frees the specified pages from the page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param offset - Starting byte position of the pages to clear.\n * @param count - Number of bytes to clear.\n * @param options - Options to the Page Blob Clear Pages operation.\n * @returns Response data for the Page Blob Clear Pages operation.\n */\n public async clearPages(\n offset: number = 0,\n count?: number,\n options: PageBlobClearPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-clearPages\", options);\n try {\n return await this.pageBlobContext.clearPages(0, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of valid page ranges for a page blob or snapshot of a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns Response data for the Page Blob Get Ranges operation.\n */\n 
public async getPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobGetPageRangesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRanges\", options);\n try {\n return await this.pageBlobContext\n .getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesSegment returns a single segment of page ranges starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to PageBlob Get Page Ranges Segment operation.\n */\n private async listPageRangesSegment(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesSegment\", options);\n try {\n return await this.pageBlobContext.getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n marker: marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. 
The marker value is opaque to the client.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItemSegments(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(\n offset,\n count,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItems(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeItemSegments(\n offset,\n count,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges for a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges for a page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRanges()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRanges();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * 
@param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeItems(offset, count, options);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeItemSegments(offset, count, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...options,\n });\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page Range Diff operation.\n */\n public async getPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesDiff\", options);\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshot,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesDiffSegment returns a single segment of page ranges starting from the\n * specified Marker for difference between previous snapshot and the target page blob.\n * Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesDiffSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async listPageRangesDiffSegment(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): Promise {\n const { span, updatedOptions } = 
createSpan(\"PageBlobClient-getPageRangesDiffSegment\", options);\n try {\n return await this.pageBlobContext.getPageRangesDiff({\n abortSignal: options?.abortSignal,\n leaseAccessConditions: options?.conditions,\n modifiedAccessConditions: {\n ...options?.conditions,\n ifTags: options?.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshotOrUrl,\n range: rangeToString({\n offset: offset,\n count: count,\n }),\n marker: marker,\n maxPageSize: options?.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}\n *\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. The marker value is opaque to the client.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItemSegments(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItems(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from 
`blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRangesDiff();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobListPageRangesDiffOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, {\n ...options,\n });\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshot,\n settings.continuationToken,\n {\n maxPageSize: settings.maxPageSize,\n ...options,\n }\n );\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page 
Range Diff operation.\n */\n public async getPageRangesDiffForManagedDisks(\n offset: number,\n count: number,\n prevSnapshotUrl: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\n \"PageBlobClient-GetPageRangesDiffForManagedDisks\",\n options\n );\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevSnapshotUrl,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Resizes the page blob to the specified size (which must be a multiple of 512).\n * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties\n *\n * @param size - Target size\n * @param options - Options to the Page Blob Resize operation.\n * @returns Response data for the Page Blob Resize operation.\n */\n public async resize(\n size: number,\n options: PageBlobResizeOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-resize\", options);\n try {\n return await this.pageBlobContext.resize(size, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets a page blob's sequence number.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.\n * @param sequenceNumber - Required if sequenceNumberAction is max or update\n * @param options - Options to the Page Blob Update Sequence Number operation.\n * @returns Response data for the Page Blob Update Sequence Number operation.\n */\n public async updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n sequenceNumber?: number,\n options: PageBlobUpdateSequenceNumberOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-updateSequenceNumber\", options);\n try {\n return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {\n abortSignal: options.abortSignal,\n blobSequenceNumber: sequenceNumber,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.\n * The snapshot is copied such that only the differential changes between the previously\n * copied snapshot are transferred to 
the destination.\n * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.\n * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob\n * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots\n *\n * @param copySource - Specifies the name of the source page blob snapshot. For example,\n * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Options to the Page Blob Copy Incremental operation.\n * @returns Response data for the Page Blob Copy Incremental operation.\n */\n public async startCopyIncremental(\n copySource: string,\n options: PageBlobStartCopyIncrementalOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-startCopyIncremental\", options);\n try {\n return await this.pageBlobContext.copyIncremental(copySource, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { streamToBuffer2 } from \"./utils/utils.node\";\nimport { BATCH_MAX_PAYLOAD_IN_BYTES } from \"./utils/constants\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n\n const responseLength = await streamToBuffer2(\n batchResponse.readableStreamBody as NodeJS.ReadableStream,\n buffer\n );\n\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n\n return buffer.toString();\n}\n\nexport function utf8ByteLength(str: string): number {\n return Buffer.byteLength(str);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"@azure/core-http\";\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport {\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n HeaderConstants,\n HTTPURLConnection,\n} from \"./utils/constants\";\nimport { getBodyAsText } from \"./BatchUtils\";\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { BatchSubResponse, ParsedBatchResponse } from \"./BatchResponse\";\nimport { logger } from \"./log\";\n\nconst HTTP_HEADER_DELIMITER = \": \";\nconst SPACE_DELIMITER = \" \";\nconst NOT_FOUND = -1;\n\n/**\n * Util class for parsing batch response.\n */\nexport class BatchResponseParser {\n private readonly batchResponse: ServiceSubmitBatchResponseModel;\n private readonly responseBatchBoundary: string;\n private readonly perResponsePrefix: string;\n private readonly batchResponseEnding: string;\n private readonly subRequests: Map;\n\n constructor(\n batchResponse: ServiceSubmitBatchResponseModel,\n subRequests: Map\n ) {\n if (!batchResponse || !batchResponse.contentType) {\n // In special case(reported), server may return invalid content-type which could not be parsed.\n throw new RangeError(\"batchResponse is malformed or doesn't contain valid content-type.\");\n }\n\n if (!subRequests || subRequests.size === 0) {\n // This should be prevent during coding.\n throw new RangeError(\"Invalid state: subRequests is not provided or size is 0.\");\n 
}\n\n this.batchResponse = batchResponse;\n this.subRequests = subRequests;\n this.responseBatchBoundary = this.batchResponse.contentType!.split(\"=\")[1];\n this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`;\n this.batchResponseEnding = `--${this.responseBatchBoundary}--`;\n }\n\n // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response\n public async parseBatchResponse(): Promise {\n // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse\n // sub request's response.\n if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) {\n throw new Error(\n `Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`\n );\n }\n\n const responseBodyAsText = await getBodyAsText(this.batchResponse);\n\n const subResponses = responseBodyAsText\n .split(this.batchResponseEnding)[0] // string after ending is useless\n .split(this.perResponsePrefix)\n .slice(1); // string before first response boundary is useless\n const subResponseCount = subResponses.length;\n\n // Defensive coding in case of potential error parsing.\n // Note: subResponseCount == 1 is special case where sub request is invalid.\n // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.\n // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.\n if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) {\n throw new Error(\"Invalid state: sub responses' count is not equal to sub requests' count.\");\n }\n\n const deserializedSubResponses: Array = new Array(subResponseCount);\n let subResponsesSucceededCount: number = 0;\n let subResponsesFailedCount: number = 0;\n\n // Parse sub subResponses.\n for (let index = 0; index < subResponseCount; index++) {\n const subResponse = subResponses[index];\n const deserializedSubResponse = {} as BatchSubResponse;\n deserializedSubResponse.headers = new HttpHeaders();\n\n const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);\n let subRespHeaderStartFound = false;\n let subRespHeaderEndFound = false;\n let subRespFailed = false;\n let contentId = NOT_FOUND;\n\n for (const responseLine of responseLines) {\n if (!subRespHeaderStartFound) {\n // Convention line to indicate content ID\n if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {\n contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);\n }\n\n // Http version line with status code indicates the start of sub request's response.\n // Example: HTTP/1.1 202 Accepted\n if (responseLine.startsWith(HTTP_VERSION_1_1)) {\n subRespHeaderStartFound = true;\n\n const tokens = responseLine.split(SPACE_DELIMITER);\n deserializedSubResponse.status = parseInt(tokens[1]);\n deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);\n }\n\n continue; // Skip convention headers not specifically for sub request i.e. 
Content-Type: application/http and Content-ID: *\n }\n\n if (responseLine.trim() === \"\") {\n // Sub response's header start line already found, and the first empty line indicates header end line found.\n if (!subRespHeaderEndFound) {\n subRespHeaderEndFound = true;\n }\n\n continue; // Skip empty line\n }\n\n // Note: when code reach here, it indicates subRespHeaderStartFound == true\n if (!subRespHeaderEndFound) {\n if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {\n // Defensive coding to prevent from missing valuable lines.\n throw new Error(\n `Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`\n );\n }\n\n // Parse headers of sub response.\n const tokens = responseLine.split(HTTP_HEADER_DELIMITER);\n deserializedSubResponse.headers.set(tokens[0], tokens[1]);\n if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {\n deserializedSubResponse.errorCode = tokens[1];\n subRespFailed = true;\n }\n } else {\n // Assemble body of sub response.\n if (!deserializedSubResponse.bodyAsText) {\n deserializedSubResponse.bodyAsText = \"\";\n }\n\n deserializedSubResponse.bodyAsText += responseLine;\n }\n } // Inner for end\n\n // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.\n // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it\n // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that\n // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose.\n if (\n contentId !== NOT_FOUND &&\n Number.isInteger(contentId) &&\n contentId >= 0 &&\n contentId < this.subRequests.size &&\n deserializedSubResponses[contentId] === undefined\n ) {\n deserializedSubResponse._request = this.subRequests.get(contentId)!;\n deserializedSubResponses[contentId] = deserializedSubResponse;\n } else {\n logger.error(\n `subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`\n );\n }\n\n if (subRespFailed) {\n subResponsesFailedCount++;\n } else {\n subResponsesSucceededCount++;\n }\n }\n\n return {\n subResponses: deserializedSubResponses,\n subResponsesSucceededCount: subResponsesSucceededCount,\n subResponsesFailedCount: subResponsesFailedCount,\n };\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nenum MutexLockStatus {\n LOCKED,\n UNLOCKED,\n}\n\ntype Callback = (...args: any[]) => any;\n\n/**\n * An async mutex lock.\n */\nexport class Mutex {\n /**\n * Lock for a specific key. 
If the lock has been acquired by another customer, then\n * will wait until getting the lock.\n *\n * @param key - lock key\n */\n public static async lock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n } else {\n this.onUnlockEvent(key, () => {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n });\n }\n });\n }\n\n /**\n * Unlock a key.\n *\n * @param key -\n */\n public static async unlock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === MutexLockStatus.LOCKED) {\n this.emitUnlockEvent(key);\n }\n delete this.keys[key];\n resolve();\n });\n }\n\n private static keys: { [key: string]: MutexLockStatus } = {};\n private static listeners: { [key: string]: Callback[] } = {};\n\n private static onUnlockEvent(key: string, handler: Callback) {\n if (this.listeners[key] === undefined) {\n this.listeners[key] = [handler];\n } else {\n this.listeners[key].push(handler);\n }\n }\n\n private static emitUnlockEvent(key: string) {\n if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {\n const handler = this.listeners[key].shift();\n setImmediate(() => {\n handler!.call(this);\n });\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n generateUuid,\n HttpHeaders,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n WebResource,\n TokenCredential,\n isTokenCredential,\n bearerTokenAuthenticationPolicy,\n isNode,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobClient, BlobDeleteOptions, BlobSetTierOptions } from \"./Clients\";\nimport { AccessTier } from \"./generatedModels\";\nimport { Mutex } from \"./utils/Mutex\";\nimport { Pipeline } from \"./Pipeline\";\nimport { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from \"./utils/utils.common\";\nimport {\n HeaderConstants,\n BATCH_MAX_REQUEST,\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n StorageOAuthScopes,\n} from \"./utils/constants\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { createSpan } from \"./utils/tracing\";\n\n/**\n * A request associated with a batch operation.\n */\nexport interface BatchSubRequest {\n /**\n * The URL of the resource to request operation.\n */\n url: string;\n\n /**\n * The credential used for sub request.\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service.\n * You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n}\n\n/**\n * A BlobBatch represents an aggregated set of operations on blobs.\n * Currently, only `delete` and `setAccessTier` are supported.\n */\nexport class BlobBatch {\n private batchRequest: InnerBatchRequest;\n private readonly batch: string = \"batch\";\n private batchType: \"delete\" | \"setAccessTier\" | undefined;\n\n constructor() {\n this.batchRequest = new InnerBatchRequest();\n }\n\n /**\n * Get the value of Content-Type for a batch request.\n * The value must be multipart/mixed with a batch boundary.\n * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252\n */\n public getMultiPartContentType(): string {\n return this.batchRequest.getMultipartContentType();\n }\n\n /**\n * Get assembled HTTP request body for sub requests.\n */\n public getHttpRequestBody(): string {\n return this.batchRequest.getHttpRequestBody();\n }\n\n /**\n * Get sub requests that are added into the batch request.\n */\n public getSubRequests(): Map {\n return this.batchRequest.getSubRequests();\n }\n\n private async addSubRequestInternal(\n subRequest: BatchSubRequest,\n assembleSubRequestFunc: () => Promise\n ): Promise {\n await Mutex.lock(this.batch);\n\n try {\n this.batchRequest.preAddSubRequest(subRequest);\n await assembleSubRequestFunc();\n this.batchRequest.postAddSubRequest(subRequest);\n } finally {\n await Mutex.unlock(this.batch);\n }\n }\n\n private setBatchType(batchType: \"delete\" | \"setAccessTier\"): void {\n if (!this.batchType) {\n this.batchType = batchType;\n }\n if (this.batchType !== batchType) {\n throw new RangeError(\n `BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`\n );\n }\n }\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlob(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. 
See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param options -\n */\n public async deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise;\n\n public async deleteBlob(\n urlOrBlobClient: string | BlobClient,\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n options?: BlobDeleteOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||\n credentialOrOptions instanceof AnonymousCredential ||\n isTokenCredential(credentialOrOptions))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrOptions;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n options = credentialOrOptions as BlobDeleteOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchDeleteRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"delete\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n blobClient: BlobClient,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobAccessTier(\n urlOrBlobClient: string | BlobClient,\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n options?: BlobSetTierOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n let tier: AccessTier;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||\n credentialOrTier instanceof AnonymousCredential ||\n isTokenCredential(credentialOrTier))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrTier as\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential;\n tier = tierOrOptions as AccessTier;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n tier = credentialOrTier as AccessTier;\n options = tierOrOptions as BlobSetTierOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchSetTierRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"setAccessTier\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(\n tier,\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Inner batch request class which is responsible for assembling and serializing sub requests.\n * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.\n */\nclass InnerBatchRequest {\n private operationCount: number;\n private body: string;\n private subRequests: Map;\n private readonly boundary: string;\n private readonly subRequestPrefix: string;\n private readonly multipartContentType: string;\n private readonly batchRequestEnding: string;\n\n constructor() {\n this.operationCount = 0;\n this.body = \"\";\n\n const tempGuid = generateUuid();\n\n // batch_{batchid}\n this.boundary = `batch_${tempGuid}`;\n // --batch_{batchid}\n // Content-Type: application/http\n // Content-Transfer-Encoding: binary\n this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`;\n // multipart/mixed; boundary=batch_{batchid}\n this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`;\n // --batch_{batchid}--\n this.batchRequestEnding = `--${this.boundary}--`;\n\n this.subRequests = new Map();\n }\n\n /**\n * Create pipeline to assemble sub requests. The idea here is to use existing\n * credential and serialization/deserialization components, with additional policies to\n * filter unnecessary headers, assemble sub requests into request's body\n * and intercept request from going to wire.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public createPipeline(\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential\n ): Pipeline {\n const isAnonymousCreds = credential instanceof AnonymousCredential;\n const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]\n const factories: RequestPolicyFactory[] = new Array(policyFactoryLength);\n\n factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer\n factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers\n if (!isAnonymousCreds) {\n factories[2] = isTokenCredential(credential)\n ? 
attachCredential(\n bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes),\n credential\n )\n : credential;\n }\n factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire\n\n return new Pipeline(factories, {});\n }\n\n public appendSubRequestToBody(request: WebResource) {\n // Start to assemble sub request\n this.body += [\n this.subRequestPrefix, // sub request constant prefix\n `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID\n \"\", // empty line after sub request's content ID\n `${request.method.toString()} ${getURLPathAndQuery(\n request.url\n )} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method\n ].join(HTTP_LINE_ENDING);\n\n for (const header of request.headers.headersArray()) {\n this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`;\n }\n\n this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line\n // No body to assemble for current batch request support\n // End to assemble sub request\n }\n\n public preAddSubRequest(subRequest: BatchSubRequest) {\n if (this.operationCount >= BATCH_MAX_REQUEST) {\n throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`);\n }\n\n // Fast fail if url for sub request is invalid\n const path = getURLPath(subRequest.url);\n if (!path || path === \"\") {\n throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`);\n }\n }\n\n public postAddSubRequest(subRequest: BatchSubRequest) {\n this.subRequests.set(this.operationCount, subRequest);\n this.operationCount++;\n }\n\n // Return the http request body with assembling the ending line to the sub request body.\n public getHttpRequestBody(): string {\n return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`;\n }\n\n public getMultipartContentType(): string {\n return this.multipartContentType;\n }\n\n public getSubRequests(): Map {\n return this.subRequests;\n }\n}\n\nclass BatchRequestAssemblePolicy extends BaseRequestPolicy {\n private batchRequest: InnerBatchRequest;\n private readonly dummyResponse: HttpOperationResponse = {\n request: new WebResource(),\n status: 200,\n headers: new HttpHeaders(),\n };\n\n constructor(\n batchRequest: InnerBatchRequest,\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ) {\n super(nextPolicy, options);\n\n this.batchRequest = batchRequest;\n }\n\n public async sendRequest(request: WebResource): Promise {\n await this.batchRequest.appendSubRequestToBody(request);\n\n return this.dummyResponse; // Intercept request from going to wire\n }\n}\n\nclass BatchRequestAssemblePolicyFactory implements RequestPolicyFactory {\n private batchRequest: InnerBatchRequest;\n\n constructor(batchRequest: InnerBatchRequest) {\n this.batchRequest = batchRequest;\n }\n\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): BatchRequestAssemblePolicy {\n return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);\n }\n}\n\nclass BatchHeaderFilterPolicy extends BaseRequestPolicy {\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(request: WebResource): Promise {\n let xMsHeaderName = \"\";\n\n for (const header of request.headers.headersArray()) {\n if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {\n xMsHeaderName = header.name;\n }\n }\n\n if (xMsHeaderName !== \"\") {\n request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.\n }\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n\nclass BatchHeaderFilterPolicyFactory implements RequestPolicyFactory {\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): BatchHeaderFilterPolicy {\n return new BatchHeaderFilterPolicy(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AccessTier,\n ServiceSubmitBatchHeaders,\n ServiceSubmitBatchOptionalParamsModel,\n ServiceSubmitBatchResponseModel,\n} from \"./generatedModels\";\nimport { ParsedBatchResponse } from \"./BatchResponse\";\nimport { BatchResponseParser } from \"./BatchResponseParser\";\nimport { utf8ByteLength } from \"./BatchUtils\";\nimport { BlobBatch } from \"./BlobBatch\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { HttpResponse, TokenCredential } from \"@azure/core-http\";\nimport { Service, Container } from \"./generated/src/operations\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobDeleteOptions, BlobClient, BlobSetTierOptions } from \"./Clients\";\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike, StoragePipelineOptions, newPipeline, isPipelineLike } from \"./Pipeline\";\nimport { getURLPath } from \"./utils/utils.common\";\n\n/**\n * Options to configure the Service - Submit Batch Optional Params.\n */\nexport interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel {}\n\n/**\n * Contains response data for blob batch operations.\n */\nexport declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse &\n ServiceSubmitBatchHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n };\n\n/**\n * Contains response data for the {@link deleteBlobs} operation.\n */\nexport declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * Contains response data for the {@link setBlobsAccessTier} operation.\n */\nexport declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n */\nexport class BlobBatchClient {\n private serviceOrContainerContext: Service | Container;\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (!credentialOrPipeline) {\n // no credential provided\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n\n const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());\n\n const path = getURLPath(url);\n if (path && path !== \"/\") {\n // Container scoped.\n this.serviceOrContainerContext = new Container(storageClientContext);\n } else {\n this.serviceOrContainerContext = new Service(storageClientContext);\n }\n }\n\n /**\n * Creates a {@link BlobBatch}.\n * A BlobBatch represents an aggregated set of operations on blobs.\n */\n public createBatch(): BlobBatch {\n return new BlobBatch();\n }\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operations will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resources to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlobs(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation(subrequest) will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs to delete.\n * @param options -\n */\n public async deleteBlobs(\n blobClients: BlobClient[],\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n public async deleteBlobs(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as TokenCredential, options);\n } else {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as BlobDeleteOptions);\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs which should have a new tier set.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n blobClients: BlobClient[],\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobsAccessTier(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as TokenCredential,\n tierOrOptions as AccessTier,\n options\n );\n } else {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as AccessTier,\n tierOrOptions as BlobSetTierOptions\n );\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Submit batch request which consists of multiple subrequests.\n *\n * Get `blobBatchClient` and other details before running the snippets.\n * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`\n *\n * Example usage:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.deleteBlob(urlInString0, credential0);\n * await batchRequest.deleteBlob(urlInString1, credential1, {\n * deleteSnapshots: \"include\"\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * Example using a lease:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.setBlobAccessTier(blockBlobClient0, \"Cool\");\n * await batchRequest.setBlobAccessTier(blockBlobClient1, \"Cool\", {\n * conditions: { leaseId: leaseId }\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @param batchRequest - A set of Delete or SetTier operations.\n * @param options -\n */\n public async submitBatch(\n batchRequest: BlobBatch,\n options: BlobBatchSubmitBatchOptionalParams = {}\n ): Promise {\n if (!batchRequest 
|| batchRequest.getSubRequests().size === 0) {\n throw new RangeError(\"Batch request should contain one or more sub requests.\");\n }\n\n const { span, updatedOptions } = createSpan(\"BlobBatchClient-submitBatch\", options);\n try {\n const batchRequestBody = batchRequest.getHttpRequestBody();\n\n // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now.\n const rawBatchResponse: ServiceSubmitBatchResponseModel =\n await this.serviceOrContainerContext.submitBatch(\n utf8ByteLength(batchRequestBody),\n batchRequest.getMultiPartContentType(),\n batchRequestBody,\n {\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202).\n const batchResponseParser = new BatchResponseParser(\n rawBatchResponse,\n batchRequest.getSubRequests()\n );\n const responseSummary = await batchResponseParser.parseBatchResponse();\n\n const res: BlobBatchSubmitBatchResponse = {\n _response: rawBatchResponse._response,\n contentType: rawBatchResponse.contentType,\n errorCode: rawBatchResponse.errorCode,\n requestId: rawBatchResponse.requestId,\n clientRequestId: rawBatchResponse.clientRequestId,\n version: rawBatchResponse.version,\n subResponses: responseSummary.subResponses,\n subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,\n subResponsesFailedCount: responseSummary.subResponsesFailedCount,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { Container } from \"./generated/src/operations\";\nimport {\n BlobDeleteResponse,\n BlobPrefix,\n BlobProperties,\n BlockBlobUploadResponse,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ContainerEncryptionScope,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesResponse,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataResponse,\n FilterBlobItem,\n FilterBlobSegment,\n FilterBlobSegmentModel,\n LeaseAccessConditions,\n ListBlobsFlatSegmentResponseModel,\n ListBlobsHierarchySegmentResponseModel,\n PublicAccessType,\n SignedIdentifierModel,\n} from \"./generatedModels\";\nimport {\n Metadata,\n ObjectReplicationPolicy,\n Tags,\n ContainerRequestConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n BlobNameToString,\n ConvertInternalResponseOfListBlobFlat,\n ConvertInternalResponseOfListBlobHierarchy,\n EscapePath,\n 
extractConnectionStringParts,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n toTags,\n truncatedISO8061Date,\n} from \"./utils/utils.common\";\nimport { ContainerSASPermissions } from \"./sas/ContainerSASPermissions\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n AppendBlobClient,\n BlobClient,\n BlobDeleteOptions,\n BlockBlobClient,\n BlockBlobUploadOptions,\n CommonGenerateSasUrlOptions,\n PageBlobClient,\n} from \"./Clients\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { ListBlobsIncludeItem } from \"./generated/src\";\n\n/**\n * Options to configure {@link ContainerClient.create} operation.\n */\nexport interface ContainerCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the container.\n */\n metadata?: Metadata;\n /**\n * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include:\n * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account.\n * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request.\n */\n access?: PublicAccessType;\n /**\n * Container encryption scope info.\n */\n containerEncryptionScope?: ContainerEncryptionScope;\n}\n\n/**\n * Options to configure {@link ContainerClient.getProperties} operation.\n */\nexport interface ContainerGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.delete} operation.\n */\nexport interface ContainerDeleteMethodOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting the container.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.exists} operation.\n */\nexport interface ContainerExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure {@link ContainerClient.setMetadata} operation.\n */\nexport interface ContainerSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * 
For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.getAccessPolicy} operation.\n */\nexport interface ContainerGetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Signed identifier.\n */\nexport interface SignedIdentifier {\n /**\n * a unique id\n */\n id: string;\n /**\n * Access Policy\n */\n accessPolicy: {\n /**\n * Optional. The date-time the policy is active\n */\n startsOn?: Date;\n /**\n * Optional. The date-time the policy expires\n */\n expiresOn?: Date;\n /**\n * The permissions for the acl policy\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n */\n permissions?: string;\n };\n}\n\n/**\n * Contains response data for the {@link ContainerClient.getAccessPolicy} operation.\n */\nexport declare type ContainerGetAccessPolicyResponse = {\n signedIdentifiers: SignedIdentifier[];\n} & ContainerGetAccessPolicyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: SignedIdentifierModel[];\n };\n };\n\n/**\n * Options to configure {@link ContainerClient.setAccessPolicy} operation.\n */\nexport interface ContainerSetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting the access policy.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure Container - Acquire Lease operation.\n */\nexport interface ContainerAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Release Lease operation.\n */\nexport interface ContainerReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Renew Lease operation.\n */\nexport interface ContainerRenewLeaseOptions extends 
CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Break Lease operation.\n */\nexport interface ContainerBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Change Lease operation.\n */\nexport interface ContainerChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.deleteBlob} operation.\n */\nexport interface ContainerDeleteBlobOptions extends BlobDeleteOptions {\n /**\n * An opaque DateTime value that, when present, specifies the version\n * of the blob to delete. It's for service version 2019-10-10 and newer.\n */\n versionId?: string;\n}\n\n/**\n * Options to configure Container - List Segment operations.\n *\n * See:\n * - {@link ContainerClient.listSegments}\n * - {@link ContainerClient.listBlobFlatSegment}\n * - {@link ContainerClient.listBlobHierarchySegment}\n * - {@link ContainerClient.listHierarchySegments}\n * - {@link ContainerClient.listItemsByHierarchy}\n */\ninterface ContainerListBlobsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify one or more datasets to include in the response.\n */\n include?: ListBlobsIncludeItem[];\n}\n\n/**\n * An interface representing BlobHierarchyListSegment.\n */\nexport interface BlobHierarchyListSegment {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface ListBlobsHierarchySegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobHierarchySegment operation.\n */\nexport type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse &\n ContainerListBlobHierarchySegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobHierarchySegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsHierarchySegmentResponseModel;\n };\n };\n\n/**\n * An Azure Storage blob\n */\nexport interface BlobItem {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n properties: BlobProperties;\n metadata?: { [propertyName: string]: string };\n tags?: Tags;\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n hasVersionsOnly?: boolean;\n}\n\n/**\n * An interface representing BlobFlatListSegment.\n */\nexport interface BlobFlatListSegment {\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface ListBlobsFlatSegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobFlatSegment operation.\n */\nexport type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse &\n ContainerListBlobFlatSegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobFlatSegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsFlatSegmentResponseModel;\n };\n };\n\n/**\n * Options to configure Container - List Blobs operations.\n *\n * See:\n * - {@link ContainerClient.listBlobsFlat}\n * - {@link ContainerClient.listBlobsByHierarchy}\n */\nexport interface ContainerListBlobsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n\n /**\n * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response.\n */\n includeCopy?: boolean;\n /**\n * Specifies whether soft deleted blobs should be 
included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether blob metadata be returned in the response.\n */\n includeMetadata?: boolean;\n /**\n * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response.\n */\n includeSnapshots?: boolean;\n /**\n * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response.\n */\n includeVersions?: boolean;\n /**\n * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response.\n */\n includeUncommitedBlobs?: boolean;\n /**\n * Specifies whether blob tags be returned in the response.\n */\n includeTags?: boolean;\n /**\n * Specifies whether deleted blob with versions be returned in the response.\n */\n includeDeletedWithVersions?: boolean;\n /**\n * Specifies whether blob immutability policy be returned in the response.\n */\n includeImmutabilityPolicy?: boolean;\n /**\n * Specifies whether blob legal hold be returned in the response.\n */\n includeLegalHold?: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.createIfNotExists} operation.\n */\nexport interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse {\n /**\n * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.deleteIfExists} operation.\n */\nexport interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse {\n /**\n * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Options to configure {@link ContainerClient.generateSasUrl} operation.\n */\nexport interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: ContainerSASPermissions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.findBlobsByTagsSegment} operation.\n */\ninterface ContainerFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ContainerFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ContainerFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.\n */\nexport class ContainerClient extends StorageClient {\n /**\n * containerContext provided by protocol layer.\n */\n private containerContext: Container;\n\n private _containerName: string;\n\n /**\n * The name of the container.\n */\n public get containerName(): string {\n return this._containerName;\n }\n /**\n *\n * Creates an instance of ContainerClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions);\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName parameter\");\n }\n super(url, pipeline);\n this._containerName = this.getContainerNameFromUrl();\n this.containerContext = new Container(this.storageClientContext);\n }\n\n /**\n * Creates a new container under the specified account. If the container with\n * the same name already exists, the operation fails.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n *\n * @param options - Options to Container Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * const createContainerResponse = await containerClient.create();\n * console.log(\"Container was created successfully\", createContainerResponse.requestId);\n * ```\n */\n public async create(options: ContainerCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-create\", options);\n try {\n // Spread operator in destructuring assignments,\n // this will filter out unwanted properties from the response object into result object\n return await this.containerContext.create({\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new container under the specified account. 
If the container with\n * the same name already exists, it is not changed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n *\n * @param options -\n */\n public async createIfNotExists(\n options: ContainerCreateOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-createIfNotExists\", options);\n try {\n const res = await this.create(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message:\n \"Expected exception when creating a container only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure container resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing container might be deleted by other clients or\n * applications. Vice versa new containers with the same name might be added by other clients or\n * applications after this function completes.\n *\n * @param options -\n */\n public async exists(options: ContainerExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-exists\", options);\n try {\n await this.getProperties({\n abortSignal: options.abortSignal,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when checking container existence\",\n });\n return false;\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a {@link BlobClient}\n *\n * @param blobName - A blob name\n * @returns A new BlobClient object for the given blob name.\n */\n public getBlobClient(blobName: string): BlobClient {\n return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);\n }\n\n /**\n * Creates an {@link AppendBlobClient}\n *\n * @param blobName - An append blob name\n */\n public getAppendBlobClient(blobName: string): AppendBlobClient {\n return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);\n }\n\n /**\n * Creates a {@link BlockBlobClient}\n *\n * @param blobName - A block blob name\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n *\n * const blockBlobClient = containerClient.getBlockBlobClient(\"\");\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public getBlockBlobClient(blobName: string): BlockBlobClient {\n return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);\n }\n\n /**\n * Creates a {@link PageBlobClient}\n *\n * @param blobName - A page blob name\n */\n public getPageBlobClient(blobName: string): PageBlobClient {\n return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);\n }\n\n /**\n * Returns all user-defined metadata and system properties for the specified\n * container. 
The data returned does not include the container's list of blobs.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Options to Container Get Properties operation.\n */\n public async getProperties(\n options: ContainerGetPropertiesOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getProperties\", options);\n try {\n return await this.containerContext.getProperties({\n abortSignal: options.abortSignal,\n ...options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion. The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async delete(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-delete\", options);\n try {\n return await this.containerContext.delete({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion if it exists. 
The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async deleteIfExists(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteIfExists\", options);\n\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a container only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets one or more user-defined name-value pairs for the specified container.\n *\n * If no option provided, or no metadata defined in the parameter, the container\n * metadata will be removed.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Options to Container Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: ContainerSetMetadataOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n if (options.conditions.ifUnmodifiedSince) {\n throw new RangeError(\n \"the IfUnmodifiedSince must have their default values because they are ignored by the blob service\"\n );\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-setMetadata\", options);\n\n try {\n return await this.containerContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the permissions for the specified container. 
The permissions indicate\n * whether container data may be accessed publicly.\n *\n * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.\n * For example, new Date(\"2018-12-31T03:44:23.8827891Z\").toISOString() will get \"2018-12-31T03:44:23.882Z\".\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl\n *\n * @param options - Options to Container Get Access Policy operation.\n */\n public async getAccessPolicy(\n options: ContainerGetAccessPolicyOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getAccessPolicy\", options);\n\n try {\n const response = await this.containerContext.getAccessPolicy({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const res: ContainerGetAccessPolicyResponse = {\n _response: response._response,\n blobPublicAccess: response.blobPublicAccess,\n date: response.date,\n etag: response.etag,\n errorCode: response.errorCode,\n lastModified: response.lastModified,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n signedIdentifiers: [],\n version: response.version,\n };\n\n for (const identifier of response) {\n let accessPolicy: any = undefined;\n if (identifier.accessPolicy) {\n accessPolicy = {\n permissions: identifier.accessPolicy.permissions,\n };\n\n if (identifier.accessPolicy.expiresOn) {\n accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);\n }\n\n if (identifier.accessPolicy.startsOn) {\n accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);\n }\n }\n\n res.signedIdentifiers.push({\n accessPolicy,\n id: identifier.id,\n });\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the permissions for the specified container. The permissions indicate\n * whether blobs in a container may be accessed publicly.\n *\n * When you set permissions for a container, the existing permissions are replaced.\n * If no access or containerAcl provided, the existing container ACL will be\n * removed.\n *\n * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.\n * During this interval, a shared access signature that is associated with the stored access policy will\n * fail with status code 403 (Forbidden), until the access policy becomes active.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n *\n * @param access - The level of public access to data in the container.\n * @param containerAcl - Array of elements each having a unique Id and details of the access policy.\n * @param options - Options to Container Set Access Policy operation.\n */\n public async setAccessPolicy(\n access?: PublicAccessType,\n containerAcl?: SignedIdentifier[],\n options: ContainerSetAccessPolicyOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"ContainerClient-setAccessPolicy\", options);\n try {\n const acl: SignedIdentifierModel[] = [];\n for (const identifier of containerAcl || []) {\n acl.push({\n accessPolicy: {\n expiresOn: identifier.accessPolicy.expiresOn\n ? 
truncatedISO8061Date(identifier.accessPolicy.expiresOn)\n : \"\",\n permissions: identifier.accessPolicy.permissions,\n startsOn: identifier.accessPolicy.startsOn\n ? truncatedISO8061Date(identifier.accessPolicy.startsOn)\n : \"\",\n },\n id: identifier.id,\n });\n }\n\n return await this.containerContext.setAccessPolicy({\n abortSignal: options.abortSignal,\n access,\n containerAcl: acl,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the container.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the container.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n *\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},\n * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better\n * performance with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param blobName - Name of the block blob to create or update.\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to configure the Block Blob Upload operation.\n * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.\n */\n public async uploadBlockBlob(\n blobName: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise<{ blockBlobClient: BlockBlobClient; response: BlockBlobUploadResponse }> {\n const { span, updatedOptions } = createSpan(\"ContainerClient-uploadBlockBlob\", options);\n try {\n const blockBlobClient = this.getBlockBlobClient(blobName);\n const response = await blockBlobClient.upload(body, contentLength, updatedOptions);\n return {\n blockBlobClient,\n response,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param blobName -\n * @param options - Options to Blob Delete operation.\n * @returns Block blob deletion response data.\n */\n public async deleteBlob(\n blobName: string,\n options: ContainerDeleteBlobOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteBlob\", options);\n try {\n let blobClient = this.getBlobClient(blobName);\n if (options.versionId) {\n blobClient = blobClient.withVersion(options.versionId);\n }\n return await blobClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobFlatSegment returns a single segment of blobs starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call listBlobsFlatSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Flat Segment operation.\n */\n private async listBlobFlatSegment(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-listBlobFlatSegment\", options);\n try {\n const response = await this.containerContext.listBlobFlatSegment({\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerListBlobFlatSegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobHierarchySegment returns a single segment of blobs starting from\n * the specified Marker. Use an empty Marker to start enumeration from the\n * beginning. 
After getting a segment, process it, and then call listBlobsHierarchicalSegment\n * again (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Hierarchy Segment operation.\n */\n private async listBlobHierarchySegment(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"ContainerClient-listBlobHierarchySegment\",\n options\n );\n try {\n const response = await this.containerContext.listBlobHierarchySegment(delimiter, {\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerListBlobHierarchySegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefix = {\n ...blobPrefixInternal,\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse\n *\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listSegments(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsFlatSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options);\n marker = listBlobsFlatSegmentResponse.continuationToken;\n yield await listBlobsFlatSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link BlobItem} objects\n *\n * @param options - Options to list blobs operation.\n */\n private async *listItems(\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) {\n yield* listBlobsFlatSegmentResponse.segment.blobItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the containerClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\");`\n * let i = 1;\n * for await (const blob of containerClient.listBlobsFlat()) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = containerClient.listBlobsFlat();\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * @param options - Options to list blobs.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listBlobsFlat(\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator {\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n 
include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include: include } : {}),\n };\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listItems(updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listHierarchySegments(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsHierarchySegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(\n delimiter,\n marker,\n options\n );\n marker = listBlobsHierarchySegmentResponse.continuationToken;\n yield await listBlobsHierarchySegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n private async *listItemsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator<({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem)> {\n let marker: string | undefined;\n for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(\n delimiter,\n marker,\n options\n )) {\n const segment = listBlobsHierarchySegmentResponse.segment;\n if (segment.blobPrefixes) {\n for (const prefix of segment.blobPrefixes) {\n yield {\n kind: \"prefix\",\n ...prefix,\n };\n }\n }\n for (const blob of segment.blobItems) {\n yield { kind: \"blob\", ...blob };\n }\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs by hierarchy.\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * for await (const item of 
containerClient.listBlobsByHierarchy(\"/\")) {\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let iter = containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix1/\" });\n * let entity = await iter.next();\n * while (!entity.done) {\n * let item = entity.value;\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * entity = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page\");\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\").byPage()) {\n * const segment = response.segment;\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a max page size:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page, specifying a prefix and a max page size\");\n *\n * let i = 1;\n * for await (const response of containerClient\n * .listBlobsByHierarchy(\"/\", { prefix: \"prefix2/sub1/\" })\n * .byPage({ maxPageSize: 2 })) {\n * console.log(`Page ${i++}`);\n * const segment = response.segment;\n *\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n *\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n public listBlobsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator<\n ({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem),\n ContainerListBlobHierarchySegmentResponse\n > {\n if (delimiter === \"\") {\n throw new RangeError(\"delimiter should contain one or more characters\");\n }\n\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? 
{ include: include } : {}),\n };\n // AsyncIterableIterator to iterate over blob prefixes and blobs\n const iter = this.listItemsByHierarchy(delimiter, updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n async next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listHierarchySegments(delimiter, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in the container whose tags\n * match a given search expression.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-findBlobsByTagsSegment\", options);\n\n try {\n const response = await this.containerContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. 
The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified container.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of containerClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = containerClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = containerClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n 
* // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ContainerFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n private getContainerNameFromUrl(): string {\n let containerName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`\n // http://localhost:10001/devstoreaccount1/containername\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername\".\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername\n // .getPath() -> /devstoreaccount1/containername\n containerName = parsedUrl.getPath()!.split(\"/\")[2];\n } else {\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n }\n\n // decode the encoded containerName - to get all the special characters that might be present in it\n containerName = decodeURIComponent(containerName);\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return containerName;\n } catch (error: any) {\n throw new Error(\"Unable to extract containerName with provided information.\");\n }\n }\n\n /**\n * Only available for ContainerClient constructed with a shared key credential.\n *\n * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. 
The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this container.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the\n * values are set, this should be serialized with toString and set as the permissions field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class AccountSASPermissions {\n /**\n * Parse initializes the AccountSASPermissions fields from a string.\n *\n * @param permissions -\n */\n public static parse(permissions: string): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n\n for (const c of permissions) {\n switch (c) {\n case \"r\":\n accountSASPermissions.read = true;\n break;\n case \"w\":\n accountSASPermissions.write = true;\n break;\n case \"d\":\n accountSASPermissions.delete = true;\n break;\n case \"x\":\n accountSASPermissions.deleteVersion = true;\n break;\n case \"l\":\n accountSASPermissions.list = true;\n break;\n case \"a\":\n accountSASPermissions.add = true;\n break;\n case \"c\":\n accountSASPermissions.create = true;\n break;\n case \"u\":\n accountSASPermissions.update = true;\n break;\n case \"p\":\n accountSASPermissions.process = true;\n break;\n case \"t\":\n accountSASPermissions.tag = true;\n break;\n case \"f\":\n accountSASPermissions.filter = true;\n break;\n case \"i\":\n accountSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n accountSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission character: ${c}`);\n }\n }\n\n return accountSASPermissions;\n }\n\n /**\n * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n if (permissionLike.read) {\n accountSASPermissions.read = true;\n }\n if (permissionLike.write) {\n 
accountSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n accountSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n accountSASPermissions.deleteVersion = true;\n }\n if (permissionLike.filter) {\n accountSASPermissions.filter = true;\n }\n if (permissionLike.tag) {\n accountSASPermissions.tag = true;\n }\n if (permissionLike.list) {\n accountSASPermissions.list = true;\n }\n if (permissionLike.add) {\n accountSASPermissions.add = true;\n }\n if (permissionLike.create) {\n accountSASPermissions.create = true;\n }\n if (permissionLike.update) {\n accountSASPermissions.update = true;\n }\n if (permissionLike.process) {\n accountSASPermissions.process = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n accountSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n accountSASPermissions.permanentDelete = true;\n }\n return accountSASPermissions;\n }\n\n /**\n * Permission to read resources and list queues and tables granted.\n */\n public read: boolean = false;\n\n /**\n * Permission to write resources granted.\n */\n public write: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public delete: boolean = false;\n\n /**\n * Permission to delete versions granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n public list: boolean = false;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n public add: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public create: boolean = false;\n\n /**\n * Permissions to update messages and table entities granted.\n */\n public update: boolean = false;\n\n /**\n * Permission to get and delete messages granted.\n */\n public process: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Permission to filter blobs.\n */\n public filter: boolean = false;\n\n /**\n * Permission to set immutability policy.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Produces the SAS permissions string for an Azure Storage account.\n * Call this method to set AccountSASSignatureValues Permissions field.\n *\n * Using this method will guarantee the resource types are in\n * an order accepted by the service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n // The order of the characters should be as specified here to ensure correctness:\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n // Use a string array instead of string concatenating += operator for performance\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.filter) {\n permissions.push(\"f\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.update) {\n permissions.push(\"u\");\n }\n if (this.process) {\n permissions.push(\"p\");\n }\n if (this.setImmutabilityPolicy) 
{\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like an account SAS permission.\n * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface AccountSASPermissionsLike {\n /**\n * Permission to read resources and list queues and tables granted.\n */\n read?: boolean;\n\n /**\n * Permission to write resources granted.\n */\n write?: boolean;\n\n /**\n * Permission to delete blobs and files granted.\n */\n delete?: boolean;\n\n /**\n * Permission to delete versions granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n list?: boolean;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n add?: boolean;\n\n /**\n * Permission to create blobs and files granted.\n */\n create?: boolean;\n\n /**\n * Permissions to update messages and table entities granted.\n */\n update?: boolean;\n\n /**\n * Permission to get and delete messages granted.\n */\n process?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Permission to filter blobs.\n */\n filter?: boolean;\n\n /**\n * Permission to set immutability policy.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the\n * values are set, this should be serialized with toString and set as the resources field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but\n * the order of the resources is particular and this class guarantees correctness.\n */\nexport class AccountSASResourceTypes {\n /**\n * Creates an {@link AccountSASResourceTypes} from the specified resource types string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid resource type.\n *\n * @param resourceTypes -\n */\n public static parse(resourceTypes: string): AccountSASResourceTypes {\n const accountSASResourceTypes = new AccountSASResourceTypes();\n\n for (const c of resourceTypes) {\n switch (c) {\n case \"s\":\n accountSASResourceTypes.service = true;\n break;\n case \"c\":\n accountSASResourceTypes.container = true;\n break;\n case \"o\":\n accountSASResourceTypes.object = true;\n break;\n default:\n throw new RangeError(`Invalid resource type: ${c}`);\n }\n }\n\n return accountSASResourceTypes;\n }\n\n /**\n * Permission to access service level APIs granted.\n */\n public service: boolean = false;\n\n /**\n * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.\n */\n public container: boolean = false;\n\n /**\n * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.\n */\n public object: boolean = false;\n\n /**\n * Converts the given resource types to a string.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n const resourceTypes: string[] = [];\n if (this.service) {\n resourceTypes.push(\"s\");\n }\n if (this.container) {\n resourceTypes.push(\"c\");\n }\n if (this.object) {\n resourceTypes.push(\"o\");\n }\n return resourceTypes.join(\"\");\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that service. Once all the\n * values are set, this should be serialized with toString and set as the services field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but\n * the order of the services is particular and this class guarantees correctness.\n */\nexport class AccountSASServices {\n /**\n * Creates an {@link AccountSASServices} from the specified services string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid service.\n *\n * @param services -\n */\n public static parse(services: string): AccountSASServices {\n const accountSASServices = new AccountSASServices();\n\n for (const c of services) {\n switch (c) {\n case \"b\":\n accountSASServices.blob = true;\n break;\n case \"f\":\n accountSASServices.file = true;\n break;\n case \"q\":\n accountSASServices.queue = true;\n break;\n case \"t\":\n accountSASServices.table = true;\n break;\n default:\n throw new RangeError(`Invalid service character: ${c}`);\n }\n }\n\n return accountSASServices;\n }\n\n /**\n * Permission to access blob resources granted.\n */\n public blob: boolean = false;\n\n /**\n * Permission to access file resources granted.\n */\n public file: boolean = false;\n\n /**\n * Permission to access queue resources granted.\n */\n public queue: boolean = false;\n\n /**\n * Permission to access table resources granted.\n */\n public table: boolean = false;\n\n /**\n * Converts the given services to a string.\n *\n */\n public toString(): string {\n const services: string[] = [];\n if (this.blob) {\n services.push(\"b\");\n }\n if (this.table) {\n services.push(\"t\");\n }\n if (this.queue) {\n services.push(\"q\");\n }\n if (this.file) {\n services.push(\"f\");\n }\n return services.join(\"\");\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccountSASPermissions } from \"./AccountSASPermissions\";\nimport { AccountSASResourceTypes } from \"./AccountSASResourceTypes\";\nimport { AccountSASServices } from \"./AccountSASServices\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once\n * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation\n * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters}\n * exist because the former is mutable and a logical representation while the latter is immutable and used to generate\n * actual REST requests.\n *\n * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1\n * for more conceptual information on SAS\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n * for descriptions of the parameters, including which are required\n */\nexport interface AccountSASSignatureValues {\n /**\n * If not provided, this defaults to the service version targeted by this version of the library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * The time after which the SAS will no longer work.\n */\n expiresOn: Date;\n\n /**\n * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help\n * constructing the permissions string.\n */\n permissions: AccountSASPermissions;\n\n /**\n * Optional. 
IP range allowed.\n */\n ipRange?: SasIPRange;\n\n /**\n * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to\n * construct this value.\n */\n services: string;\n\n /**\n * The values that indicate the resource types accessible with this SAS. Please refer\n * to {@link AccountSASResourceTypes} to construct this value.\n */\n resourceTypes: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual\n * REST request.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @param accountSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateAccountSASQueryParameters(\n accountSASSignatureValues: AccountSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n const version = accountSASSignatureValues.version\n ? accountSASSignatureValues.version\n : SERVICE_VERSION;\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'x' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'y' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 't' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.filter &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 'f' permission.\");\n }\n\n if (accountSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n const parsedPermissions = AccountSASPermissions.parse(\n accountSASSignatureValues.permissions.toString()\n );\n const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();\n const parsedResourceTypes = AccountSASResourceTypes.parse(\n accountSASSignatureValues.resourceTypes\n ).toString();\n\n let stringToSign: string;\n\n if (version >= \"2020-12-06\") {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n accountSASSignatureValues.encryptionScope ? 
accountSASSignatureValues.encryptionScope : \"\",\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n } else {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n }\n\n const signature: string = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n version,\n signature,\n parsedPermissions.toString(),\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.protocol,\n accountSASSignatureValues.startsOn,\n accountSASSignatureValues.expiresOn,\n accountSASSignatureValues.ipRange,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n accountSASSignatureValues.encryptionScope\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport {\n TokenCredential,\n isTokenCredential,\n isNode,\n HttpResponse,\n getDefaultProxySettings,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n ServiceGetUserDelegationKeyHeaders,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ServiceGetPropertiesResponse,\n BlobServiceProperties,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentResponse,\n ContainerItem,\n UserDelegationKeyModel,\n ContainerUndeleteResponse,\n FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n ContainerRenameResponse,\n LeaseAccessConditions,\n FilterBlobSegment,\n FilterBlobItem,\n} from \"./generatedModels\";\nimport { Container, Service } from \"./generated/src/operations\";\nimport { newPipeline, StoragePipelineOptions, PipelineLike, isPipelineLike } from \"./Pipeline\";\nimport {\n ContainerClient,\n ContainerCreateOptions,\n ContainerDeleteMethodOptions,\n} from \"./ContainerClient\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n toTags,\n} from \"./utils/utils.common\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport \"@azure/core-paging\";\nimport { PageSettings, PagedAsyncIterableIterator } from \"@azure/core-paging\";\nimport { truncatedISO8061Date } from \"./utils/utils.common\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { AccountSASPermissions } from \"./sas/AccountSASPermissions\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateAccountSASQueryParameters } from \"./sas/AccountSASSignatureValues\";\nimport { AccountSASServices } from \"./sas/AccountSASServices\";\nimport { ListContainersIncludeType } from \"./generated/src\";\n\n/**\n * Options to configure the {@link 
BlobServiceClient.getProperties} operation.\n */\nexport interface ServiceGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.setProperties} operation.\n */\nexport interface ServiceSetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getAccountInfo} operation.\n */\nexport interface ServiceGetAccountInfoOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getStatistics} operation.\n */\nexport interface ServiceGetStatisticsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the Service - Get User Delegation Key.\n */\nexport interface ServiceGetUserDelegationKeyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainerSegment} operation.\n */\ninterface ServiceListContainersSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify that the container's metadata be returned as part of the response\n * body. 
Possible values include: 'metadata'\n */\n include?: ListContainersIncludeType | ListContainersIncludeType[];\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainers} operation.\n */\nexport interface ServiceListContainersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies whether the container's metadata\n * should be returned as part of the response body.\n */\n includeMetadata?: boolean;\n\n /**\n * Specifies whether soft deleted containers should be included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether system containers should be included in the response.\n */\n includeSystem?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTagsSegment} operation.\n */\ninterface ServiceFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ServiceFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ServiceFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A user delegation key.\n */\nexport interface UserDelegationKey {\n /**\n * The Azure Active Directory object ID in GUID format.\n */\n signedObjectId: string;\n /**\n * The Azure Active Directory tenant ID in GUID format.\n */\n signedTenantId: string;\n /**\n * The date-time the key is active.\n */\n signedStartsOn: Date;\n /**\n * The date-time the key expires.\n */\n signedExpiresOn: Date;\n /**\n * Abbreviation of the Azure Storage service that accepts the key.\n */\n signedService: string;\n /**\n * The service version that created the key.\n */\n signedVersion: string;\n /**\n * The key as a base64 string.\n */\n value: string;\n}\n\n/**\n * 
Contains response data for the {@link getUserDelegationKey} operation.\n */\nexport declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey &\n ServiceGetUserDelegationKeyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: UserDelegationKeyModel;\n };\n };\n\n/**\n * Options to configure {@link BlobServiceClient.undeleteContainer} operation.\n */\nexport interface ServiceUndeleteContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies the new name of the restored container.\n * Will use its original name if this is not specified.\n * @deprecated Restore container to a different name is not supported by service anymore.\n */\n destinationContainerName?: string;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.renameContainer} operation.\n */\nexport interface ServiceRenameContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Condition to meet for the source container.\n */\n sourceCondition?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation.\n */\nexport interface ServiceGenerateAccountSasUrlOptions {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you\n * to manipulate blob containers.\n */\nexport class BlobServiceClient extends StorageClient {\n /**\n * serviceContext provided by protocol layer.\n */\n private serviceContext: Service;\n\n /**\n *\n * Creates an instance of BlobServiceClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n public static fromConnectionString(\n connectionString: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ): BlobServiceClient {\n options = options || {};\n const extractedCreds = extractConnectionStringParts(connectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n const pipeline = newPipeline(sharedKeyCredential, options);\n return new BlobServiceClient(extractedCreds.url, pipeline);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n const pipeline = newPipeline(new AnonymousCredential(), options);\n return new BlobServiceClient(extractedCreds.url + \"?\" + extractedCreds.accountSas, pipeline);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n }\n\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n *\n * Example using DefaultAzureCredential from `@azure/identity`:\n *\n * ```js\n * const account = \"\";\n *\n * const defaultAzureCredential = new DefaultAzureCredential();\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * defaultAzureCredential\n * );\n * ```\n *\n * Example using an account name/key:\n *\n * ```js\n * const account = \"\"\n * const sharedKeyCredential = new StorageSharedKeyCredential(account, \"\");\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * sharedKeyCredential\n * );\n * ```\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (\n (isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||\n credentialOrPipeline instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipeline)\n ) {\n pipeline = newPipeline(credentialOrPipeline, options);\n } else {\n // The second parameter is undefined. Use anonymous credential\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n super(url, pipeline);\n this.serviceContext = new Service(this.storageClientContext);\n }\n\n /**\n * Creates a {@link ContainerClient} object\n *\n * @param containerName - A container name\n * @returns A new ContainerClient object for the given container name.\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * ```\n */\n public getContainerClient(containerName: string): ContainerClient {\n return new ContainerClient(\n appendToURLPath(this.url, encodeURIComponent(containerName)),\n this.pipeline\n );\n }\n\n /**\n * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param containerName - Name of the container to create.\n * @param options - Options to configure Container Create operation.\n * @returns Container creation response and the corresponding container client.\n */\n public async createContainer(\n containerName: string,\n options: ContainerCreateOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerCreateResponse: ContainerCreateResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-createContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n const containerCreateResponse = await containerClient.create(updatedOptions);\n return {\n containerClient,\n containerCreateResponse,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Deletes a Blob container.\n *\n * @param containerName - Name of the container to delete.\n * @param options - Options to configure Container Delete operation.\n * @returns Container deletion response.\n */\n public async deleteContainer(\n containerName: string,\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-deleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n return await containerClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restore a previously deleted Blob container.\n * This API is only functional if Container Soft Delete is enabled for the storage 
account associated with the container.\n *\n * @param deletedContainerName - Name of the previously deleted container.\n * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.\n * @param options - Options to configure Container Restore operation.\n * @returns Container deletion response.\n */\n public async undeleteContainer(\n deletedContainerName: string,\n deletedContainerVersion: string,\n options: ServiceUndeleteContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerUndeleteResponse: ContainerUndeleteResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-undeleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(\n options.destinationContainerName || deletedContainerName\n );\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerUndeleteResponse = await containerContext.restore({\n deletedContainerName,\n deletedContainerVersion,\n ...updatedOptions,\n });\n return { containerClient, containerUndeleteResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Rename an existing Blob Container.\n *\n * @param sourceContainerName - The name of the source container.\n * @param destinationContainerName - The new name of the container.\n * @param options - Options to configure Container Rename operation.\n */\n /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */\n // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.\n private async renameContainer(\n sourceContainerName: string,\n destinationContainerName: string,\n options: ServiceRenameContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerRenameResponse: ContainerRenameResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-renameContainer\", options);\n try {\n const containerClient = this.getContainerClient(destinationContainerName);\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerRenameResponse = await containerContext.rename(sourceContainerName, {\n ...updatedOptions,\n sourceLeaseId: options.sourceCondition?.leaseId,\n });\n return { containerClient, containerRenameResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the properties of a storage account’s Blob service, including properties\n * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * @param options - Options to the Service Get Properties operation.\n * @returns Response data for the Service Get Properties operation.\n */\n public async getProperties(\n options: ServiceGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getProperties\", options);\n try {\n return await this.serviceContext.getProperties({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n 
});\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets properties for a storage account’s Blob service endpoint, including properties\n * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties\n *\n * @param properties -\n * @param options - Options to the Service Set Properties operation.\n * @returns Response data for the Service Set Properties operation.\n */\n public async setProperties(\n properties: BlobServiceProperties,\n options: ServiceSetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-setProperties\", options);\n try {\n return await this.serviceContext.setProperties(properties, {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only\n * available on the secondary location endpoint when read-access geo-redundant\n * replication is enabled for the storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats\n *\n * @param options - Options to the Service Get Statistics operation.\n * @returns Response data for the Service Get Statistics operation.\n */\n public async getStatistics(\n options: ServiceGetStatisticsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getStatistics\", options);\n try {\n return await this.serviceContext.getStatistics({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Get Account Information operation returns the sku name and account kind\n * for the specified account.\n * The Get Account Information operation is available on service versions beginning\n * with version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information\n *\n * @param options - Options to the Service Get Account Info operation.\n * @returns Response data for the Service Get Account Info operation.\n */\n public async getAccountInfo(\n options: ServiceGetAccountInfoOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getAccountInfo\", options);\n try {\n return await this.serviceContext.getAccountInfo({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns a list of the containers under the specified account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. 
The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to the Service List Container Segment operation.\n * @returns Response data for the Service List Container Segment operation.\n */\n private async listContainersSegment(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-listContainersSegment\", options);\n\n try {\n return await this.serviceContext.listContainersSegment({\n abortSignal: options.abortSignal,\n marker,\n ...options,\n include: typeof options.include === \"string\" ? [options.include] : options.include,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags\n * match a given search expression. Filter blobs searches across all containers within a\n * storage account but can be scoped within the expression to a single container.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"BlobServiceClient-findBlobsByTagsSegment\",\n options\n );\n\n try {\n const response = await this.serviceContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ServiceFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter 
enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ServiceFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list containers operation.\n */\n private async *listSegments(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let listContainersSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listContainersSegmentResponse = await this.listContainersSegment(marker, options);\n listContainersSegmentResponse.containerItems =\n listContainersSegmentResponse.containerItems || [];\n marker = listContainersSegmentResponse.continuationToken;\n yield await listContainersSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for Container Items\n *\n * @param options - Options to list containers operation.\n */\n private async *listItems(\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.listSegments(marker, options)) {\n yield* segment.containerItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the containers\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the containers in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const container of blobServiceClient.listContainers()) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.listContainers();\n * let containerItem = await iter.next();\n * while (!containerItem.done) {\n * console.log(`Container ${i++}: ${containerItem.value.name}`);\n * containerItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .listContainers()\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * ```\n *\n * @param options - Options to list containers.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listContainers(\n options: ServiceListContainersOptions = {}\n ): PagedAsyncIterableIterator {\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const include: ListContainersIncludeType[] = [];\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSystem) {\n 
include.push(\"system\");\n }\n\n // AsyncIterableIterator to iterate over containers\n const listSegmentOptions: ServiceListContainersSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include } : {}),\n };\n\n const iter = this.listItems(listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).\n *\n * Retrieves a user delegation key for the Blob service. This is only a valid operation when using\n * bearer token authentication.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key\n *\n * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time\n * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time\n */\n public async getUserDelegationKey(\n startsOn: Date,\n expiresOn: Date,\n options: ServiceGetUserDelegationKeyOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getUserDelegationKey\", options);\n try {\n const response = await this.serviceContext.getUserDelegationKey(\n {\n startsOn: truncatedISO8061Date(startsOn, false),\n expiresOn: truncatedISO8061Date(expiresOn, false),\n },\n {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n const userDelegationKey = {\n signedObjectId: response.signedObjectId,\n signedTenantId: response.signedTenantId,\n signedStartsOn: new Date(response.signedStartsOn),\n signedExpiresOn: new Date(response.signedExpiresOn),\n signedService: response.signedService,\n signedVersion: response.signedVersion,\n value: response.value,\n };\n\n const res: ServiceGetUserDelegationKeyResponse = {\n _response: response._response,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n version: response.version,\n date: response.date,\n errorCode: response.errorCode,\n ...userDelegationKey,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this service.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n\n /**\n * Only available for BlobServiceClient constructed with a shared key credential.\n *\n * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas\n *\n * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. 
Default to an hour later if not provided.\n * @param permissions - Specifies the list of permissions to be associated with the SAS.\n * @param resourceTypes - Specifies the resource types associated with the shared access signature.\n * @param options - Optional parameters.\n * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateAccountSasUrl(\n expiresOn?: Date,\n permissions: AccountSASPermissions = AccountSASPermissions.parse(\"r\"),\n resourceTypes: string = \"sco\",\n options: ServiceGenerateAccountSasUrlOptions = {}\n ): string {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw RangeError(\n \"Can only generate the account SAS when the client is initialized with a shared key credential\"\n );\n }\n\n if (expiresOn === undefined) {\n const now = new Date();\n expiresOn = new Date(now.getTime() + 3600 * 1000);\n }\n\n const sas = generateAccountSASQueryParameters(\n {\n permissions,\n expiresOn,\n resourceTypes,\n services: AccountSASServices.parse(\"b\").toString(),\n ...options,\n },\n this.credential\n ).toString();\n\n return appendToURLQuery(this.url, sas);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Tags } from \".\";\nimport { BlobTags, BlobPropertiesInternal as BlobProperties } from \"./generated/src/models\";\n\nexport {\n AccessPolicy,\n AccessTier,\n AccountKind,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlHeaders,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockHeaders,\n AppendBlobCreateHeaders,\n ArchiveStatus,\n BlobDeleteImmutabilityPolicyHeaders,\n BlobDeleteImmutabilityPolicyResponse,\n BlobImmutabilityPolicyMode,\n BlobAbortCopyFromURLHeaders,\n BlobCopyFromURLHeaders,\n BlobCopySourceTags,\n BlobCreateSnapshotHeaders,\n BlobDeleteHeaders,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobGetPropertiesHeaders,\n BlobGetPropertiesResponse as BlobGetPropertiesResponseModel,\n BlobPropertiesInternal as BlobProperties,\n BlobUndeleteResponse,\n BlobHttpHeaders as BlobHTTPHeaders,\n BlobSetHttpHeadersHeaders as BlobSetHTTPHeadersHeaders,\n BlobSetHttpHeadersResponse as BlobSetHTTPHeadersResponse,\n BlobSetImmutabilityPolicyHeaders,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldHeaders,\n BlobSetLegalHoldResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLHeaders,\n BlobStartCopyFromURLResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobSetMetadataHeaders,\n BlobSetTierHeaders,\n BlobSetTierResponse,\n BlobSetTagsHeaders,\n BlobDownloadHeaders,\n BlobDownloadResponse as BlobDownloadResponseModel,\n BlobType,\n BlobTags,\n BlobUndeleteHeaders,\n Block,\n BlockBlobCommitBlockListHeaders,\n BlockBlobUploadHeaders,\n BlockBlobUploadResponse,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListHeaders,\n BlockBlobStageBlockFromURLHeaders,\n BlockBlobStageBlockHeaders,\n BlockList,\n BlockListType,\n BlockBlobGetBlockListResponse,\n BlobServiceProperties,\n BlobServiceStatistics,\n BlobGetTagsHeaders,\n BlobTag,\n ContainerCreateHeaders,\n ContainerCreateResponse,\n ContainerDeleteHeaders,\n ContainerDeleteResponse,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesHeaders,\n ContainerBreakLeaseOptionalParams,\n 
ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerGetPropertiesResponse,\n ContainerProperties,\n ContainerSetMetadataResponse,\n ContainerSetAccessPolicyHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataHeaders,\n CopyStatusType,\n CorsRule,\n CpkInfo,\n DeleteSnapshotsOptionType,\n EncryptionAlgorithmType,\n GeoReplication,\n GeoReplicationStatusType,\n LeaseAccessConditions,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n ListContainersSegmentResponse,\n FilterBlobItem as FilterBlobItemModel,\n FilterBlobSegment as FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n Logging,\n Metrics,\n ModifiedAccessConditions as ModifiedAccessConditionsModel,\n PublicAccessType,\n PageBlobCreateResponse,\n PageBlobUploadPagesResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobClearPagesHeaders,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalHeaders,\n PageBlobCreateHeaders,\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesResponse as PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesDiffResponse as PageBlobGetPageRangesDiffResponseModel,\n PageBlobResizeHeaders,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberHeaders,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLHeaders,\n PageBlobUploadPagesHeaders,\n PageBlobCopyIncrementalResponse,\n SequenceNumberActionType,\n RehydratePriority,\n RetentionPolicy,\n AppendPositionAccessConditions,\n ServiceGetUserDelegationKeyHeaders,\n ServiceSubmitBatchHeaders,\n ServiceGetAccountInfoHeaders,\n ServiceGetPropertiesHeaders,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsHeaders,\n SequenceNumberAccessConditions,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentHeaders,\n ServiceListContainersSegmentResponse,\n ServiceSetPropertiesHeaders,\n SkuName,\n StaticWebsite,\n ContainerItem,\n ServiceSubmitBatchResponse as ServiceSubmitBatchResponseModel,\n ServiceSubmitBatchOptionalParams as ServiceSubmitBatchOptionalParamsModel,\n SignedIdentifier as SignedIdentifierModel,\n SyncCopyStatusType,\n UserDelegationKey as UserDelegationKeyModel,\n ContainerEncryptionScope,\n BlobQueryHeaders,\n BlobQueryResponse as BlobQueryResponseModel,\n ContainerRestoreResponse as ContainerUndeleteResponse,\n ContainerRestoreHeaders as ContainerUndeleteHeaders,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobPutBlobFromUrlHeaders,\n ContainerRenameResponse,\n ContainerRenameHeaders,\n} from \"./generated/src/models\";\n\n// Following definitions are to avoid breaking change.\nexport interface BlobPrefix {\n name: string;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsFlatSegmentResponseModel {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegmentModel;\n continuationToken?: string;\n}\n\nexport interface BlobFlatListSegmentModel {\n blobItems: BlobItemInternal[];\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsHierarchySegmentResponseModel {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegmentModel;\n continuationToken?: string;\n}\n\nexport interface BlobHierarchyListSegmentModel {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItemInternal[];\n}\n\n/** An Azure Storage blob */\nexport interface 
BlobItemInternal {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n /** Properties of a blob */\n properties: BlobProperties;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n /** Blob tags */\n blobTags?: BlobTags;\n /** Dictionary of */\n objectReplicationMetadata?: { [propertyName: string]: string };\n /** Inactive root blobs which have any versions would have such tag with value true. */\n hasVersionsOnly?: boolean;\n}\n\n/**\n * Blob info from a {@link BlobServiceClient.findBlobsByTags}\n */\nexport interface FilterBlobItem {\n /**\n * Blob Name.\n */\n name: string;\n\n /**\n * Container Name.\n */\n containerName: string;\n\n /**\n * Blob Tags.\n */\n tags?: Tags;\n\n /**\n * Tag value.\n *\n * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags.\n */\n tagValue: string;\n}\n\n/**\n * Segment response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface FilterBlobSegment {\n serviceEndpoint: string;\n where: string;\n blobs: FilterBlobItem[];\n continuationToken?: string;\n}\n\nexport interface PageRangeInfo {\n start: number;\n end: number;\n isClear: boolean;\n}\n\n/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */\nexport enum KnownEncryptionAlgorithmType {\n AES256 = \"AES256\",\n}\n"],"names":["BlobServicePropertiesMapper","QueryCollectionFormat","KeyInfoMapper","QueryRequestMapper","BlobTagsMapper","BlockLookupListMapper","coreHttp","getPropertiesOperationSpec","getAccountInfoOperationSpec","submitBatchOperationSpec","filterBlobsOperationSpec","xmlSerializer","Mappers.ServiceSetPropertiesHeaders","Mappers.StorageError","Mappers.ServiceSetPropertiesExceptionHeaders","Parameters.blobServiceProperties","Parameters.restype","Parameters.comp","Parameters.timeoutInSeconds","Parameters.url","Parameters.contentType","Parameters.accept","Parameters.version","Parameters.requestId","Mappers.BlobServiceProperties","Mappers.ServiceGetPropertiesHeaders","Mappers.ServiceGetPropertiesExceptionHeaders","Parameters.accept1","Mappers.BlobServiceStatistics","Mappers.ServiceGetStatisticsHeaders","Mappers.ServiceGetStatisticsExceptionHeaders","Parameters.comp1","Mappers.ListContainersSegmentResponse","Mappers.ServiceListContainersSegmentHeaders","Mappers.ServiceListContainersSegmentExceptionHeaders","Parameters.comp2","Parameters.prefix","Parameters.marker","Parameters.maxPageSize","Parameters.include","Mappers.UserDelegationKey","Mappers.ServiceGetUserDelegationKeyHeaders","Mappers.ServiceGetUserDelegationKeyExceptionHeaders","Parameters.keyInfo","Parameters.comp3","Mappers.ServiceGetAccountInfoHeaders","Mappers.ServiceGetAccountInfoExceptionHeaders","Parameters.restype1","Mappers.ServiceSubmitBatchHeaders","Mappers.ServiceSubmitBatchExceptionHeaders","Parameters.body","Parameters.comp4","Parameters.contentLength","Parameters.multipartContentType","Mappers.FilterBlobSegment","Mappers.ServiceFilterBlobsHeaders","Mappers.ServiceFilterBlobsExceptionHeaders","Parameters.comp5","Parameters.where","createOperationSpec","deleteOperationSpec","setMetadataOperationSpec","acquireLeaseOperationSpec","releaseLeaseOperationSpec","renewLeaseOperationSpec","breakLeaseOperationSpec","changeLeaseOperationSpec","Mappers.ContainerCreateHeaders","Mappers.ContainerCreateExceptionHeaders","Parameters.restype2","Parameters.metadata","Parameters.access","Parameters.defaultEncryptionScope","Parameters.preventEncryptionScopeOverride","
Mappers.ContainerGetPropertiesHeaders","Mappers.ContainerGetPropertiesExceptionHeaders","Parameters.leaseId","Mappers.ContainerDeleteHeaders","Mappers.ContainerDeleteExceptionHeaders","Parameters.ifModifiedSince","Parameters.ifUnmodifiedSince","Mappers.ContainerSetMetadataHeaders","Mappers.ContainerSetMetadataExceptionHeaders","Parameters.comp6","Mappers.ContainerGetAccessPolicyHeaders","Mappers.ContainerGetAccessPolicyExceptionHeaders","Parameters.comp7","Mappers.ContainerSetAccessPolicyHeaders","Mappers.ContainerSetAccessPolicyExceptionHeaders","Parameters.containerAcl","Mappers.ContainerRestoreHeaders","Mappers.ContainerRestoreExceptionHeaders","Parameters.comp8","Parameters.deletedContainerName","Parameters.deletedContainerVersion","Mappers.ContainerRenameHeaders","Mappers.ContainerRenameExceptionHeaders","Parameters.comp9","Parameters.sourceContainerName","Parameters.sourceLeaseId","Mappers.ContainerSubmitBatchHeaders","Mappers.ContainerSubmitBatchExceptionHeaders","Mappers.ContainerFilterBlobsHeaders","Mappers.ContainerFilterBlobsExceptionHeaders","Mappers.ContainerAcquireLeaseHeaders","Mappers.ContainerAcquireLeaseExceptionHeaders","Parameters.comp10","Parameters.action","Parameters.duration","Parameters.proposedLeaseId","Mappers.ContainerReleaseLeaseHeaders","Mappers.ContainerReleaseLeaseExceptionHeaders","Parameters.action1","Parameters.leaseId1","Mappers.ContainerRenewLeaseHeaders","Mappers.ContainerRenewLeaseExceptionHeaders","Parameters.action2","Mappers.ContainerBreakLeaseHeaders","Mappers.ContainerBreakLeaseExceptionHeaders","Parameters.action3","Parameters.breakPeriod","Mappers.ContainerChangeLeaseHeaders","Mappers.ContainerChangeLeaseExceptionHeaders","Parameters.action4","Parameters.proposedLeaseId1","Mappers.ListBlobsFlatSegmentResponse","Mappers.ContainerListBlobFlatSegmentHeaders","Mappers.ContainerListBlobFlatSegmentExceptionHeaders","Parameters.include1","Mappers.ListBlobsHierarchySegmentResponse","Mappers.ContainerListBlobHierarchySegmentHeaders","Mappers.ContainerListBlobHierarchySegmentExceptionHeaders","Parameters.delimiter","Mappers.ContainerGetAccountInfoHeaders","Mappers.ContainerGetAccountInfoExceptionHeaders","Blob","Mappers.BlobDownloadHeaders","Mappers.BlobDownloadExceptionHeaders","Parameters.snapshot","Parameters.versionId","Parameters.range","Parameters.rangeGetContentMD5","Parameters.rangeGetContentCRC64","Parameters.encryptionKey","Parameters.encryptionKeySha256","Parameters.encryptionAlgorithm","Parameters.ifMatch","Parameters.ifNoneMatch","Parameters.ifTags","Mappers.BlobGetPropertiesHeaders","Mappers.BlobGetPropertiesExceptionHeaders","Mappers.BlobDeleteHeaders","Mappers.BlobDeleteExceptionHeaders","Parameters.blobDeleteType","Parameters.deleteSnapshots","Mappers.BlobUndeleteHeaders","Mappers.BlobUndeleteExceptionHeaders","Mappers.BlobSetExpiryHeaders","Mappers.BlobSetExpiryExceptionHeaders","Parameters.comp11","Parameters.expiryOptions","Parameters.expiresOn","Mappers.BlobSetHttpHeadersHeaders","Mappers.BlobSetHttpHeadersExceptionHeaders","Parameters.blobCacheControl","Parameters.blobContentType","Parameters.blobContentMD5","Parameters.blobContentEncoding","Parameters.blobContentLanguage","Parameters.blobContentDisposition","Mappers.BlobSetImmutabilityPolicyHeaders","Mappers.BlobSetImmutabilityPolicyExceptionHeaders","Parameters.comp12","Parameters.immutabilityPolicyExpiry","Parameters.immutabilityPolicyMode","Mappers.BlobDeleteImmutabilityPolicyHeaders","Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders","Mappers.BlobSetLegalHoldHeaders","Mapper
s.BlobSetLegalHoldExceptionHeaders","Parameters.comp13","Parameters.legalHold","Mappers.BlobSetMetadataHeaders","Mappers.BlobSetMetadataExceptionHeaders","Parameters.encryptionScope","Mappers.BlobAcquireLeaseHeaders","Mappers.BlobAcquireLeaseExceptionHeaders","Mappers.BlobReleaseLeaseHeaders","Mappers.BlobReleaseLeaseExceptionHeaders","Mappers.BlobRenewLeaseHeaders","Mappers.BlobRenewLeaseExceptionHeaders","Mappers.BlobChangeLeaseHeaders","Mappers.BlobChangeLeaseExceptionHeaders","Mappers.BlobBreakLeaseHeaders","Mappers.BlobBreakLeaseExceptionHeaders","Mappers.BlobCreateSnapshotHeaders","Mappers.BlobCreateSnapshotExceptionHeaders","Parameters.comp14","Mappers.BlobStartCopyFromURLHeaders","Mappers.BlobStartCopyFromURLExceptionHeaders","Parameters.tier","Parameters.rehydratePriority","Parameters.sourceIfModifiedSince","Parameters.sourceIfUnmodifiedSince","Parameters.sourceIfMatch","Parameters.sourceIfNoneMatch","Parameters.sourceIfTags","Parameters.copySource","Parameters.blobTagsString","Parameters.sealBlob","Parameters.legalHold1","Mappers.BlobCopyFromURLHeaders","Mappers.BlobCopyFromURLExceptionHeaders","Parameters.xMsRequiresSync","Parameters.sourceContentMD5","Parameters.copySourceAuthorization","Parameters.copySourceTags","Mappers.BlobAbortCopyFromURLHeaders","Mappers.BlobAbortCopyFromURLExceptionHeaders","Parameters.comp15","Parameters.copyId","Parameters.copyActionAbortConstant","Mappers.BlobSetTierHeaders","Mappers.BlobSetTierExceptionHeaders","Parameters.comp16","Parameters.tier1","Mappers.BlobGetAccountInfoHeaders","Mappers.BlobGetAccountInfoExceptionHeaders","Mappers.BlobQueryHeaders","Mappers.BlobQueryExceptionHeaders","Parameters.queryRequest","Parameters.comp17","Mappers.BlobTags","Mappers.BlobGetTagsHeaders","Mappers.BlobGetTagsExceptionHeaders","Parameters.comp18","Mappers.BlobSetTagsHeaders","Mappers.BlobSetTagsExceptionHeaders","Parameters.tags","Parameters.transactionalContentMD5","Parameters.transactionalContentCrc64","serializer","Mappers.PageBlobCreateHeaders","Mappers.PageBlobCreateExceptionHeaders","Parameters.blobType","Parameters.blobContentLength","Parameters.blobSequenceNumber","Mappers.PageBlobUploadPagesHeaders","Mappers.PageBlobUploadPagesExceptionHeaders","Parameters.body1","Parameters.comp19","Parameters.contentType1","Parameters.accept2","Parameters.pageWrite","Parameters.ifSequenceNumberLessThanOrEqualTo","Parameters.ifSequenceNumberLessThan","Parameters.ifSequenceNumberEqualTo","Mappers.PageBlobClearPagesHeaders","Mappers.PageBlobClearPagesExceptionHeaders","Parameters.pageWrite1","Mappers.PageBlobUploadPagesFromURLHeaders","Mappers.PageBlobUploadPagesFromURLExceptionHeaders","Parameters.sourceUrl","Parameters.sourceRange","Parameters.sourceContentCrc64","Parameters.range1","Mappers.PageList","Mappers.PageBlobGetPageRangesHeaders","Mappers.PageBlobGetPageRangesExceptionHeaders","Parameters.comp20","Mappers.PageBlobGetPageRangesDiffHeaders","Mappers.PageBlobGetPageRangesDiffExceptionHeaders","Parameters.prevsnapshot","Parameters.prevSnapshotUrl","Mappers.PageBlobResizeHeaders","Mappers.PageBlobResizeExceptionHeaders","Mappers.PageBlobUpdateSequenceNumberHeaders","Mappers.PageBlobUpdateSequenceNumberExceptionHeaders","Parameters.sequenceNumberAction","Mappers.PageBlobCopyIncrementalHeaders","Mappers.PageBlobCopyIncrementalExceptionHeaders","Parameters.comp21","Mappers.AppendBlobCreateHeaders","Mappers.AppendBlobCreateExceptionHeaders","Parameters.blobType1","Mappers.AppendBlobAppendBlockHeaders","Mappers.AppendBlobAppendBlockExceptionHeaders","Parameters.com
p22","Parameters.maxSize","Parameters.appendPosition","Mappers.AppendBlobAppendBlockFromUrlHeaders","Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders","Parameters.sourceRange1","Mappers.AppendBlobSealHeaders","Mappers.AppendBlobSealExceptionHeaders","Parameters.comp23","Mappers.BlockBlobUploadHeaders","Mappers.BlockBlobUploadExceptionHeaders","Parameters.blobType2","Mappers.BlockBlobPutBlobFromUrlHeaders","Mappers.BlockBlobPutBlobFromUrlExceptionHeaders","Parameters.copySourceBlobProperties","Mappers.BlockBlobStageBlockHeaders","Mappers.BlockBlobStageBlockExceptionHeaders","Parameters.comp24","Parameters.blockId","Mappers.BlockBlobStageBlockFromURLHeaders","Mappers.BlockBlobStageBlockFromURLExceptionHeaders","Mappers.BlockBlobCommitBlockListHeaders","Mappers.BlockBlobCommitBlockListExceptionHeaders","Parameters.blocks","Parameters.comp25","Mappers.BlockList","Mappers.BlockBlobGetBlockListHeaders","Mappers.BlockBlobGetBlockListExceptionHeaders","Parameters.listType","createClientLogger","URLBuilder","isNode","BaseRequestPolicy","StorageRetryPolicyType","AbortError","HttpHeaders","os","DefaultHttpClient","delay","tracingPolicy","keepAlivePolicy","generateClientRequestIdPolicy","deserializationPolicy","logPolicy","proxyPolicy","disableResponseDecompressionPolicy","isTokenCredential","createHmac","createSpanFunction","SASProtocol","StorageBlob","generateUuid","SpanStatusCode","Readable","__await","BlockBlobTier","PremiumPageBlobTier","StorageBlobAudience","Poller","EventEmitter","fs","util","getDefaultProxySettings","__asyncValues","__asyncDelegator","bearerTokenAuthenticationPolicy","WebResource","KnownEncryptionAlgorithmType"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;AAMG;AAII,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,0BAA0B;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,oBAAoB,EAAE;AACpB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,UAAU;AACtB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,eAAe;AAC3B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AAC
F,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAA6B;AACvD,IAAA,cAAc,EAAE,iBAAiB;AACjC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iBAAiB;AAC5B,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,WAAW,EAAE;AACX,oBAAA,gBAAgB,EAAE,CAAC;AACpB,iBAAA;AACD,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,WAAW,EAAE;AACX,oBAAA,gBAAgB,EAAE,CAAC;AACpB,iBAAA;AACD,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAA6B;AACrD,IAAA,cAAc,EAAE,eAAe;AAC/B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,eAAe;AAC1B,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,E
AAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,oBAAoB,EAAE;AACpB,gBAAA,cAAc,EAAE,sBAAsB;AACtC,gBAAA,OAAO,EAAE,sBAAsB;AAC/B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EAAE;AACxB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,qBAAqB;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,gBAAgB;AAC5B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAA6B;AACtD,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gBAAgB;AAC3B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,WAAW,EAAE,aAAa,CAAC;AACpD,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,eAAe;AAC3B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAA6B;AACrD,IAAA,cAAc,EAAE,eAAe;AAC/B,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,eAAe;AAC1B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AA
ChB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,qBAAqB;AACjC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,8BAA8B,EAAE;AAC9B,gBAAA,cAAc,EAAE,6BAA6B;AAC7C,gBAAA,OAAO,EAAE,6BAA6B;AACtC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uCAAuC,EAAE;AACvC,gBAAA,cAAc,EAAE,uCAAuC;AACvD,gBAAA,OAAO,EAAE,uCAAuC;AAChD,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAA
A,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,gBAAgB;AAC5B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAA6B;AACtD,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,OAAO,EAAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gBAAgB;AAC3B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,OAAO,EAAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,SAAS;AACrB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EA
AE,SAAS;AACzB,IAAA,OAAO,EAAE,KAAK;AACd,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,kBAAkB;AAClC,IAAA,OAAO,EAAE,kBAAkB;AAC3B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,EAAE,EAAE;AACF,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,cAAc;AAC1B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,qBAAqB;AACjC,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,kBAAkB;AAC9B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,kBAAkB;AAClC,IAAA,OAAO,E
AAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,wBAAwB;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,WAAW,EAAE,IAAI;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,OAAO,EAAE,YAAY;AACrB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAA
Q;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,IAAI;wBACJ,IAAI;wBACJ,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,MAAM;wBACN,SAAS;wBACT,MAAM;AACP,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA
;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,0BAA0B;wBAC1B,2BAA2B;wBAC3B,2BAA2B;AAC5B,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,sBAAsB;AACtC,gBAAA,OAAO,EAAE,sBAAsB;AAC/B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,6BAA6B;AAC7C,gBAAA,OAAO,EAAE,6BAA6B;AACtC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,0BAA0B;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAA6B;AAChE,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI
,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0BAA0B;AACrC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,kBAAkB;AAC9B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAA6B;AACvD,IAAA,cAAc,EAAE,iBAAiB;AACjC,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iBAAiB;AAC5B,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAA6B;AACjD,IAAA,cAAc,EAAE,WAAW;AAC3B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,OAAO;AACnB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,OAAO;AACnB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA6B;AAC7C,IAAA,cAAc,EAAE,OAAO;AACvB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,OAAO;AAClB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE
,WAAW;AACpB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,WAAW;AACvB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAA6B;AACjD,IAAA,cAAc,EAAE,WAAW;AAC3B,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,eAAe,EAAE;AACf,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,OAAO,EAAE,YAAY;AACrB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,OAAO,EAAE,cAAc;AACvB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,aAAa;AACzB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,WAAW,GAA6B;AACnD,IAAA,cAAc,EAAE,aAAa;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,aAAa;AACxB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,SAAS,CAAC;AACzD,iBAAA;AACF,aAAA;AACD,YAAA,0BAA0B,EAAE;AAC1B,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,4BAA4B;AACxC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AAC
rB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,uBAAuB;AACnC,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EAAE;AACxB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,KAAK;AACZ,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,OAAO,EAAE,4BAA4B;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,uBAAuB;AAChC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aA
AA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,c
AAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2CAA2C,GAA6B;AACnF,IAAA,cAAc,EAAE,8CAA8C;AAC9D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6CAA6C;AACxD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,8BAA8B,EAAE;AAC9B,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACj
E,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,
iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,gCAAgC;AAChD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,uCAAuC,EAAE;AACvC,gBAAA,cAAc,EAAE,gDAAgD;AAChE,gBAAA,OAAO,EAAE,gDAAgD;AACzD,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sCAAsC,GAA6B;AAC9E,IAAA,cAAc,EAAE,yCAAyC;AACzD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wCAAwC;AACnD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,
OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACj
C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,Y
AAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,IAA
I,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AAC
f,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAA
O,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iDAAiD,GAA6B;AACzF,IAAA,cAAc,EAAE,oDAAoD;AACpE,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mDAAmD;AAC9D,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;
AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uCAAuC,GAA6B;AAC/E,IAAA,cAAc,EAAE,0CAA0C;AAC1D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yCAAyC;AACpD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,sBAAsB;AACtC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,UAAU;AACnC,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,E
AAE;AACf,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA
,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAA6B;AAChE,IAAA,cAAc,EAAE,2BAA2B;AAC3C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0BAA0B;AACrC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,UAAU;AACnC,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,iBAAi
B,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,gCAAgC;AAChD,gBAAA,OAAO,EAAE,gCAAgC;AACzC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAA
c,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,oCAAoC;AACpD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM
,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,sBAAsB;AACtC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oBAAoB,GAA6B;AAC5D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sBAAsB;AACjC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,gCAAgC;AAChD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;
AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EAAE;AACxB,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yCAAyC,GAA6B;AACjF,IAAA,cAAc,EAAE,4CAA4C;AAC5D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2CAA2C;AACtD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0B
AA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AA
CF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oB
AAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MA
AM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,
IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,YAAY,EAAE,SAAS;AACvB,gBAAA,UAAU,EAAE,IAAI;AAChB,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE
,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;A
ACpC,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,
cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,E
AAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,E
AAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cA
Ac,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,oCAAoC;AACpD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0CAA0C,GAA6B;AAClF,IAAA,cAAc,EAAE,6CAA6C;AAC7D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4CAA4C;AACvD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YA
AA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yCAAyC,GAA6B;AACjF,IAAA,cAAc,EAAE,4CAA4C;AAC5D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2CAA2C;AACtD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;
AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uCAAuC,GAA6B;AAC/E,IAAA,cAAc,EAAE,0CAA0C;AAC1D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS
,EAAE,yCAAyC;AACpD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI
,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,
+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT
,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uCAAuC,GAA6B;AAC/E,IAAA,cAAc,EAAE,0CAA0C;AAC1D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yCAAyC;AACpD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,i
BAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,oCAAoC;AACpD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0CAA0C,GAA6B;AAClF,IAAA,cAAc,EAAE,6CAA6C;AAC7D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4CAA4C;AACvD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AA
C7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AAC
T,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC7rQD;;;;;;AAMG;AAgBI,MAAM,WAAW,GAAuB;AAC7C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;AACzC,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,iBAAiB;AAC/B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,cAAc;AAC9B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAAuB;AACvD,IAAA,aAAa,EAAE,uBAAuB;AACtC,IAAA,MAAM,EAAEA,qBAA2B;CACpC,CAAC;AAEK,MAAM,MAAM,GAAuB;AACxC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,iBAAiB;AAC/B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,QAAQ;AACxB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,GAAG,GAA0B;AACxC,IAAA,aAAa,EAAE,KAAK;AACpB,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,KAAK;AACrB,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,KAAK;AACd,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;AACD,IAAA,YAAY,EAAE,IAAI;CACnB,CAAC;AAEK,MAAM,OAAO,GAA4B;AAC9C,IAAA,aAAa,EAAE,SAAS;AACxB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,SAAS;AACvB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,SAAS;AACzB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,IAAI,GAA4B;AAC3C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,YAAY;AAC1B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA4B;AACvD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC;AAC9C,IAAA,MAAM,EAAE;AACN,QAAA,WAAW,EAAE;AACX,YAAA,gBAAgB,EAAE,CAAC;AACpB,SAAA;AACD,QAAA,cAAc,EAAE,SAAS;AACzB,QAAA,OAAO,EAAE,SAAS;AAClB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,SAAS;AACxB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,YAAY;AAC1B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,cAAc;AAC9B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAAuB;AAC3C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;AACvC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,wBAAwB;AACxC,QAAA,OAAO,EAAE,wBAAwB;AACjC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,iBAAiB;AAC/B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,QAAQ;AACxB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,MAAM;AACpB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;AACpC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,QAAQ;AACxB,QAAA,OAAO,EAAE,QAAQ;AACjB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;AACpC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,QAAQ;AACxB,QAAA,OAAO,EAAE,QAAQ;AACjB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,WAAW,GAA4B;AAClD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;AACzC,IAAA,MAAM,EAAE;AACN,QAAA,WAAW,EAAE;AACX,YAAA,gBAAgB,EAAE,CAAC;AACpB,SAAA;AAC
D,QAAA,cAAc,EAAE,YAAY;AAC5B,QAAA,OAAO,EAAE,YAAY;AACrB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA4B;AAC9C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;AACrC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,SAAS;AACzB,QAAA,OAAO,EAAE,SAAS;AAClB,QAAA,cAAc,EAAE,2BAA2B;AAC3C,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,UAAU;AAChB,YAAA,OAAO,EAAE;AACP,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,SAAS,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;IACD,gBAAgB,EAAEC,8BAAqB,CAAC,GAAG;CAC5C,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,SAAS;AACxB,IAAA,MAAM,EAAEC,OAAa;CACtB,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,mBAAmB;AACjC,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA4B;AAC/C,IAAA,aAAa,EAAE,SAAS;AACxB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,SAAS;AACvB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,SAAS;AACzB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,IAAI,GAAuB;AACtC,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,MAAM;AACf,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAAuB;AAC/C,IAAA,aAAa,EAAE,eAAe;AAC9B,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,gBAAgB;AAChC,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,gBAAgB;AACzB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oBAAoB,GAAuB;AACtD,IAAA,aAAa,EAAE,sBAAsB;AACrC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,cAAc;AAC9B,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,cAAc;AACvB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;AACnC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,OAAO;AACvB,QAAA,OAAO,EAAE,OAAO;AAChB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA4B;AAC/C,IAAA,aAAa,EAAE,SAAS;AACxB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,WAAW;AACzB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,SAAS;AACzB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAAuB;AAC1C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;AACtC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,WAAW;AAC3B,QAAA,OAAO,EAAE,WAAW;AACpB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,YAAY;YAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,SAAA;AACD,QAAA,sBAAsB,EAAE,YAAY;AACrC,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAAuB;AACxC,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;AACpC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,yBAAyB;AACzC,QAAA,OAAO,EAAE,yBAAyB;AAClC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAAuB;AACxD,IAAA,aAAa,EAAE;QACb,SAAS;QACT,0BAA0B;QAC1B,wBAAwB;AACzB,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,+BAA+B;AAC/C,QAAA,OAAO,EAAE,+BAA+B;AACxC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAAuB;AAChE,IAAA,aAAa,EAAE;QACb,SAAS;QACT,0BAA0B;QAC1B,gCAAgC;AACjC,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,c
AAc,EAAE,qCAAqC;AACrD,QAAA,OAAO,EAAE,qCAAqC;AAC9C,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,SAAS;AAChB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,uBAAuB,EAAE,SAAS,CAAC;AAC9D,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,eAAe;AAC/B,QAAA,OAAO,EAAE,eAAe;AACxB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAAuB;AACjD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,iBAAiB,CAAC;AACzE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,mBAAmB;AACnC,QAAA,OAAO,EAAE,mBAAmB;AAC5B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,iBAAiB;AACxB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAAuB;AACnD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,mBAAmB,CAAC;AAC3E,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,qBAAqB;AACrC,QAAA,OAAO,EAAE,qBAAqB;AAC9B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,iBAAiB;AACxB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,UAAU;AACxB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,KAAK;AACnB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAAuB;AAC9C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;AAC1C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,cAAc;AAC9B,QAAA,OAAO,EAAE,mBAAmB;AAC5B,QAAA,YAAY,EAAE,IAAI;AAClB,QAAA,cAAc,EAAE,kBAAkB;AAClC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,UAAU;AAChB,YAAA,OAAO,EAAE;AACP,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,kBAAkB;AAC9B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,UAAU;AACxB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oBAAoB,GAAuB;AACtD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,sBAAsB,CAAC;AAClD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,6BAA6B;AAC7C,QAAA,OAAO,EAAE,6BAA6B;AACtC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAAuB;AACzD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;AACrD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,gCAAgC;AAChD,QAAA,OAAO,EAAE,gCAAgC;AACzC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA4B;AAC5C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,QAAQ;AACtB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAAuB;AACrD,IAAA,aAAa,EAAE,qBAAqB;AACpC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4B;AAC5C,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAAuB;AAC/C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,eAAe,CAAC;AAC3C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,sBAAsB;AACtC,QAAA,OAAO,EAAE,sBAAsB;AAC/B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAAuB;AACxC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,SAAS;AACvB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,mBAAmB;AACnC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAAuB;AAC1C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;AACtC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,qBAAqB;AACrC,Q
AAA,OAAO,EAAE,qBAAqB;AAC9B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAAuB;AACjD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;AAC7C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,wBAAwB;AACxC,QAAA,OAAO,EAAE,wBAAwB;AACjC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,SAAS;AACvB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,mBAAmB;AACnC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAAuB;AAC1C,IAAA,aAAa,EAAE,SAAS;AACxB,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,eAAe;AAC/B,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,eAAe;AACxB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,mBAAmB;AACnC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,mBAAmB;AACnC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,WAAW,GAAuB;AAC7C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;AACzC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,yBAAyB;AACzC,QAAA,OAAO,EAAE,yBAAyB;AAClC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,QAAQ;AACtB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,mBAAmB;AACnC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAAuB;AAClD,IAAA,aAAa,EAAE,iBAAiB;AAChC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,wBAAwB;AACxC,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,wBAAwB;AACjC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA4B;AAC/C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;AACrC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,SAAS;AACzB,QAAA,OAAO,EAAE,SAAS;AAClB,QAAA,cAAc,EAAE,sBAAsB;AACtC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,UAAU;AAChB,YAAA,OAAO,EAAE;AACP,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,MAAM;wBACN,SAAS;wBACT,UAAU;wBACV,WAAW;wBACX,kBAAkB;wBAClB,UAAU;wBACV,MAAM;wBACN,oBAAoB;wBACpB,WAAW;wBACX,qBAAqB;AACtB,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;IACD,gBAAgB,EAAED,8BAAqB,CAAC,GAAG;CAC5C,CAAC;AAEK,MAAM,SAAS,GAA4B;AAChD,IAAA,aAAa,EAAE,WAAW;AAC1B,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,WAAW;AAC3B,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,WAAW;AACpB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA4B;AAC/C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;AACtC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,UAAU;AAC1B,QAAA,OAAO,EAAE,UAAU;AACnB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAA4B;AAChD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;AACvC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,WAAW;AAC3B,QAAA,OAAO,EAAE,WAAW;AACpB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAAuB;AACvC,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;AACnC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,YAAY;AAC5B,QAAA,OAAO,EAAE,YAAY;AACrB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAAuB;AACpD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;AAChD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4B;AAC5C,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,SAAS;AAChB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oBAAoB,GAAuB;AACtD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,sBAAsB,CAAC
;AAClD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,8BAA8B;AAC9C,QAAA,OAAO,EAAE,8BAA8B;AACvC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,SAAS;AAChB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAAuB;AAC/C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,eAAe,CAAC;AACtD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,qBAAqB;AACrC,QAAA,OAAO,EAAE,qBAAqB;AAC9B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAAuB;AACrD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,qBAAqB,CAAC;AAC5D,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4B;AAC5C,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAAuB;AACrD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,qBAAqB,CAAC;AAC5D,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,2BAA2B;AAC3C,QAAA,OAAO,EAAE,2BAA2B;AACpC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,SAAS,CAAC;AACjE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,UAAU;AAC1B,QAAA,OAAO,EAAE,UAAU;AACnB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,WAAW,GAAuB;AAC7C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,aAAa,CAAC;AACrE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,eAAe;AAC/B,QAAA,OAAO,EAAE,eAAe;AACxB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAAuB;AACxC,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,QAAQ,CAAC;AAChE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,cAAc;AAC9B,QAAA,OAAO,EAAE,cAAc;AACvB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAAuB;AACjD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;AAC7C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,uBAAuB;AACvC,QAAA,OAAO,EAAE,uBAAuB;AAChC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;AACnC,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAA4B;AACrD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;AAC5C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,YAAY;AAC5B,QAAA,OAAO,EAAE,YAAY;AACrB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,QAAQ;AACtB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAAuB;AAC/C,IAAA,aAAa,EAAE,eAAe;AAC9B,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,oBAAoB;AACpC,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,oBAAoB;AAC7B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAAuB;AAC3C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;AACvC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,kBAAkB;AAClC,QAAA,OAAO,EAAE,kBAAkB;AAC3B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAAuB;AAClD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,kBAAkB,CAAC;AACjE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,yBAAyB;AACzC,QAAA,OAAO,EAAE,yBAAyB;AAClC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAAuB;AACjD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,iBAAiB,CAAC;AAChE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,wBAAwB;AACxC,QAAA,OAAO,EAAE,wBAAwB;AACjC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAAuB;AAChD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,gBAAgB,CAAC;AAC/D,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,uBAAuB;AACvC,QAAA,OAAO,EAAE,uBAAuB;AAChC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,WAAW;AAClB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAAuB;AACrD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,qBAAqB,CAAC;AACpE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4B;AAC5C,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,
EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAAuB;AACrD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,qBAAqB,CAAC;AACpE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4B;AAC5C,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAAuB;AACxD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,wBAAwB,CAAC;AACvE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,+BAA+B;AAC/C,QAAA,OAAO,EAAE,+BAA+B;AACxC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,sBAAsB;AACpC,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAAuB;AAC1D,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,CAAC;AACtD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,qCAAqC;AACrD,QAAA,OAAO,EAAE,qCAAqC;AAC9C,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,iBAAiB;AACxB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAAuB;AACxD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,wBAAwB,CAAC;AACpD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,+BAA+B;AAC/C,QAAA,OAAO,EAAE,+BAA+B;AACxC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,WAAW;AACzB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAAuB;AAC3C,IAAA,aAAa,EAAE,WAAW;AAC1B,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,iBAAiB;AACjC,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,iBAAiB;AAC1B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,SAAS;AAChB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAAuB;AACjD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;AAC7C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,uBAAuB;AACvC,QAAA,OAAO,EAAE,uBAAuB;AAChC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,UAAU;AACxB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,IAAI,GAAuB;AACtC,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;AAClC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,kBAAkB;AAClC,QAAA,OAAO,EAAE,kBAAkB;AAC3B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE;gBACb,IAAI;gBACJ,IAAI;gBACJ,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,MAAM;gBACN,SAAS;gBACT,MAAM;AACP,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAAuB;AACnD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,mBAAmB,CAAC;AAC/C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,yBAAyB;AACzC,QAAA,OAAO,EAAE,yBAAyB;AAClC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;AACpC,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAAuB;AACvD,IAAA,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,uBAAuB;AACxB,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,+BAA+B;AAC/C,QAAA,OAAO,EAAE,+BAA+B;AACxC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,iBAAiB;AACxB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAAuB;AACzD,IAAA,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,yBAAyB;AAC1B,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,iCAAiC;AACjD,QAAA,OAAO,EAAE,iCAAiC;AAC1C,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,iBAAiB;AACxB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAAuB;AAC/C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,eAAe,CAAC;AAC7E,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,sBAAsB;AACtC,QAAA,OAAO,EAAE,sBAAsB;AAC/B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MA
AM,iBAAiB,GAAuB;AACnD,IAAA,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,mBAAmB;AACpB,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,2BAA2B;AAC3C,QAAA,OAAO,EAAE,2BAA2B;AACpC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAAuB;AAC9C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,cAAc,CAAC;AAC5E,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,qBAAqB;AACrC,QAAA,OAAO,EAAE,qBAAqB;AAC9B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAAuB;AAC5C,IAAA,aAAa,EAAE,YAAY;AAC3B,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,kBAAkB;AAClC,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,kBAAkB;AAC3B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAAuB;AAChD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;AAC5C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,WAAW;AAC3B,QAAA,OAAO,EAAE,WAAW;AACpB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAAuB;AAC1C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;AACtC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,gBAAgB;AAChC,QAAA,OAAO,EAAE,gBAAgB;AACzB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,SAAS;AAChB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAAuB;AAC5C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;AACvC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,iBAAiB;AACjC,QAAA,OAAO,EAAE,iBAAiB;AAC1B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,SAAS;AAChB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAAuB;AACjD,IAAA,aAAa,EAAE,iBAAiB;AAChC,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,MAAM;AACpB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,oBAAoB;AACpC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAAuB;AAClD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC;AAC9C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,yBAAyB;AACzC,QAAA,OAAO,EAAE,yBAAyB;AAClC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,WAAW;AAClB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAAuB;AACzD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;AACrD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,gCAAgC;AAChD,QAAA,OAAO,EAAE,gCAAgC;AACzC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAAuB;AAChD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;AAC5C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,6BAA6B;AAC7C,QAAA,OAAO,EAAE,6BAA6B;AACtC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;AACnC,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,MAAM;AACpB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAAuB;AACzD,IAAA,aAAa,EAAE,yBAAyB;AACxC,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,kBAAkB;AAClC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,QAAQ;AACxB,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,QAAQ;AACjB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,MAAM;AACpB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAAuB;AACvC,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,kBAAkB;AAClC,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,kBAAkB;AAC3B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE;gBACb,IAAI;gBACJ,IAAI;gBACJ,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,MAAM;gBACN,SAAS;gBACT,MAAM;AA
CP,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAAuB;AAC9C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;AAC1C,IAAA,MAAM,EAAEE,YAAkB;CAC3B,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,MAAM;AACpB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,IAAI,GAAuB;AACtC,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;AAClC,IAAA,MAAM,EAAEC,QAAc;CACvB,CAAC;AAEK,MAAM,uBAAuB,GAAuB;AACzD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;AACrD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,aAAa;AAC7B,QAAA,OAAO,EAAE,aAAa;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,WAAW;AAClB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAAuB;AAC3D,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,2BAA2B,CAAC;AACvD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,oBAAoB;AACpC,QAAA,OAAO,EAAE,oBAAoB;AAC7B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,WAAW;AAClB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAAuB;AAC1C,IAAA,aAAa,EAAE,UAAU;AACzB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,UAAU;AACxB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,gBAAgB;AAChC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAAuB;AACnD,IAAA,aAAa,EAAE,mBAAmB;AAClC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,0BAA0B;AAC1C,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,0BAA0B;AACnC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAAuB;AACpD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;AAChD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,2BAA2B;AAC3C,QAAA,OAAO,EAAE,2BAA2B;AACpC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAAuB;AAC9C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;AACzC,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,0BAA0B;AACxC,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,cAAc;AAC9B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAAuB;AACvC,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,MAAM;AACf,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,iBAAiB;AAC/B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,QAAQ;AACxB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,MAAM;AACpB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAAuB;AAC3C,IAAA,aAAa,EAAE,WAAW;AAC1B,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,QAAQ;AACtB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,iBAAiB;AACjC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAAuB;AACnE,IAAA,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,mCAAmC;AACpC,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4B;AAC5C,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAAuB;AAC1D,IAAA,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,0BAA0B;AAC3B,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4B;AAC5C,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAAuB;AACzD,IAAA,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,yBAAyB;AAC1B,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4
B;AAC5C,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAAuB;AAC5C,IAAA,aAAa,EAAE,WAAW;AAC1B,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,iBAAiB;AACjC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAAuB;AAC3C,IAAA,aAAa,EAAE,WAAW;AAC1B,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,kBAAkB;AAClC,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,kBAAkB;AAC3B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,WAAW,GAAuB;AAC7C,IAAA,aAAa,EAAE,aAAa;AAC5B,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,mBAAmB;AACnC,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,mBAAmB;AAC5B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAAuB;AACpD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;AAChD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,2BAA2B;AAC3C,QAAA,OAAO,EAAE,2BAA2B;AACpC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,WAAW;AAClB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAAuB;AACxC,IAAA,aAAa,EAAE,OAAO;AACtB,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,YAAY;AAC5B,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,YAAY;AACrB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,UAAU;AACxB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA4B;AACnD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;AAC1C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,cAAc;AAC9B,QAAA,OAAO,EAAE,cAAc;AACvB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAAuB;AACjD,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;AAC7C,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,4BAA4B;AAC5C,QAAA,OAAO,EAAE,4BAA4B;AACrC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oBAAoB,GAAuB;AACtD,IAAA,aAAa,EAAE,sBAAsB;AACrC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,6BAA6B;AAC7C,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,6BAA6B;AACtC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,CAAC;AAC9C,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,iBAAiB;AAC/B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAAuB;AAC3C,IAAA,aAAa,EAAE,UAAU;AACzB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,YAAY;AAC1B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,gBAAgB;AAChC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,aAAa;AAC3B,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAAuB;AACzC,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,SAAS,CAAC;AACvE,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,6BAA6B;AAC7C,QAAA,OAAO,EAAE,6BAA6B;AACtC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAAuB;AAChD,IAAA,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,gBAAgB;AACjB,KAAA;AACD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,+BAA+B;AAC/C,QAAA,OAAO,EAAE,+BAA+B;AACxC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAAuB;AAC9C,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;AACzC,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,mBAAmB;AACnC,QAAA,OAAO,EAAE,mBAAmB;AAC5B,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,
EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,MAAM;AACpB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAAuB;AAC3C,IAAA,aAAa,EAAE,UAAU;AACzB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,WAAW;AACzB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,gBAAgB;AAChC,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAAuB;AAC1D,IAAA,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,CAAC;AACtD,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,kCAAkC;AAClD,QAAA,OAAO,EAAE,kCAAkC;AAC3C,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,SAAS;AAChB,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,OAAO;AACrB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA4B;AAC9C,IAAA,aAAa,EAAE,SAAS;AACxB,IAAA,MAAM,EAAE;AACN,QAAA,cAAc,EAAE,SAAS;AACzB,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,SAAS;AAClB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,MAAM,GAAuB;AACxC,IAAA,aAAa,EAAE,QAAQ;AACvB,IAAA,MAAM,EAAEC,eAAqB;CAC9B,CAAC;AAEK,MAAM,MAAM,GAA4B;AAC7C,IAAA,aAAa,EAAE,MAAM;AACrB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,WAAW;AACzB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,cAAc,EAAE,MAAM;AACtB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,QAAQ;AACf,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA4B;AAC/C,IAAA,aAAa,EAAE,UAAU;AACzB,IAAA,MAAM,EAAE;AACN,QAAA,YAAY,EAAE,WAAW;AACzB,QAAA,cAAc,EAAE,eAAe;AAC/B,QAAA,QAAQ,EAAE,IAAI;AACd,QAAA,OAAO,EAAE,eAAe;AACxB,QAAA,IAAI,EAAE;AACJ,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,aAAa,EAAE,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,CAAC;AACnD,SAAA;AACF,KAAA;CACF;;ACpuDD;;;;;;AAMG;AA0BH;MACa,OAAO,CAAA;AAGlB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;;AAKG;IACH,aAAa,CACX,qBAA4C,EAC5C,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,qBAAqB;YACrB,OAAO,EAAEC,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;KAC5C;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBC,4BAA0B,CACc,CAAC;KAC5C;AAED;;;;;AAKG;AACH,IAAA,aAAa,CACX,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAED,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;KAC5C;AAED;;;AAGG;AACH,IAAA,qBAAqB,CACnB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACc,CAAC;KACpD;AAED;;;;;AAKG;IACH,oBAAoB,CAClB,OAAgB,EAChB,OAAmD,EAAA;AAEnD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACc,CAAC;KACnD;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBE,6BAA2B,CACc,CAAC;KAC7C;AAED;;;;;;;AAOG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAEF,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBG,0BAAwB,CACc,CAAC;KAC1C;AAED;;;;;AAKG;AACH,IAAA,WAAW,CACT,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB
,GAAgC;YACtD,OAAO,EAAEH,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBI,0BAAwB,CACc,CAAC;KAC1C;AACF,CAAA;AACD;AACA,MAAMC,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AAEzE,MAAM,0BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEM,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEC,YAAoB;YAChC,aAAa,EAAEC,oCAA4C;AAC5D,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,qBAAgC;AAC7C,IAAA,eAAe,EAAE;AACf,QAAAC,OAAkB;AAClB,QAAAC,IAAe;AACf,QAAAC,gBAA2B;AAC5B,KAAA;AACD,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAEZ,eAAa;CAC1B,CAAC;AACF,MAAMJ,4BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEiB,qBAA6B;YACzC,aAAa,EAAEC,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEZ,YAAoB;YAChC,aAAa,EAAEa,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAV,OAAkB;AAClB,QAAAC,IAAe;AACf,QAAAC,gBAA2B;AAC5B,KAAA;AACD,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEiB,qBAA6B;YACzC,aAAa,EAAEC,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhB,YAAoB;YAChC,aAAa,EAAEiB,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAd,OAAkB;AAClB,QAAAE,gBAA2B;AAC3B,QAAAa,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACZ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;AACjE,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEqB,6BAAqC;YACjD,aAAa,EAAEC,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpB,YAAoB;YAChC,aAAa,EAAEqB,4CAAoD;AACpE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAhB,gBAA2B;AAC3B,QAAAiB,KAAgB;AAChB,QAAAC,MAAiB;AACjB,QAAAC,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,aAAa,EAAE,CAACpB,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;AAChE,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE6B,iBAAyB;YACrC,aAAa,EAAEC,kCAA0C;AAC1D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE5B,YAAoB;YAChC,aAAa,EAAE6B,2CAAmD;AACnE,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,OAAkB;AAC/B,IAAA,eAAe,EAAE;AACf,QAAA3B,OAAkB;AAClB,QAAAE,gBAA2B;AAC3B,QAAA0B,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACzB,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAEZ,eAAa;CAC1B,CAAC;AACF,MAAMH,6BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEqC,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhC,YAAoB;YAChC,aAAa,EAAEiC,qCAA6C;AAC7D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC7B,IAAe,EAAE8B,QAAmB,CAAC;AACvD,IAAA,aAAa,EAAE,CAAC5B,GAAc,CAAC;IAC/B,gBAAgB,EAAE,CAACG,OAAkB,EAAEK,OAAkB,CAAC;AAC1D,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEh
B,eAAa;CAC1B,CAAC;AACF,MAAMF,0BAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEuC,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEnC,YAAoB;YAChC,aAAa,EAAEoC,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,IAAe;IAC5B,eAAe,EAAE,CAAChC,gBAA2B,EAAEiC,KAAgB,CAAC;AAChE,IAAA,aAAa,EAAE,CAAChC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAC,oBAA+B;AAChC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAE1C,eAAa;CAC1B,CAAC;AACF,MAAMD,0BAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE4C,iBAAyB;YACrC,aAAa,EAAEC,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1C,YAAoB;YAChC,aAAa,EAAE2C,kCAA0C;AAC1D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAtC,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAmB,KAAgB;AAChB,QAAAC,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B;;ACraD;;;;;;AAMG;AA4CH;MACa,SAAS,CAAA;AAGpB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;AAIG;AACH,IAAA,MAAM,CACJ,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBqD,qBAAmB,CACgB,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAA8C,EAAA;AAE9C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAErD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBC,4BAA0B,CACgB,CAAC;KAC9C;AAED;;;;AAIG;AACH,IAAA,MAAM,CACJ,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAED,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBsD,qBAAmB,CACgB,CAAC;KACvC;AAED;;;AAGG;AACH,IAAA,WAAW,CACT,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEtD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBuD,0BAAwB,CACgB,CAAC;KAC5C;AAED;;;;AAIG;AACH,IAAA,eAAe,CACb,OAAgD,EAAA;AAEhD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEvD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;KAChD;AAED;;;;AAIG;AACH,IAAA,eAAe,CACb,OAAgD,EAAA;AAEhD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;KAChD;AAED;;;AAGG;AACH,IAAA,OAAO,CACL,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACgB,CAAC;KACxC;AAED;;;;AAIG;IACH,MAAM,CACJ,mBAA2B,EAC3B,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,mBAAmB;YACnB,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;KACvC;AAED;;;;;;;AAOG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;KAC5C;AAED;
;;;AAIG;AACH,IAAA,WAAW,CACT,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;KAC5C;AAED;;;;AAIG;AACH,IAAA,YAAY,CACV,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBwD,2BAAyB,CACgB,CAAC;KAC7C;AAED;;;;;AAKG;IACH,YAAY,CACV,OAAe,EACf,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAExD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClByD,2BAAyB,CACgB,CAAC;KAC7C;AAED;;;;;AAKG;IACH,UAAU,CACR,OAAe,EACf,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEzD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB0D,yBAAuB,CACgB,CAAC;KAC3C;AAED;;;;AAIG;AACH,IAAA,UAAU,CACR,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE1D,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB2D,yBAAuB,CACgB,CAAC;KAC3C;AAED;;;;;;;;AAQG;AACH,IAAA,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAE3D,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB4D,0BAAwB,CACgB,CAAC;KAC5C;AAED;;;AAGG;AACH,IAAA,mBAAmB,CACjB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE5D,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,gCAAgC,CACgB,CAAC;KACpD;AAED;;;;;;;AAOG;IACH,wBAAwB,CACtB,SAAiB,EACjB,OAAyD,EAAA;AAEzD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACgB,CAAC;KACzD;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBE,6BAA2B,CACgB,CAAC;KAC/C;AACF,CAAA;AACD;AACA,MAAMG,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AAEzE,MAAMqD,qBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEQ,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtD,YAAoB;YAChC,aAAa,EAAEuD,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAClD,gBAA2B,EAAEmD,QAAmB,CAAC;AACnE,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAC,MAAiB;AACjB,QAAAC,sBAAiC;AACjC,QAAAC,8BAAyC;AAC1C,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE9D,eAAa;CAC1B,CAAC;AACF,MAAMJ,4BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEmE,6BAAqC;AACrD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7D,YAAoB;YAChC,aAAa,EAAE8D,sCAA8C;AAC9D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACzD,gBAA2B,EAAEmD,QAAmB,CAAC;AACnE,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjE,eAAa;CAC1B,CAAC;AACF,MAAMiD,qBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,QAAQ;AACpB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiB,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhE,YAAoB;YAChC,aAAa,EAAEiE,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC5D,gBAA2B,EAAEmD,QAAmB
,CAAC;AACnE,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC7B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErE,eAAa;CAC1B,CAAC;AACF,MAAMkD,0BAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoB,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpE,YAAoB;YAChC,aAAa,EAAEqE,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAhE,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAc,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAAChE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEpE,eAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;wBACP,IAAI,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,SAAS,EAAE,kBAAkB,EAAE;AAC3D,qBAAA;AACF,iBAAA;AACD,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,kBAAkB;AACnC,aAAA;YACD,aAAa,EAAEyE,+BAAuC;AACvD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEvE,YAAoB;YAChC,aAAa,EAAEwE,wCAAgD;AAChE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAnE,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAiB,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACnE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjE,eAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE4E,+BAAuC;AACvD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1E,YAAoB;YAChC,aAAa,EAAE2E,wCAAgD;AAChE,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,YAAuB;AACpC,IAAA,eAAe,EAAE;AACf,QAAAvE,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAiB,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACnE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAgD,MAAiB;AACjB,QAAAK,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC7B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAErE,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+E,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7E,YAAoB;YAChC,aAAa,EAAE8E,gCAAwC;AACxD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAzE,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAuB,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACzE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAkE,oBAA+B;AAC/B,QAAAC,uBAAkC;AACnC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEnF,eAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoF,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAElF,YAAoB;YAChC,aAAa,EAAEmF,+BAAuC;AACvD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA9E,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAA4B,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC9E,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAuE,mBAA8B;AAC9B,QAAAC,aAAwB;AACzB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAExF,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,E
AAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEyF,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEvF,YAAoB;YAChC,aAAa,EAAEwF,oCAA4C;AAC5D,SAAA;AACF,KAAA;IACD,WAAW,EAAEnD,IAAe;AAC5B,IAAA,eAAe,EAAE;AACf,QAAAhC,gBAA2B;AAC3B,QAAAiC,KAAgB;AAChB,QAAAkB,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAC,oBAA+B;AAChC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAE1C,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE2C,iBAAyB;YACrC,aAAa,EAAEgD,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEzF,YAAoB;YAChC,aAAa,EAAE0F,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAArF,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAmB,KAAgB;AAChB,QAAAC,KAAgB;AAChB,QAAAW,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAMmD,2BAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0C,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3F,YAAoB;YAChC,aAAa,EAAE4F,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvF,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA2B,MAAiB;AACjB,QAAAC,QAAmB;AACnB,QAAAC,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAElG,eAAa;CAC1B,CAAC;AACF,MAAMoD,2BAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+C,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEjG,YAAoB;YAChC,aAAa,EAAEkG,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA7F,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAgC,OAAkB;AAClB,QAAAC,QAAmB;AACpB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEtG,eAAa;CAC1B,CAAC;AACF,MAAMqD,yBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEkD,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErG,YAAoB;YAChC,aAAa,EAAEsG,mCAA2C;AAC3D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAjG,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAG,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzG,eAAa;CAC1B,CAAC;AACF,MAAMsD,yBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoD,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExG,YAAoB;YAChC,aAAa,EAAEyG,mCAA2C;AAC3D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAApG,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QA
AAuC,OAAkB;AAClB,QAAAC,WAAsB;AACvB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE7G,eAAa;CAC1B,CAAC;AACF,MAAMuD,0BAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuD,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE5G,YAAoB;YAChC,aAAa,EAAE6G,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAxG,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAU,OAAkB;AAClB,QAAAC,gBAA2B;AAC5B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjH,eAAa;CAC1B,CAAC;AACF,MAAM,gCAAgC,GAA2B;AAC/D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEkH,4BAAoC;YAChD,aAAa,EAAEC,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEjH,YAAoB;YAChC,aAAa,EAAEkH,4CAAoD;AACpE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA7G,gBAA2B;AAC3B,QAAAiB,KAAgB;AAChB,QAAAC,MAAiB;AACjB,QAAAC,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAA+B,QAAmB;AACnB,QAAA2D,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7G,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;AACpE,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEsH,iCAAyC;YACrD,aAAa,EAAEC,wCAAgD;AAChE,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErH,YAAoB;YAChC,aAAa,EAAEsH,iDAAyD;AACzE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAjH,gBAA2B;AAC3B,QAAAiB,KAAgB;AAChB,QAAAC,MAAiB;AACjB,QAAAC,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAA+B,QAAmB;AACnB,QAAA2D,QAAmB;AACnB,QAAAI,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACjH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAMH,6BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6H,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExH,YAAoB;YAChC,aAAa,EAAEyH,uCAA+C;AAC/D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACrH,IAAe,EAAE8B,QAAmB,CAAC;AACvD,IAAA,aAAa,EAAE,CAAC5B,GAAc,CAAC;IAC/B,gBAAgB,EAAE,CAACG,OAAkB,EAAEK,OAAkB,CAAC;AAC1D,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B;;AC/5BD;;;;;;AAMG;AA0DH;MACa4H,MAAI,CAAA;AAGf;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;AAIG;AACH,IAAA,QAAQ,CACN,OAAoC,EAAA;AAEpC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEjI,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;KACpC;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAAyC,EAAA;AAEzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACW,CAAC;KACzC;AAED;;;;;;;;;;;;;;AAcG;AACH,IAAA,MAAM,CAAC,OAAkC,EAAA;AACvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACW,CAAC;KAClC;AAED;;;AAGG;AACH,IAAA,QAAQ,CACN,OAAoC,EAAA;AAEpC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;KACpC;AAED;;;;AAIG;IACH,SAAS,CACP,aAAgC,EAChC,OAAqC,EAAA;AAErC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,
CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,sBAAsB,CACW,CAAC;KACrC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;AAGG;AACH,IAAA,qBAAqB,CACnB,OAAiD,EAAA;AAEjD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACW,CAAC;KACjD;AAED;;;AAGG;AACH,IAAA,wBAAwB,CACtB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACW,CAAC;KACpD;AAED;;;;AAIG;IACH,YAAY,CACV,SAAkB,EAClB,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;AAIG;AACH,IAAA,WAAW,CACT,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,YAAY,CACV,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;;AAKG;IACH,YAAY,CACV,OAAe,EACf,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;;AAKG;IACH,UAAU,CACR,OAAe,EACf,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;KACtC;AAED;;;;;;;;AAQG;AACH,IAAA,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,UAAU,CACR,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;KACtC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;;;;;AAOG;IACH,gBAAgB,CACd,UAAkB,EAClB,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;KAC5C;AAED;;;;;;;;AAQG;IACH,WAAW,CACT,UAAkB,EAClB,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;;;AAMG;IACH,gBAAgB,CACd,MAAc,EACd,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;KAC5C;AAED;;;;;;;;AAQG;IACH,OAAO,CACL,IAAgB,EAChB,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE
,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;;AAIG;AACH,IAAA,KAAK,CAAC,OAAiC,EAAA;AACrC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kBAAkB,CACW,CAAC;KACjC;AAED;;;AAGG;AACH,IAAA,OAAO,CAAC,OAAmC,EAAA;AACzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AAED;;;AAGG;AACH,IAAA,OAAO,CAAC,OAAmC,EAAA;AACzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AAEzE,MAAM,qBAAqB,GAA2B;AACpD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEkI,mBAA2B;AAC3C,SAAA;AACD,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEA,mBAA2B;AAC3C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3H,YAAoB;YAChC,aAAa,EAAE4H,4BAAoC;AACpD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvH,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAC,kBAA6B;AAC7B,QAAAC,oBAA+B;AAC/B,QAAAC,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0I,wBAAgC;AAChD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExI,YAAoB;YAChC,aAAa,EAAEyI,iCAAyC;AACzD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAApI,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,QAAQ;AACpB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE4I,iBAAyB;AACzC,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1I,YAAoB;YAChC,aAAa,EAAE2I,0BAAkC;AAClD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAtI,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAAc,cAAyB;AAC1B,KAAA;AACD,IAAA,aAAa,EAAE,CAACtI,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAM,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE/I,eAAa;CAC1B,CAAC;AACF,MAAM,qBAAqB,GAA2B;AACpD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgJ,mBA
A2B;AAC3C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE9I,YAAoB;YAChC,aAAa,EAAE+I,4BAAoC;AACpD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC1I,gBAA2B,EAAE0E,KAAgB,CAAC;AAChE,IAAA,aAAa,EAAE,CAACzE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,sBAAsB,GAA2B;AACrD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEkJ,oBAA4B;AAC5C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhJ,YAAoB;YAChC,aAAa,EAAEiJ,6BAAqC;AACrD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC5I,gBAA2B,EAAE6I,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC5I,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAqI,aAAwB;AACxB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEtJ,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuJ,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErJ,YAAoB;YAChC,aAAa,EAAEsJ,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAClJ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AAClC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE9J,eAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;AACjE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+J,gCAAwC;AACxD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7J,YAAoB;YAChC,aAAa,EAAE8J,yCAAiD;AACjE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACzJ,gBAA2B,EAAE0J,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACzJ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAqD,iBAA4B;AAC5B,QAAA6F,wBAAmC;AACnC,QAAAC,sBAAiC;AAClC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEnK,eAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;AACpE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,QAAQ;AACpB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoK,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAElK,YAAoB;YAChC,aAAa,EAAEmK,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC9J,gBAA2B,EAAE0J,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACzJ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsK,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpK,YAAoB;YAChC,aAAa,EAAEqK,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAChK,gBAA2B,EAAEiK,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAChK,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyJ,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzK,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0K,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExK,YAAoB;YAChC,aAAa,EAAEyK,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACpK,gBAA2B,EAAEiE,KAAgB,CAAC;AAChE,IAAA,aAAa,EAAE,CAAChE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+
D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5K,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6K,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3K,YAAoB;YAChC,aAAa,EAAE4K,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACvK,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA2B,MAAiB;AACjB,QAAAC,QAAmB;AACnB,QAAAC,eAA0B;AAC1B,QAAAqC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+K,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7K,YAAoB;YAChC,aAAa,EAAE8K,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACzK,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAgC,OAAkB;AAClB,QAAAC,QAAmB;AACnB,QAAAiC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE/K,YAAoB;YAChC,aAAa,EAAEgL,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC3K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAG,OAAkB;AAClB,QAAA8B,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEmL,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEjL,YAAoB;YAChC,aAAa,EAAEkL,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC7K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAU,OAAkB;AAClB,QAAAC,gBAA2B;AAC3B,QAAAsB,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEqL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEnL,YAAoB;YAChC,aAAa,EAAEoL,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC/K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAuC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAA0B,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuL,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErL,YAAoB;YAChC,aAAa,EAAEsL,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjL,gBAA2B,EAAEkL,MAAiB,C
AAC;AACjE,IAAA,aAAa,EAAE,CAACjL,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5K,eAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;AAC5D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0L,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExL,YAAoB;YAChC,aAAa,EAAEyL,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACpL,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAyB,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAyB,IAAe;AACf,QAAAC,iBAA4B;AAC5B,QAAAC,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAC,YAAuB;AACvB,QAAAC,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAC,QAAmB;AACnB,QAAAC,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEtM,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuM,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErM,YAAoB;YAChC,aAAa,EAAEsM,+BAAuC;AACvD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACjM,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAyB,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAE,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAE,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAAG,eAA0B;AAC1B,QAAAC,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAC,cAAyB;AAC1B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5M,eAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;AAC5D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6M,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3M,YAAoB;YAChC,aAAa,EAAE4M,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvM,gBAA2B;AAC3B,QAAAwM,MAAiB;AACjB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxM,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAgJ,uBAAkC;AACnC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjN,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEkN,kBAA0B;AAC1C,SAAA;AACD,QAAA,GAAG,EAAE;YACH,aAAa,EAAEA,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhN,YAAoB;YAChC,aAAa,EAAEiN,2BAAmC;AACnD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA5M,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAAoF,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC5M,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AACjB,QAAAoD,iBAA4B;AAC5B,QAAAwB,KAAgB;AACjB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErN,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsN,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpN,YAAoB;YAChC,aAAa,EAAEqN,kCAA0C;A
AC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjN,IAAe,EAAE8B,QAAmB,CAAC;AACvD,IAAA,aAAa,EAAE,CAAC5B,GAAc,CAAC;IAC/B,gBAAgB,EAAE,CAACG,OAAkB,EAAEK,OAAkB,CAAC;AAC1D,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,kBAAkB,GAA2B;AACjD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEwN,gBAAwB;AACxC,SAAA;AACD,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEA,gBAAwB;AACxC,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtN,YAAoB;YAChC,aAAa,EAAEuN,yBAAiC;AACjD,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,YAAuB;AACpC,IAAA,eAAe,EAAE;AACf,QAAAnN,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAA4F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACnN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAqD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE4N,QAAgB;YAC5B,aAAa,EAAEC,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3N,YAAoB;YAChC,aAAa,EAAE4N,2BAAmC;AACnD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvN,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAA+F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgO,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE9N,YAAoB;YAChC,aAAa,EAAE+N,2BAAmC;AACnD,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,IAAe;AAC5B,IAAA,eAAe,EAAE;AACf,QAAA3N,gBAA2B;AAC3B,QAAAyH,SAAoB;AACpB,QAAA+F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAqD,OAAkB;AAClB,QAAAwE,MAAiB;AACjB,QAAA0F,uBAAkC;AAClC,QAAAC,yBAAoC;AACrC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAEpO,eAAa;CAC1B;;AChwCD;;;;;;AAMG;AA4BH;MACa,QAAQ,CAAA;AAGnB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;;;AAMG;AACH,IAAA,MAAM,CACJ,aAAqB,EACrB,iBAAyB,EACzB,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,iBAAiB;YACjB,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBqD,qBAAmB,CACe,CAAC;KACtC;AAED;;;;;AAKG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAErD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACe,CAAC;KAC3C;AAED;;;;AAIG;IACH,UAAU,CACR,aAAqB,EACrB,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACe,CAAC;KAC1C;AAED;;;;;;;;;;AAUG;IACH,kBAAkB,CAChB,SAAiB,EACjB,WAAmB,EACnB,aAAqB,EACrB,KAAa,EACb,OAAkD,EAAA;AAElD,
QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,WAAW;YACX,aAAa;YACb,KAAK;YACL,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACe,CAAC;KAClD;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACe,CAAC;KAC7C;AAED;;;;AAIG;AACH,IAAA,iBAAiB,CACf,OAAiD,EAAA;AAEjD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACe,CAAC;KACjD;AAED;;;;;AAKG;IACH,MAAM,CACJ,iBAAyB,EACzB,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,iBAAiB;YACjB,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;KACtC;AAED;;;;;;AAMG;IACH,oBAAoB,CAClB,oBAA8C,EAC9C,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,oBAAoB;YACpB,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACe,CAAC;KACpD;AAED;;;;;;;;;;;AAWG;IACH,eAAe,CACb,UAAkB,EAClB,OAA+C,EAAA;AAE/C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACe,CAAC;KAC/C;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM0O,YAAU,GAAG,IAAI1O,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAMqD,qBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpO,YAAoB;YAChC,aAAa,EAAEqO,8BAAsC;AACtD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAAChO,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAAkC,QAAmB;AACnB,QAAAC,iBAA4B;AAC5B,QAAAC,kBAA6B;AAC9B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE1O,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE2O,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEzO,YAAoB;YAChC,aAAa,EAAE0O,mCAA2C;AAC3D,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,KAAgB;IAC7B,eAAe,EAAE,CAACtO,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAG,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAC,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AACnC,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;gBACnBf,YAAU;CACX,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgB,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEnP,YAA
oB;YAChC,aAAa,EAAEoP,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC/O,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAG,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAsE,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AAClC,QAAAG,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEvP,eAAa;CAC1B,CAAC;AACF,MAAM,+BAA+B,GAA2B;AAC9D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEwP,iCAAyC;AACzD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtP,YAAoB;YAChC,aAAa,EAAEuP,0CAAkD;AAClE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAClP,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAsC,SAAoB;AACpB,QAAAC,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AAClC,QAAAM,SAAoB;AACpB,QAAAC,WAAsB;AACtB,QAAAC,kBAA6B;AAC7B,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE7P,eAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE8P,QAAgB;YAC5B,aAAa,EAAEC,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7P,YAAoB;YAChC,aAAa,EAAE8P,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAzP,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAoG,QAAmB;AACnB,QAAAkI,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACzP,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAM,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,8BAA8B,GAA2B;AAC7D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE8P,QAAgB;YAC5B,aAAa,EAAEI,gCAAwC;AACxD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhQ,YAAoB;YAChC,aAAa,EAAEiQ,yCAAiD;AACjE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA5P,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAoG,QAAmB;AACnB,QAAAkI,MAAiB;AACjB,QAAAG,YAAuB;AACxB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC5P,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAM,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAA4H,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErQ,eAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsQ,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpQ,YAAoB;YAChC,aAAa,EAAEqQ,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjQ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,
QAAAmC,eAA0B;AAC1B,QAAA6D,iBAA4B;AAC7B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzO,eAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;AAChE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEwQ,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtQ,YAAoB;YAChC,aAAa,EAAEuQ,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACnQ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAiG,kBAA6B;AAC7B,QAAAgC,oBAA+B;AAChC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE1Q,eAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE2Q,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEzQ,YAAoB;YAChC,aAAa,EAAE0Q,uCAA+C;AAC/D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACrQ,gBAA2B,EAAEsQ,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACrQ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAA0D,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEnM,eAAa;CAC1B;;ACzkBD;;;;;;AAMG;AAiBH;MACa,UAAU,CAAA;AAGrB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;AAIG;IACH,MAAM,CACJ,aAAqB,EACrB,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACiB,CAAC;KACxC;AAED;;;;;;;AAOG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACiB,CAAC;KAC7C;AAED;;;;;;;;AAQG;AACH,IAAA,kBAAkB,CAChB,SAAiB,EACjB,aAAqB,EACrB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACiB,CAAC;KACpD;AAED;;;;AAIG;AACH,IAAA,IAAI,CACF,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iBAAiB,CACiB,CAAC;KACtC;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM0O,YAAU,GAAG,IAAI1O,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEmR,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE5Q,YAAoB;YAChC,aAAa,EAAE6Q,gCAAwC;AACxD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACxQ,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAwB,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA0E,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhR,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA
,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiR,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE/Q,YAAoB;YAChC,aAAa,EAAEgR,qCAA6C;AAC7D,SAAA;AACF,KAAA;IACD,WAAW,EAAErC,KAAgB;IAC7B,eAAe,EAAE,CAACtO,gBAA2B,EAAE4Q,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC3Q,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAAoC,OAAkB;AAClB,QAAAC,cAAyB;AAC1B,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;gBACnBhD,YAAU;CACX,CAAC;AACF,MAAM,+BAA+B,GAA2B;AAC9D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiD,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpR,YAAoB;YAChC,aAAa,EAAEqR,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAChR,gBAA2B,EAAE4Q,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC3Q,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAwB,uBAAkC;AAClC,QAAAuB,SAAoB;AACpB,QAAAE,kBAA6B;AAC7B,QAAAwB,OAAkB;AAClB,QAAAC,cAAyB;AACzB,QAAAG,YAAuB;AACxB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAExR,eAAa;CAC1B,CAAC;AACF,MAAM,iBAAiB,GAA2B;AAChD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEyR,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEvR,YAAoB;YAChC,aAAa,EAAEwR,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACnR,gBAA2B,EAAEoR,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACnR,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAA6I,cAAyB;AAC1B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErR,eAAa;CAC1B;;ACxRD;;;;;;AAMG;AAuBH;MACa,SAAS,CAAA;AAGpB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;;;;;AAQG;AACH,IAAA,MAAM,CACJ,aAAqB,EACrB,IAA8B,EAC9B,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;KACvC;AAED;;;;;;;;;;;;AAYG;AACH,IAAA,cAAc,CACZ,aAAqB,EACrB,UAAkB,EAClB,OAA+C,EAAA;AAE/C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;KAC/C;AAED;;;;;;;;AAQG;AACH,IAAA,UAAU,CACR,OAAe,EACf,aAAqB,EACrB,IAA8B,EAC9B,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;KAC3C;AAED;;;;;;;;;AASG;AACH,IAAA,iBAAiB,CACf,OAAe,EACf,aAAqB,EACrB,SAAiB,EACjB,OAAkD,EAAA;AAElD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,SAAS;YACT,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACgB,CAAC;KAClD;AAED;;;;;;;;;;AAUG;IACH,eAAe,CACb,MAAuB,EA
CvB,OAAgD,EAAA;AAEhD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;KAChD;AAED;;;;;;AAMG;IACH,YAAY,CACV,QAAuB,EACvB,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,QAAQ;YACR,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;KAC7C;AACF,CAAA;AACD;AACA,MAAM,aAAa,GAAG,IAAIA,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAIA,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiS,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1R,YAAoB;YAChC,aAAa,EAAE2R,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,WAAW,EAAEhD,KAAgB;AAC7B,IAAA,eAAe,EAAE,CAACtO,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA6B,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAA8C,SAAoB;AACrB,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEC,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7R,YAAoB;YAChC,aAAa,EAAE8R,uCAA+C;AAC/D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACzR,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAc,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAE,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAC,YAAuB;AACvB,QAAAC,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAM,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAC,cAAyB;AACzB,QAAAuB,uBAAkC;AAClC,QAAA2D,SAAoB;AACpB,QAAAG,wBAAmC;AACpC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEC,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhS,YAAoB;YAChC,aAAa,EAAEiS,mCAA2C;AAC3D,SAAA;AACF,KAAA;IACD,WAAW,EAAEtD,KAAgB;AAC7B,IAAA,eAAe,EAAE;AACf,QAAAtO,gBAA2B;AAC3B,QAAA6R,MAAiB;AACjB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7R,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAmE,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAsC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,8BAA8B,GAA2B;AAC7D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsD,iCAAyC;AACzD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpS,YAAoB;YAChC,aAAa,EAAEqS
,0CAAkD;AAClE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAhS,gBAA2B;AAC3B,QAAA6R,MAAiB;AACjB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7R,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAmE,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAsC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAA+C,SAAoB;AACpB,QAAAE,kBAA6B;AAC7B,QAAA4B,YAAuB;AACxB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgB,+BAAuC;AACvD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtS,YAAoB;YAChC,aAAa,EAAEuS,wCAAgD;AAChE,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,MAAiB;IAC9B,eAAe,EAAE,CAACnS,gBAA2B,EAAEoS,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACnS,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA+C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA6B,uBAAkC;AAClC,QAAAC,yBAAoC;AACrC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEwE,SAAiB;YAC7B,aAAa,EAAEC,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3S,YAAoB;YAChC,aAAa,EAAE4S,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvS,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAA4K,MAAiB;AACjB,QAAAI,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvS,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B;;AC5cD;AAKA;;AAEG;MACU,MAAM,GAAGuK,2BAAkB,CAAC,cAAc;;ACRvD;AACA;AAEO,MAAM,WAAW,GAAW,SAAS,CAAC;AACtC,MAAM,eAAe,GAAW,YAAY,CAAC;AAE7C,MAAM,gCAAgC,GAAW,GAAG,GAAG,IAAI,GAAG,IAAI,CAAC;AACnE,MAAM,gCAAgC,GAAW,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC;AACpE,MAAM,qBAAqB,GAAW,KAAK,CAAC;AAC5C,MAAM,+BAA+B,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAChE,MAAM,iCAAiC,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClE,MAAM,mCAAmC,GAAW,CAAC,CAAC;AAEtD,MAAM,eAAe,GAAW,GAAG,GAAG,IAAI,CAAC;AAClD;;AAEG;AACI,MAAM,kBAAkB,GAAsB,qCAAqC;AAEnF,MAAM,YAAY,GAAG;AAC1B,IAAA,UAAU,EAAE;AACV,QAAA,sBAAsB,EAAE,GAAG;AAC3B,QAAA,SAAS,EAAE,KAAK;AAChB,QAAA,QAAQ,EAAE,UAAU;AACpB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,OAAO,EAAE,SAAS;AACnB,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAAG;AAC/B,IAAA,aAAa,EAAE,GAAG;AAClB,IAAA,aAAa,EAAE,GAAG;AAClB,IAAA,cAAc,EAAE,GAAG;AACnB,IAAA,kBAAkB,EAAE,GAAG;AACvB,IAAA,0BAA0B,EAAE,GAAG;CAChC,CAAC;AAEK,MAAM,eAAe,GAAG;AAC7B,IAAA,aAAa,EAAE,eAAe;AAC9B,IAAA,oBAAoB,EAAE,QAAQ;AAC9B,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,UAAU,EAAE,YAAY;AACxB,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,WAAW,EAAE,aAAa;AAC1B,IAAA,yBAAyB,EAAE,2BAA2B;AACtD,IAAA,YAAY,EAAE,cAAc;AAC5B,IAAA,MAAM,EAAE,QAAQ;AAChB,IAAA,IAAI,EAAE,MAAM;AACZ,IAAA,QAAQ,EAAE,UAAU;AACpB,IAAA,iBAAiB,EAAE,mBAAmB;AACtC,IAAA,aAAa,EAAE,eAAe;AAC9B,IAAA,mBAAmB,EAAE,qBAAqB;AAC1C,IAAA,kBAAkB,EAAE,OAAO;AAC3B,IAAA,KAAK,EAAE,OAAO;AACd,IAAA,UAAU,EAAE,YAAY;AA
CxB,IAAA,sBAAsB,EAAE,wBAAwB;AAChD,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,SAAS,EAAE,WAAW;AACtB,IAAA,eAAe,EAAE,iBAAiB;AAClC,IAAA,YAAY,EAAE,cAAc;CAC7B,CAAC;AAEK,MAAM,QAAQ,GAAG,EAAE,CAAC;AACpB,MAAM,OAAO,GAAG,GAAG,CAAC;AAEpB,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClC,MAAM,iBAAiB,GAAG,GAAG,CAAC;AAC9B,MAAM,0BAA0B,GAAG,CAAC,GAAG,SAAS,CAAC;AACjD,MAAM,gBAAgB,GAAG,MAAM,CAAC;AAChC,MAAM,gBAAgB,GAAG,UAAU,CAAC;AAEpC,MAAM,wBAAwB,GAAG,QAAQ,CAAC;AAE1C,MAAM,2BAA2B,GAAG,sNAAsN,CAAC;AAE3P,MAAM,oCAAoC,GAAG;IAClD,6BAA6B;IAC7B,eAAe;IACf,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,YAAY;IACZ,aAAa;IACb,mBAAmB;IACnB,YAAY;IACZ,wBAAwB;IACxB,WAAW;IACX,iBAAiB;IACjB,iBAAiB;IACjB,+BAA+B;IAC/B,cAAc;IACd,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,kBAAkB;IAClB,aAAa;IACb,eAAe;IACf,MAAM;IACN,eAAe;IACf,QAAQ;IACR,MAAM;IACN,oBAAoB;IACpB,kBAAkB;IAClB,2BAA2B;IAC3B,cAAc;IACd,oBAAoB;IACpB,kBAAkB;IAClB,8BAA8B;IAC9B,qBAAqB;IACrB,kBAAkB;IAClB,mBAAmB;IACnB,YAAY;IACZ,+BAA+B;IAC/B,uBAAuB;IACvB,eAAe;IACf,mBAAmB;IACnB,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,8BAA8B;IAC9B,2BAA2B;IAC3B,mBAAmB;IACnB,qBAAqB;IACrB,yBAAyB;IACzB,yBAAyB;IACzB,iCAAiC;IACjC,+BAA+B;IAC/B,6BAA6B;IAC7B,+BAA+B;IAC/B,4BAA4B;IAC5B,4BAA4B;IAC5B,0BAA0B;IAC1B,uBAAuB;IACvB,wBAAwB;IACxB,yBAAyB;IACzB,2BAA2B;IAC3B,gBAAgB;IAChB,gCAAgC;IAChC,oBAAoB;IACpB,+BAA+B;IAC/B,uBAAuB;IACvB,4BAA4B;IAC5B,qCAAqC;IACrC,2BAA2B;IAC3B,4BAA4B;IAC5B,4BAA4B;IAC5B,4BAA4B;IAC5B,uBAAuB;IACvB,mBAAmB;IACnB,yBAAyB;IACzB,qBAAqB;IACrB,eAAe;IACf,iBAAiB;IACjB,iBAAiB;IACjB,wBAAwB;IACxB,4BAA4B;IAC5B,yBAAyB;IACzB,6BAA6B;IAC7B,eAAe;IACf,yBAAyB;IACzB,sBAAsB;IACtB,+BAA+B;IAC/B,2BAA2B;IAC3B,iCAAiC;IACjC,gBAAgB;IAChB,4BAA4B;IAC5B,cAAc;IACd,qBAAqB;CACtB,CAAC;AAEK,MAAM,wCAAwC,GAAG;IACtD,MAAM;IACN,YAAY;IACZ,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,IAAI;IACJ,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,IAAI;IACJ,IAAI;IACJ,SAAS;IACT,QAAQ;IACR,QAAQ;IACR,QAAQ;IACR,SAAS;IACT,SAAS;IACT,eAAe;IACf,WAAW;IACX,cAAc;IACd,KAAK;IACL,OAAO;IACP,KAAK;IACL,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;CACX,CAAC;AAEK,MAAM,sCAAsC,GAAG,qCAAqC,CAAC;AACrF,MAAM,yCAAyC,GACpD,2CAA2C,CAAC;AAE9C;AACA;AACO,MAAM,cAAc,GAAG;IAC5B,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;IACP,OAAO;CACR;;AC5OD;AA2CA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmDG;AACG,SAAU,aAAa,CAAC,GAAW,EAAA;IACvC,MAAM,SAAS,GAAGC,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;AAC/B,IAAA,IAAI,GAAG,IAAI,IAAI,GAAG,CAAC;AAEnB,IAAA,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC;AACpB,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAExB,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAWD,SAAS,4BAA4B,CAAC,gBAAwB,EAAA;;;IAG5D,IAAI,QAAQ,GAAG,EAAE,CAAC;IAClB,IAAI,gBAAgB,CAAC,MAAM,CAAC,6BAA6B,CAAC,KAAK,CAAC,CAAC,EAAE;;QAEjE,MAAM,gBAAgB,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACrD,QAAA,KAAK,MAAM,OAAO,IAAI,gBAAgB,EAAE;YACtC,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,6BAA6B,CAAC,EAAE;AAC5D,gBAAA,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,iCAAiC,CAAE,CAAC,CAAC,CAAC,CAAC;AACxE,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC;AAClB,CAAC;AAEe,SAAA,oBAAoB,CAClC,gBAAwB,EACxB,QAM2B,EAAA;IAE3B,MAAM,QAAQ,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7C,IAAA,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;AACvC,YAAA,OAAO,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAE,CAAC,CAAC,CAAC,CAAC;AACrD,SAAA;AACF,KAAA;AACD,IAAA,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;;AAKG;AACG,SAAU,4
BAA4B,CAAC,gBAAwB,EAAA;IACnE,IAAI,QAAQ,GAAG,EAAE,CAAC;AAElB,IAAA,IAAI,gBAAgB,CAAC,UAAU,CAAC,4BAA4B,CAAC,EAAE;;AAE7D,QAAA,QAAQ,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QAC1D,gBAAgB,GAAG,2BAA2B,CAAC;AAChD,KAAA;;IAGD,IAAI,YAAY,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,cAAc,CAAC,CAAC;;;IAG1E,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC;IAErF,IACE,gBAAgB,CAAC,MAAM,CAAC,2BAA2B,CAAC,KAAK,CAAC,CAAC;QAC3D,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,EAC7C;;QAGA,IAAI,wBAAwB,GAAG,EAAE,CAAC;QAClC,IAAI,WAAW,GAAG,EAAE,CAAC;QACrB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QACrD,IAAI,cAAc,GAAG,EAAE,CAAC;;AAGxB,QAAA,WAAW,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;AACpE,QAAA,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,gBAAgB,EAAE,YAAY,CAAC,EAAE,QAAQ,CAAC,CAAC;QAEzF,IAAI,CAAC,YAAY,EAAE;;;AAIjB,YAAA,wBAAwB,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,0BAA0B,CAAC,CAAC;AAC9F,YAAA,MAAM,QAAQ,GAAG,wBAAyB,CAAC,WAAW,EAAE,CAAC;AACzD,YAAA,IAAI,QAAQ,KAAK,OAAO,IAAI,QAAQ,KAAK,MAAM,EAAE;AAC/C,gBAAA,MAAM,IAAI,KAAK,CACb,iGAAiG,CAClG,CAAC;AACH,aAAA;AAED,YAAA,cAAc,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;YAC1E,IAAI,CAAC,cAAc,EAAE;AACnB,gBAAA,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;AAC7E,aAAA;YACD,YAAY,GAAG,GAAG,wBAAwB,CAAA,GAAA,EAAM,WAAW,CAAS,MAAA,EAAA,cAAc,EAAE,CAAC;AACtF,SAAA;QAED,IAAI,CAAC,WAAW,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;AAC1E,SAAA;AAAM,aAAA,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAClC,YAAA,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;AACzE,SAAA;QAED,OAAO;AACL,YAAA,IAAI,EAAE,mBAAmB;AACzB,YAAA,GAAG,EAAE,YAAY;YACjB,WAAW;YACX,UAAU;YACV,QAAQ;SACT,CAAC;AACH,KAAA;AAAM,SAAA;;QAGL,MAAM,UAAU,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,uBAAuB,CAAC,CAAC;QACnF,IAAI,WAAW,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;;QAExE,IAAI,CAAC,WAAW,EAAE;AAChB,YAAA,WAAW,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAAC;AACnD,SAAA;QACD,IAAI,CAAC,YAAY,EAAE;AACjB,YAAA,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;AAC/E,SAAA;aAAM,IAAI,CAAC,UAAU,EAAE;AACtB,YAAA,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;AACxF,SAAA;AAED,QAAA,OAAO,EAAE,IAAI,EAAE,eAAe,EAAE,GAAG,EAAE,YAAY,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC;AAC9E,KAAA;AACH,CAAC;AAED;;;;AAIG;AACH,SAAS,MAAM,CAAC,IAAY,EAAA;IAC1B,OAAO,kBAAkB,CAAC,IAAI,CAAC;AAC5B,SAAA,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC;AACpB,SAAA,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;AACpB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;AACrB,SAAA,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAC1B,CAAC;AAED;;;;;;;AAOG;AACa,SAAA,eAAe,CAAC,GAAW,EAAE,IAAY,EAAA;IACvD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;AAC/B,IAAA,IAAI,GAAG,IAAI,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAA,EAAG,IAAI,CAAA,EAAG,IAAI,CAAA,CAAE,GAAG,CAAG,EAAA,IAAI,CAAI,CAAA,EAAA,IAAI,EAAE,IAAI,IAAI,CAAC;AACjF,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAExB,MAAM,aAAa,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC,CAAC;AAEpD,IAAA,OAAO,aAAa,CAAC,QAAQ,EAAE,CAAC;AAClC,CAAC;AAED;;;;;;;;AAQG;SACa,eAAe,CAAC,GAAW,EAAE,IAAY,EAAE,KAAc,EAAA;IACvE,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,SAAS,CAAC,iBAAiB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;AACzC,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;AAKG;AACa,SAAA,eAAe,CAAC,GAAW,EAAE,IAAY,EAAA;IACvD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,sBAAsB,CAAC,IAAI,CAAC,CAAC;AAChD,CAAC;AAED;;;;;;AAMG;AACa,SAAA,UAAU,CAAC,GAAW,EAAE,IAAY,EAAA;IAClD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACxB,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;AAIG;AACG,SAAU,UAAU,CAAC,GAAW,EAAA;IACpC,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,G
AAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,OAAO,EAAE,CAAC;AAC7B,CAAC;AAED;;;;AAIG;AACG,SAAU,YAAY,CAAC,GAAW,EAAA;IACtC,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC;AAC/B,CAAC;AAED;;;;AAIG;AACG,SAAU,kBAAkB,CAAC,GAAW,EAAA;IAC5C,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,MAAM,UAAU,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IACvC,IAAI,CAAC,UAAU,EAAE;AACf,QAAA,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAC;AACzD,KAAA;IAED,IAAI,WAAW,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC;AAC7C,IAAA,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,IAAI,WAAW,KAAK,EAAE,EAAE;AACtB,QAAA,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,WAAW,GAAG,CAAA,CAAA,EAAI,WAAW,CAAE,CAAA,CAAC;AAC7E,KAAA;AAED,IAAA,OAAO,CAAG,EAAA,UAAU,CAAG,EAAA,WAAW,EAAE,CAAC;AACvC,CAAC;AAED;;;;AAIG;AACG,SAAU,aAAa,CAAC,GAAW,EAAA;IACvC,IAAI,WAAW,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;IACnD,IAAI,CAAC,WAAW,EAAE;AAChB,QAAA,OAAO,EAAE,CAAC;AACX,KAAA;AAED,IAAA,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,WAAW,CAAC;IAEhF,IAAI,eAAe,GAAa,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACvD,eAAe,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC,KAAa,KAAI;QACzD,MAAM,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACxC,MAAM,gBAAgB,GAAG,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAChD,QAAA,QACE,YAAY,GAAG,CAAC,IAAI,YAAY,KAAK,gBAAgB,IAAI,gBAAgB,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAC5F;AACJ,KAAC,CAAC,CAAC;IAEH,MAAM,OAAO,GAA8B,EAAE,CAAC;AAC9C,IAAA,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;QAC5C,MAAM,YAAY,GAAG,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC/C,QAAA,MAAM,GAAG,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;AACpC,QAAA,MAAM,KAAK,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;AACtC,QAAA,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;AACtB,KAAA;AAED,IAAA,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;;;;AAMG;AACa,SAAA,gBAAgB,CAAC,GAAW,EAAE,UAAkB,EAAA;IAC9D,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,KAAK,GAAG,SAAS,CAAC,QAAQ,EAAE,CAAC;AACjC,IAAA,IAAI,KAAK,EAAE;AACT,QAAA,KAAK,IAAI,GAAG,GAAG,UAAU,CAAC;AAC3B,KAAA;AAAM,SAAA;QACL,KAAK,GAAG,UAAU,CAAC;AACpB,KAAA;AAED,IAAA,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AAC1B,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;AAOG;SACa,oBAAoB,CAAC,IAAU,EAAE,mBAA4B,IAAI,EAAA;;AAE/E,IAAA,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;AAEtC,IAAA,OAAO,gBAAgB;AACrB,UAAE,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,MAAM,GAAG,GAAG;AAC/D,UAAE,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;AAC3D,CAAC;AAED;;;;AAIG;AACG,SAAU,YAAY,CAAC,OAAe,EAAA;IAC1C,OAAO,CAACC,eAAM,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC3E,CAAC;AAWD;;;;AAIG;AACa,SAAA,eAAe,CAAC,aAAqB,EAAE,UAAkB,EAAA;;IAEvE,MAAM,qBAAqB,GAAG,EAAE,CAAC;;IAGjC,MAAM,mBAAmB,GAAG,CAAC,CAAC;AAE9B,IAAA,MAAM,6BAA6B,GAAG,qBAAqB,GAAG,mBAAmB,CAAC;AAElF,IAAA,IAAI,aAAa,CAAC,MAAM,GAAG,6BAA6B,EAAE;QACxD,aAAa,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC,EAAE,6BAA6B,CAAC,CAAC;AACvE,KAAA;IACD,MAAM,GAAG,GACP,aAAa;AACb,QAAA,QAAQ,CAAC,UAAU,CAAC,QAAQ,EAAE,EAAE,qBAAqB,GAAG,aAAa,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AACrF,IAAA,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;AAMG;AACI,eAAe,KAAK,CACzB,QAAgB,EAChB,OAAyB,EACzB,UAAkB,EAAA;IAElB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;;AAE3C,QAAA,IAAI,OAAY,CAAC;QAEjB,MAAM,YAAY,GAAG,MAAK;YACxB,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,YAAY,CAAC,OAAO,CAAC,CAAC;AACvB,aAAA;YACD,MAAM,CAAC,UAAU,CAAC,CAAC;AACrB,SAAC,CAAC;QAEF,MAAM,cAAc,GAAG,MAAK;YAC1B,IAAI,OAAO,KAAK,SAAS,EAAE;AACzB,gBAAA,OAAO,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACpD,aAAA;AACD,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC;A
AEF,QAAA,OAAO,GAAG,UAAU,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;QAE/C,IAAI,OAAO,KAAK,SAAS,EAAE;AACzB,YAAA,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACjD,SAAA;AACH,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;AAMG;AACG,SAAU,QAAQ,CACtB,aAAqB,EACrB,YAAoB,EACpB,YAAoB,GAAG,EAAA;;AAGvB,IAAA,IAAI,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE;QAC7B,OAAO,aAAa,CAAC,QAAQ,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;AACxD,KAAA;AAED,IAAA,SAAS,GAAG,SAAS,IAAI,GAAG,CAAC;AAC7B,IAAA,IAAI,aAAa,CAAC,MAAM,GAAG,YAAY,EAAE;AACvC,QAAA,OAAO,aAAa,CAAC;AACtB,KAAA;AAAM,SAAA;AACL,QAAA,YAAY,GAAG,YAAY,GAAG,aAAa,CAAC,MAAM,CAAC;AACnD,QAAA,IAAI,YAAY,GAAG,SAAS,CAAC,MAAM,EAAE;YACnC,SAAS,IAAI,SAAS,CAAC,MAAM,CAAC,YAAY,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;AAChE,SAAA;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,GAAG,aAAa,CAAC;AACzD,KAAA;AACH,CAAC;AAyBD;;;;;AAKG;AACa,SAAA,MAAM,CAAC,IAAY,EAAE,IAAY,EAAA;IAC/C,OAAO,IAAI,CAAC,iBAAiB,EAAE,KAAK,IAAI,CAAC,iBAAiB,EAAE,CAAC;AAC/D,CAAC;AAED;;;;AAIG;AACG,SAAU,qBAAqB,CAAC,GAAW,EAAA;IAC/C,MAAM,SAAS,GAAeD,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACpD,IAAA,IAAI,WAAW,CAAC;IAChB,IAAI;AACF,QAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;AAEjD,YAAA,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,SAAA;AAAM,aAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;AAIvC,YAAA,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,SAAA;AAAM,aAAA;;YAEL,WAAW,GAAG,EAAE,CAAC;AAClB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC;AACpB,KAAA;AAAC,IAAA,OAAO,KAAU,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;AAC7E,KAAA;AACH,CAAC;AAEK,SAAU,iBAAiB,CAAC,SAAqB,EAAA;AACrD,IAAA,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,EAAE;AACrC,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;AAED,IAAA,MAAM,IAAI,GACR,SAAS,CAAC,OAAO,EAAG,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,GAAG,EAAE,GAAG,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC;;;;;AAM9F,IAAA,QACE,mJAAmJ,CAAC,IAAI,CACtJ,IAAI,CACL;AACD,SAAC,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAG,CAAC,CAAC,EACpF;AACJ,CAAC;AAED;;;;AAIG;AACG,SAAU,gBAAgB,CAAC,IAAW,EAAA;IAC1C,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,QAAQ,GAAG,EAAE,CAAC;AACpB,IAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;AACtB,QAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;AACnD,YAAA,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;AACxB,YAAA,QAAQ,CAAC,IAAI,CAAC,CAAA,EAAG,kBAAkB,CAAC,GAAG,CAAC,CAAA,CAAA,EAAI,kBAAkB,CAAC,KAAK,CAAC,CAAA,CAAE,CAAC,CAAC;AAC1E,SAAA;AACF,KAAA;AAED,IAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED;;;;AAIG;AACG,SAAU,UAAU,CAAC,IAAW,EAAA;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AAED,IAAA,MAAM,GAAG,GAAa;AACpB,QAAA,UAAU,EAAE,EAAE;KACf,CAAC;AAEF,IAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;AACtB,QAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;AACnD,YAAA,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;AACxB,YAAA,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC;gBAClB,GAAG;gBACH,KAAK;AACN,aAAA,CAAC,CAAC;AACJ,SAAA;AACF,KAAA;AACD,IAAA,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;AAIG;AACG,SAAU,MAAM,CAAC,IAAe,EAAA;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,GAAG,GAAS,EAAE,CAAC;AACrB,IAAA,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,UAAU,EAAE;QACrC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC;AAClC,KAAA;AACD,IAAA,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;AAIG;AACG,SAAU,oBAAoB,CAClC,iBAIiC,EAAA;IAEjC,IAAI,iBAAiB,KAAK,SAAS,EAAE;AACnC,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,QAAQ,iBAAiB,CAAC,IAAI;AAC5B,QAAA,KAAK,KAAK;YACR,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,0BAA0B,EAAE;AAC1B,wBAAA,eAAe,EAAE,iBAAiB,CAAC,eAAe,I
AAI,GAAG;AACzD,wBAAA,UAAU,EAAE,iBAAiB,CAAC,UAAU,IAAI,EAAE;wBAC9C,eAAe,EAAE,iBAAiB,CAAC,eAAe;AAClD,wBAAA,UAAU,EAAE,iBAAiB,CAAC,eAAe,IAAI,EAAE;AACnD,wBAAA,cAAc,EAAE,iBAAiB,CAAC,UAAU,IAAI,KAAK;AACtD,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,MAAM;YACT,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,qBAAqB,EAAE;wBACrB,eAAe,EAAE,iBAAiB,CAAC,eAAe;AACnD,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,OAAO;YACV,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,OAAO;AACb,oBAAA,kBAAkB,EAAE;wBAClB,MAAM,EAAE,iBAAiB,CAAC,MAAM;AACjC,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,SAAS;YACZ,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;aACF,CAAC;AAEJ,QAAA;AACE,YAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACtD,KAAA;AACH,CAAC;AAEK,SAAU,4BAA4B,CAC1C,uBAAgD,EAAA;IAEhD,IAAI,CAAC,uBAAuB,EAAE;AAC5B,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,IAAI,WAAW,IAAI,uBAAuB,EAAE;;;AAG1C,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,YAAY,GAA8B,EAAE,CAAC;AACnD,IAAA,KAAK,MAAM,GAAG,IAAI,uBAAuB,EAAE;QACzC,MAAM,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC3B,MAAM,YAAY,GAAG,KAAK,CAAC;QAC3B,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;AACnC,YAAA,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;AAChD,SAAA;AACD,QAAA,MAAM,IAAI,GAA0B;AAClC,YAAA,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC;AACd,YAAA,iBAAiB,EAAE,uBAAuB,CAAC,GAAG,CAA4B;SAC3E,CAAC;QACF,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC,MAAM,KAAK,MAAM,CAAC,QAAQ,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACnF,QAAA,IAAI,WAAW,GAAG,CAAC,CAAC,EAAE;YACpB,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5C,SAAA;AAAM,aAAA;YACL,YAAY,CAAC,IAAI,CAAC;AAChB,gBAAA,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;gBAChB,KAAK,EAAE,CAAC,IAAI,CAAC;AACd,aAAA,CAAC,CAAC;AACJ,SAAA;AACF,KAAA;AACD,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAED;;;;;AAKG;AACa,SAAA,gBAAgB,CAAI,KAAQ,EAAE,UAA2B,EAAA;AACtE,IAAA,KAAa,CAAC,UAAU,GAAG,UAAU,CAAC;AACvC,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAEK,SAAU,yBAAyB,CACvC,iBAAqC,EAAA;AAErC,IAAA,OAAO,iBAAiB,GAAG,iBAAiB,CAAC,MAAM,GAAG,GAAG,GAAG,iBAAiB,CAAC,KAAK,GAAG,SAAS,CAAC;AAClG,CAAC;AAEK,SAAU,gBAAgB,CAAC,IAAc,EAAA;IAC7C,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,QAAA,OAAO,kBAAkB,CAAC,IAAI,CAAC,OAAQ,CAAC,CAAC;AAC1C,KAAA;AAAM,SAAA;QACL,OAAO,IAAI,CAAC,OAAQ,CAAC;AACtB,KAAA;AACH,CAAC;AAEK,SAAU,qCAAqC,CACnD,gBAA8C,EAAA;IAE9C,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,gBAAgB,CACnB,EAAA,EAAA,OAAO,EAAE;AACP,YAAA,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AACpE,gBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;AACF,gBAAA,OAAO,QAAQ,CAAC;AAClB,aAAC,CAAC;SACH,EACD,CAAA,CAAA;AACJ,CAAC;AAEK,SAAU,0CAA0C,CACxD,gBAAmD,EAAA;;IAEnD,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,gBAAgB,CACnB,EAAA,EAAA,OAAO,EAAE;AACP,YAAA,YAAY,EAAE,CAAA,EAAA,GAAA,gBAAgB,CAAC,OAAO,CAAC,YAAY,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,GAAG,CAAC,CAAC,kBAAkB,KAAI;AAC9E,gBAAA,MAAM,UAAU,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACX,kBAAkB,CAAA,EAAA,EACrB,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC,GAChD,CAAC;AACF,gBAAA,OAAO,UAAU,CAAC;AACpB,aAAC,CAAC;AACF,YAAA,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AACpE,gBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;AACF,gBAAA,OAAO,QAAQ,CAAC;AAClB,aAAC,CAAC;SACH,EACD,CAAA,CAAA;AACJ,CAAC;AAEc,UAAE,yBAAyB,CACxC,oBAA4D,EAAA;IAE5D,IAAI,SAAS,GAAgB,EAAE,CAAC;IAChC,IAAI,UAAU,GAAiB,EAAE,CAAC;IAElC,IAAI,oBAAoB,CAAC,SAA
S;AAAE,QAAA,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;IAC/E,IAAI,oBAAoB,CAAC,UAAU;AAAE,QAAA,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;IAElF,IAAI,cAAc,GAAG,CAAC,CAAC;IACvB,IAAI,eAAe,GAAG,CAAC,CAAC;IAExB,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,IAAI,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE;AAC/E,QAAA,IAAI,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK,EAAE;YACvE,MAAM;AACJ,gBAAA,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;AACtC,gBAAA,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;AAClC,gBAAA,OAAO,EAAE,KAAK;aACf,CAAC;AACF,YAAA,EAAE,cAAc,CAAC;AAClB,SAAA;AAAM,aAAA;YACL,MAAM;AACJ,gBAAA,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;AACxC,gBAAA,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;AACpC,gBAAA,OAAO,EAAE,IAAI;aACd,CAAC;AACF,YAAA,EAAE,eAAe,CAAC;AACnB,SAAA;AACF,KAAA;IAED,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,cAAc,EAAE;QAC1D,MAAM;AACJ,YAAA,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;AACtC,YAAA,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;AAClC,YAAA,OAAO,EAAE,KAAK;SACf,CAAC;AACH,KAAA;IAED,OAAO,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,eAAe,EAAE;QAC7D,MAAM;AACJ,YAAA,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;AACxC,YAAA,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;AACpC,YAAA,OAAO,EAAE,IAAI;SACd,CAAC;AACH,KAAA;AACH,CAAC;AAED;;AAEG;AACG,SAAU,UAAU,CAAC,QAAgB,EAAA;IACzC,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAClC,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,KAAK,CAAC,CAAC,CAAC,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,KAAA;AACD,IAAA,OAAO,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB;;AC93BA;AAeA;;;;;;;;;;AAUG;AACG,MAAO,oBAAqB,SAAQE,0BAAiB,CAAA;AACzD;;;;AAIG;;;IAGH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,IAAID,eAAM,EAAE;YACV,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,MAAM,EAAE;YACrF,OAAO,CAAC,GAAG,GAAG,eAAe,CAC3B,OAAO,CAAC,GAAG,EACX,YAAY,CAAC,UAAU,CAAC,sBAAsB,EAC9C,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAChC,CAAC;AACH,SAAA;QAED,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;;QAG/C,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,cAAc,CAAC,CAAC;QAEvD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;AC/DD;AAOA;;AAEG;MACU,2BAA2B,CAAA;AACtC;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;AACpE,QAAA,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KACtD;AACF;;ACpBD;AAkCA;;AAEG;AACSE,wCASX;AATD,CAAA,UAAY,sBAAsB,EAAA;AAChC;;AAEG;AACH,IAAA,sBAAA,CAAA,sBAAA,CAAA,aAAA,CAAA,GAAA,CAAA,CAAA,GAAA,aAAW,CAAA;AACX;;AAEG;AACH,IAAA,sBAAA,CAAA,sBAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AACP,CAAC,EATWA,8BAAsB,KAAtBA,8BAAsB,GASjC,EAAA,CAAA,CAAA,CAAA;AAED;AACA,MAAM,qBAAqB,GAAwB;IACjD,iBAAiB,EAAE,GAAG,GAAG,IAAI;AAC7B,IAAA,QAAQ,EAAE,CAAC;IACX,cAAc,EAAE,CAAC,GAAG,IAAI;IACxB,eAAe,EAAEA,8BAAsB,CAAC,WAAW;AACnD,IAAA,aAAa,EAAE,EAAE;IACjB,cAAc,EAAE,SAAS;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAIC,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AAEvE;;AAEG;AACG,MAAO,kBAAmB,SAAQF,0BAAiB,CAAA;AAMvD;;;;;;AAMG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,eAAoC,qBAAqB,EAAA;AAEzD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;;QAG3B,IAAI,CAAC,YAAY,GAAG;YAClB,eAAe,EAAE,YAAY,CAAC,eAAe;kBACzC,YAAY,CAAC,eAAe;kBAC5B,qBAAqB,CAAC,eAAe;YAEzC,QAAQ,EACN,YAAY,CAAC,QAAQ,IAAI,YAAY,CAAC,QAAQ,IAAI,CAAC;kBAC/C,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,QAAQ,CAAC;kBACjC,qBAAqB,CAAC,QAAQ;YAEpC,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;kBAC3D,YAAY,CAAC,cAAc;kBAC3B,qBAAqB,CAAC,cAAc;YAE1C,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;kBAC3D,IAAI,CAAC,GAAG,CACN,YAAY
,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB;sBAC1B,YAAY,CAAC,iBAAiB;AAChC,sBAAE,qBAAqB,CAAC,iBAAkB,CAC7C;kBACD,qBAAqB,CAAC,cAAc;YAE1C,iBAAiB,EACf,YAAY,CAAC,iBAAiB,IAAI,YAAY,CAAC,iBAAiB,IAAI,CAAC;kBACjE,YAAY,CAAC,iBAAiB;kBAC9B,qBAAqB,CAAC,iBAAiB;YAE7C,aAAa,EAAE,YAAY,CAAC,aAAa;kBACrC,YAAY,CAAC,aAAa;kBAC1B,qBAAqB,CAAC,aAAa;SACxC,CAAC;KACH;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;KACnD;AAED;;;;;;;;;AASG;AACO,IAAA,MAAM,kBAAkB,CAChC,OAAoB,EACpB,eAAwB,EACxB,OAAe,EAAA;AAEf,QAAA,MAAM,UAAU,GAAgB,OAAO,CAAC,KAAK,EAAE,CAAC;QAEhD,MAAM,cAAc,GAClB,eAAe;AACf,YAAA,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa;AAChC,YAAA,EAAE,OAAO,CAAC,MAAM,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,OAAO,CAAC,MAAM,KAAK,SAAS,CAAC;AACxF,YAAA,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC;QAEpB,IAAI,CAAC,cAAc,EAAE;AACnB,YAAA,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,aAAc,CAAC,CAAC;AAC/E,SAAA;;AAGD,QAAA,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE;AACpC,YAAA,UAAU,CAAC,GAAG,GAAG,eAAe,CAC9B,UAAU,CAAC,GAAG,EACd,YAAY,CAAC,UAAU,CAAC,OAAO,EAC/B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,cAAe,GAAG,IAAI,CAAC,CAAC,QAAQ,EAAE,CAChE,CAAC;AACH,SAAA;AAED,QAAA,IAAI,QAA2C,CAAC;QAChD,IAAI;AACF,YAAA,MAAM,CAAC,IAAI,CAAC,CAA2B,wBAAA,EAAA,OAAO,IAAI,cAAc,GAAG,SAAS,GAAG,WAAW,CAAA,CAAE,CAAC,CAAC;YAC9F,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC1D,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE;AACxD,gBAAA,OAAO,QAAQ,CAAC;AACjB,aAAA;AAED,YAAA,eAAe,GAAG,eAAe,KAAK,CAAC,cAAc,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC;AACnF,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,MAAM,CAAC,KAAK,CAAC,CAAA,oCAAA,EAAuC,GAAG,CAAC,OAAO,CAAA,QAAA,EAAW,GAAG,CAAC,IAAI,CAAA,CAAE,CAAC,CAAC;AACtF,YAAA,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,EAAE;AAC7D,gBAAA,MAAM,GAAG,CAAC;AACX,aAAA;AACF,SAAA;AAED,QAAA,MAAM,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;QAC/D,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,eAAe,EAAE,EAAE,OAAO,CAAC,CAAC;KACrE;AAED;;;;;;;AAOG;AACO,IAAA,WAAW,CACnB,cAAuB,EACvB,OAAe,EACf,QAAgC,EAChC,GAAe,EAAA;AAEf,QAAA,IAAI,OAAO,IAAI,IAAI,CAAC,YAAY,CAAC,QAAS,EAAE;YAC1C,MAAM,CAAC,IAAI,CACT,CAAA,wBAAA,EAA2B,OAAO,CAAgB,aAAA,EAAA,IAAI,CAAC,YAAY;iBAChE,QAAS,CAAA,iBAAA,CAAmB,CAChC,CAAC;AACF,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;;;AAID,QAAA,MAAM,eAAe,GAAG;YACtB,WAAW;YACX,iBAAiB;YACjB,cAAc;YACd,YAAY;YACZ,QAAQ;YACR,WAAW;YACX,SAAS;YACT,OAAO;AACP,YAAA,oBAAoB;SACrB,CAAC;AACF,QAAA,IAAI,GAAG,EAAE;AACP,YAAA,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IACE,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAC/C,GAAG,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;AAClD,qBAAC,GAAG,CAAC,IAAI,IAAI,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,WAAW,EAAE,KAAK,cAAc,CAAC,EAClE;AACA,oBAAA,MAAM,CAAC,IAAI,CAAC,8BAA8B,cAAc,CAAA,mBAAA,CAAqB,CAAC,CAAC;AAC/E,oBAAA,OAAO,IAAI,CAAC;AACb,iBAAA;AACF,aAAA;AACF,SAAA;;;;QAKD,IAAI,QAAQ,IAAI,GAAG,EAAE;YACnB,MAAM,UAAU,GAAG,QAAQ,GAAG,QAAQ,CAAC,MAAM,GAAG,GAAG,GAAG,GAAG,CAAC,UAAU,GAAG,CAAC,CAAC;AACzE,YAAA,IAAI,CAAC,cAAc,IAAI,UAAU,KAAK,GAAG,EAAE;AACzC,gBAAA,MAAM,CAAC,IAAI,CAAC,CAAA,mDAAA,CAAqD,CAAC,CAAC;AACnE,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;;AAGD,YAAA,IAAI,UAAU,KAAK,GAAG,IAAI,UAAU,KAAK,GAAG,EAAE;AAC5C,gBAAA,MAAM,CAAC,IAAI,CAAC,2CAA2C,UAAU,CAAA,CAAA,CAAG,CAAC,CAAC;AACtE,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;QAED,IAAI,CAAA,GAAG,KAAA,IAAA,IAAH,GAAG,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAH,GAAG,CAAE,IAAI,MAAK,aAAa,KAAI,GAAG,aAAH,GAAG,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAH,GAAG,CAAE,OAAO,CAAC,UAAU,CAAC,CAAA,+BAAA,CAAiC,CAAC,CAAA,EAAE;AAC7F,YAAA,MAAM,CAAC,IAAI,CACT,iFAAiF,CAClF,CAAC;AAC
F,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAED,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;;AAMG;AACK,IAAA,MAAM,KAAK,CAAC,cAAuB,EAAE,OAAe,EAAE,WAA6B,EAAA;QACzF,IAAI,aAAa,GAAW,CAAC,CAAC;AAE9B,QAAA,IAAI,cAAc,EAAE;AAClB,YAAA,QAAQ,IAAI,CAAC,YAAY,CAAC,eAAe;gBACvC,KAAKC,8BAAsB,CAAC,WAAW;AACrC,oBAAA,aAAa,GAAG,IAAI,CAAC,GAAG,CACtB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,YAAY,CAAC,cAAe,EAClE,IAAI,CAAC,YAAY,CAAC,iBAAkB,CACrC,CAAC;oBACF,MAAM;gBACR,KAAKA,8BAAsB,CAAC,KAAK;AAC/B,oBAAA,aAAa,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,CAAC;oBAClD,MAAM;AACT,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,aAAa,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC;AACtC,SAAA;AAED,QAAA,MAAM,CAAC,IAAI,CAAC,0BAA0B,aAAa,CAAA,EAAA,CAAI,CAAC,CAAC;QACzD,OAAO,KAAK,CAAC,aAAa,EAAE,WAAW,EAAE,iBAAiB,CAAC,CAAC;KAC7D;AACF;;ACjSD;AAyDA;;AAEG;MACU,yBAAyB,CAAA;AAGpC;;;AAGG;AACH,IAAA,WAAA,CAAY,YAAkC,EAAA;AAC5C,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;AAED;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;QACpE,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;KACvE;AACF;;AChFD;AAKA;;;AAGG;AACG,MAAgB,gBAAiB,SAAQD,0BAAiB,CAAA;AAC9D;;;;AAIG;AACI,IAAA,WAAW,CAAC,OAAoB,EAAA;AACrC,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;KAChE;AAED;;;;;AAKG;AACO,IAAA,WAAW,CAAC,OAAoB,EAAA;;;AAGxC,QAAA,OAAO,OAAO,CAAC;KAChB;AACF;;AC9BD;AAOA;;;AAGG;AACG,MAAO,yBAA0B,SAAQ,gBAAgB,CAAA;AAC7D;;;;AAIG;;;IAGH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AACF;;ACtBD;AACA;AAKA;;;AAGG;MACmB,UAAU,CAAA;AAC9B;;;;;AAKG;IACI,MAAM,CAAC,WAA0B,EAAE,QAA8B,EAAA;AACtE,QAAA,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;KACtE;AACF;;ACpBD;AAQA;;;;;AAKG;AACG,MAAO,mBAAoB,SAAQ,UAAU,CAAA;AACjD;;;;;AAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;AAE7B,QAAA,OAAO,IAAI,yBAAyB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC3D;AACF;;AC3BD;AAeA;;AAEG;AACG,MAAO,eAAgB,SAAQA,0BAAiB,CAAA;AAMpD;;;;;AAKG;AACH,IAAA,WAAA,CAAY,UAAyB,EAAE,OAA6B,EAAE,SAAiB,EAAA;AACrF,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;KAC5B;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,IAAID,eAAM,EAAE;AACV,YAAA,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;AACpB,gBAAA,OAAO,CAAC,OAAO,GAAG,IAAII,oBAAW,EAAE,CAAC;AACrC,aAAA;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;AACpD,gBAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;AACjE,aAAA;AACF,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACpDD;AAeA;;AAEG;MACU,sBAAsB,CAAA;AAMjC;;;AAGG;AACH,IAAA,WAAA,CAAY,SAA4B,EAAA;QACtC,MAAM,aAAa,GAAa,EAAE,CAAC;AAEnC,QAAA,IAAIJ,eAAM,EAAE;AACV,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,IAAI,EAAE,CAAC;AACxD,gBAAA,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,IAAI,aAAa,CAAC,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE;AAC/E,oBAAA,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;AACrC,iBAAA;AACF,aAAA;;AAGD,YAAA,MAAM,OAAO,GAAG,CAAwB,qBAAA,EAAA,WAAW,EAAE,CAAC;YACtD,IAAI,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;AACzC,gBAAA,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AAC7B,aAAA;;AAGD,YAAA,IAAI,WAAW,GAAG,CAAA,cAAA,EAAiB,OAAO,CAAC,OAAO,GAAG,CAAC;AACtD,YAAA,IAAIK,aAAE,EAAE;AACN,gBAAA,WAAW,GAAG,CAAiB,cAAA,EAAA,OAAO,CAAC,OAAO,KAAKA,aAAE,CAAC,IAAI,EAAE,IAAIA,aAAE,CAAC,OAAO,EAAE,GAAG,CAAC;AACjF,aAAA;YACD,IAAI,aAAa,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;AAC7C,gBAAA,aAAa,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AACjC,aAAA;AACF,SAAA;QAED,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAChD;AAED;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;QACpE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;KACvE;AACF;;ACnED;A
AMA,MAAM,kBAAkB,GAAG,IAAIC,0BAAiB,EAAE,CAAC;SAEnC,0BAA0B,GAAA;AACxC,IAAA,OAAO,kBAAkB,CAAC;AAC5B;;ACVA;AAcA;;AAEG;AACH,MAAM,SAAS,GAAG;AAChB,IAAA,YAAY,EAAE,WAAW;AACzB;;AAEG;AACH,IAAA,eAAe,EAAE;AACf;;AAEG;AACH,QAAA,aAAa,EAAE,eAAe;AAC/B,KAAA;CACF,CAAC;AAiCF;AACA,MAAM,sBAAsB,GAAuB;AACjD,IAAA,uBAAuB,EAAE,IAAI;AAC7B,IAAA,iBAAiB,EAAE,IAAI;AACvB,IAAA,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC;CACjC,CAAC;AAEF;;;;;;;;;;;;AAYG;AACH,eAAe,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB,EAAA;;;AAInB,IAAA,eAAe,iBAAiB,GAAA;AAC9B,QAAA,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;AAC/B,aAAA;YAAC,OAAM,EAAA,EAAA;AACN,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;;YAG1C,IAAI,UAAU,KAAK,IAAI,EAAE;AACvB,gBAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,OAAO,UAAU,CAAC;AACnB,SAAA;KACF;AAED,IAAA,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;AACrB,QAAA,MAAMC,cAAK,CAAC,iBAAiB,CAAC,CAAC;AAE/B,QAAA,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;AACnC,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;AAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD,EAAA;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;AAErC,IAAA,MAAM,OAAO,GACR,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,sBAAsB,CACtB,EAAA,kBAAkB,CACtB,CAAC;AAEF;;;AAGG;AACH,IAAA,MAAM,MAAM,GAAG;AACb;;AAEG;AACH,QAAA,IAAI,YAAY,GAAA;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;SAC/B;AACD;;;AAGG;AACH,QAAA,IAAI,aAAa,GAAA;;AACf,YAAA,QACE,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,CAAA,EAAA,GAAA,KAAK,KAAL,IAAA,IAAA,KAAK,uBAAL,KAAK,CAAE,kBAAkB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,CAAC,IAAI,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzE;SACH;AACD;;;AAGG;AACH,QAAA,IAAI,WAAW,GAAA;AACb,YAAA,QACE,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzF;SACH;KACF,CAAC;AAEF;;;AAGG;IACH,SAAS,OAAO,CAAC,eAAgC,EAAA;;AAC/C,QAAA,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;AAExB,YAAA,MAAM,iBAAiB,GAAG,MACxB,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;;;AAI/C,YAAA,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;;AAEzB,YAAA,CAAA,EAAA,GAAA,KAAK,KAAA,IAAA,IAAL,KAAK,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAL,KAAK,CAAE,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,IAAI,CAAC,GAAG,EAAE,CACxC;AACE,iBAAA,IAAI,CAAC,CAAC,MAAM,KAAI;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;AACf,gBAAA,OAAO,KAAK,CAAC;AACf,aAAC,CAAC;AACD,iBAAA,KAAK,CAAC,CAAC,MAAM,KAAI;;;;gBAIhB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;AACb,gBAAA,MAAM,MAAM,CAAC;AACf,aAAC,CAAC,CAAC;AACN,SAAA;AAED,QAAA,OAAO,aAAqC,CAAC;KAC9C;AAED,IAAA,OAAO,OAAO,YAA6B,KAA0B;;;;;;;;;;QAWnE,IAAI,MAAM,CAAC,WAAW;AAAE,YAAA,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;AACvB,SAAA;AAED,QAAA,OAAO,KAAoB,CAAC;AAC9B,KAAC,CAAC;AACJ,CAAC;AACD;;;AAGG;AACH,SAAS,YAAY,CAAC,QAA+B,EAAA;IACnD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;AAC3D,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE;AACxC,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IACD,OAAO;AACT,CAAC;AAUD;;;;;AAKG;AACH,SAAS,cAAc,CAAC,SAAiB,EAAA;IACvC,MAAM,eAAe,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC1D,MAAM,cAAc,GAAG,CAAG,EAAA,eAAe,CAAC,IAAI,EAAE,CAAG,CAAA,CAAA,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAChF,IAAA,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,QAAQ,KAChD,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,GAAG,KAAK,EAAE,CAAC,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CACnE,CAAC;;AAEF,IAAA,OAAO,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAK,MAAA,CAAA,MAA
A,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAM,CAAC,CAAK,EAAA,CAAC,EAAG,EAAE,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED;AAEA;;;;;;AAMG;AAEa,SAAA,+CAA+C,CAC7D,UAA2B,EAC3B,MAAyB,EAAA;;IAGzB,IAAI,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAErD,MAAM,+CAAgD,SAAQN,0BAAiB,CAAA;QAC7E,WAAmB,CAAA,UAAyB,EAAE,OAA6B,EAAA;AACzE,YAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC5B;QAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;AACH,aAAA;YAED,MAAM,gBAAgB,GAAG,QAAQ,CAAC;AAClC,YAAA,MAAM,KAAK,GAAG,CACZ,MAAM,gBAAgB,CAAC;gBACrB,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,gBAAA,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,iBAAA;aACF,CAAC,EACF,KAAK,CAAC;AACR,YAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,CAAA,OAAA,EAAU,KAAK,CAAA,CAAE,CAAC,CAAC;YAEpF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;YAEjE,IAAI,CAAA,QAAQ,KAAA,IAAA,IAAR,QAAQ,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAR,QAAQ,CAAE,MAAM,MAAK,GAAG,EAAE;AAC5B,gBAAA,MAAM,SAAS,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;AACzC,gBAAA,IAAI,SAAS,EAAE;AACb,oBAAA,MAAM,aAAa,GAAc,cAAc,CAAC,SAAS,CAAC,CAAC;oBAC3D,MAAM,eAAe,GAAG,aAAa,CAAC,WAAW,GAAG,SAAS,CAAC,YAAY,CAAC;oBAC3E,MAAM,aAAa,GAAGF,mBAAU,CAAC,KAAK,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAC;oBACxE,MAAM,YAAY,GAAG,aAAa,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACzD,oBAAA,MAAM,QAAQ,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;oBACjC,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,UAAU,EAAE,eAAe,CAAC,CAAC;AAE5E,oBAAA,MAAM,iBAAiB,GAAG,CACxB,MAAM,oBAAoB,CAAC;wBACzB,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,wBAAA,cAAc,EAAE;4BACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,yBAAA;AACD,wBAAA,QAAQ,EAAE,QAAQ;qBACnB,CAAC,EACF,KAAK,CAAC;oBAER,QAAQ,GAAG,oBAAoB,CAAC;AAChC,oBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,SAAS,CAAC,eAAe,CAAC,aAAa,EACvC,CAAA,OAAA,EAAU,iBAAiB,CAAA,CAAE,CAC9B,CAAC;oBACF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAClD,iBAAA;AACF,aAAA;AAED,YAAA,OAAO,QAAQ,CAAC;SACjB;AACF,KAAA;IAED,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,+CAA+C,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACjF;KACF,CAAC;AACJ;;AC9VA;AAgGA;;;;AAIG;AACG,SAAU,cAAc,CAAC,QAAiB,EAAA;AAC9C,IAAA,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAC7C,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;IAED,MAAM,YAAY,GAAG,QAAwB,CAAC;IAE9C,QACE,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,SAAS,CAAC;AACrC,QAAA,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ;AACxC,QAAA,OAAO,YAAY,CAAC,sBAAsB,KAAK,UAAU,EACzD;AACJ,CAAC;AAED;;;;;;;AAOG;MACU,QAAQ,CAAA;AAUnB;;;;;AAKG;IACH,WAAY,CAAA,SAAiC,EAAE,OAAA,GAA2B,EAAE,EAAA;AAC1E,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;;;AAG3B,QAAA,IAAI,CAAC,OAAO,GACP,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,KACV,UAAU,EAAE,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,GAC/D,CAAC;KACH;AAED;;;;;AAKG;IACI,sBAAsB,GAAA;QAC3B,OAAO;AACL,YAAA,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU;YACnC,sBAAsB,EAAE,IAAI,CAAC,SAAS;SACvC,CAAC;KACH;AACF,CAAA;AAiCD;;;;;;AAMG;SACa,WAAW,CACzB,UAA+E,EAC/E,kBAA0C,EAAE,EAAA;;IAE5C,IAAI,UAAU,KAAK,SAAS,EAAE;AAC5B,QAAA,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;AACxC,KAAA;;;;IAMD,MAAM,eAAe,GAAG,IAAI,sBAAsB,CAAC,eAAe,CAAC,gBAAgB,CAAC,CAAC;AACrF,IAAA,MAAM,SAAS,GAA2B;QACxCS,sBAAa,CAAC,EAAE,SAAS,EAAE,eAAe,CAAC,eAAe,EAAE,CAAC;AAC7D,QAAAC,wBAAe,CAAC,eAAe,CAAC,gBAAgB,CAAC;QACjD,eAAe;AACf,QAAAC,sCAA6B,EAAE;AAC/B,QAAA,IAAI,2BAA2B,EAAE;AACjC,QAAA,IAAI,yBAAyB,CAAC,eAAe,CAAC,YAAY,CAAC;;;;QAI3DC,8BAAqB,CAAC,SAAS,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;AACrD,QAAAC,kBAAS,CAAC;YACR,MAAM,EAAE,MAAM,CAAC,IAAI;AACnB,YAAA,kBAAkB,EAAE,oCAAoC;AACxD,YAAA,sBAAsB,EAAE,wCAAwC;SACjE,CAAC;KACH,CAAC;AAEF,IAAA,IAAIZ,eAAM,EAAE;;QAEV,SAAS,CAAC,IAAI,CAACa,oBAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAA
C;AAC1D,QAAA,SAAS,CAAC,IAAI,CAACC,2CAAkC,EAAE,CAAC,CAAC;AACtD,KAAA;AACD,IAAA,SAAS,CAAC,IAAI,CACZC,0BAAiB,CAAC,UAAU,CAAC;AAC3B,UAAE,gBAAgB,CACd,+CAA+C,CAC7C,UAAU,EACV,CAAA,EAAA,GAAA,eAAe,CAAC,QAAQ,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,kBAAkB,CAC/C,EACD,UAAU,CACX;UACD,UAAU,CACf,CAAC;AAEF,IAAA,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;AAClD;;AC1PA;AASA;;AAEG;AACG,MAAO,gCAAiC,SAAQ,gBAAgB,CAAA;AAMpE;;;;;AAKG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,OAAmC,EAAA;AAEnC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;KACxB;AAED;;;;AAIG;AACO,IAAA,WAAW,CAAC,OAAoB,EAAA;AACxC,QAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;QAEzE,IACE,OAAO,CAAC,IAAI;AACZ,aAAC,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAK,OAAO,CAAC,IAAe,KAAK,SAAS,CAAC;AAC5E,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EACvB;AACA,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,cAAc,EAAE,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AACtF,SAAA;AAED,QAAA,MAAM,YAAY,GAChB;AACE,YAAA,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE;YAC5B,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,cAAc,CAAC;YAClE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,WAAW,CAAC;YAC/D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,YAAY,CAAC;YAChE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,CAAC;YACxD,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,iBAAiB,CAAC;YACrE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,QAAQ,CAAC;YAC5D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,aAAa,CAAC;YACjE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,mBAAmB,CAAC;YACvE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC;SAC1D,CAAC,IAAI,CAAC,IAAI,CAAC;YACZ,IAAI;AACJ,YAAA,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC;AAC3C,YAAA,IAAI,CAAC,8BAA8B,CAAC,OAAO,CAAC,CAAC;QAE/C,MAAM,SAAS,GAAW,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AACvE,QAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CACjB,eAAe,CAAC,aAAa,EAC7B,aAAa,IAAI,CAAC,OAAO,CAAC,WAAW,IAAI,SAAS,CAAA,CAAE,CACrD,CAAC;;;;;AAMF,QAAA,OAAO,OAAO,CAAC;KAChB;AAED;;;;;;AAMG;IACK,oBAAoB,CAAC,OAAoB,EAAE,UAAkB,EAAA;QACnE,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QAE9C,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,OAAO,EAAE,CAAC;AACX,SAAA;;;;QAKD,IAAI,UAAU,KAAK,eAAe,CAAC,cAAc,IAAI,KAAK,KAAK,GAAG,EAAE;AAClE,YAAA,OAAO,EAAE,CAAC;AACX,SAAA;AAED,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;;;;;;;;AAYG;AACK,IAAA,6BAA6B,CAAC,OAAoB,EAAA;AACxD,QAAA,IAAI,YAAY,GAAG,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,KAAI;AACjE,YAAA,OAAO,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,eAAe,CAAC,kBAAkB,CAAC,CAAC;AACjF,SAAC,CAAC,CAAC;QAEH,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAY;AACjC,YAAA,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;AAClE,SAAC,CAAC,CAAC;;AAGH,QAAA,YAAY,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,KAAI;YACzD,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;AACjF,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AACD,YAAA,OAAO,IAAI,CAAC;AACd,SAAC,CAAC,CAAC;QAEH,IAAI,gCAAgC,GAAW,EAAE,CAAC;AAClD,QAAA,YAAY,CAAC,OAAO,CAAC,CAAC,MAAM,KAAI;AAC9B,YAAA,gCAAgC,IAAI,CAAA,EAAG,MAAM,CAAC,IAAI;AAC/C,iBAAA,WAAW,EAAE;iBACb,SAAS,EAAE,IAAI,MAAM,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAA,EAAA,CAAI,CAAC;AAChD,SAAC,CAAC,CAAC;AAEH,QAAA,OAAO,gCAAgC,CAAC;KACzC;AAED;;;;AAIG;AACK,IAAA,8BAA8B,CAAC,OAAoB,EAAA;QACzD,MAAM,IAAI,GAAG,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC;QAE5C,IAAI,2BAA2B,GAAW,EAAE,CAAC;QAC7C,2BAA2B,IAAI,CAAI,CAAA,EAAA,IAAI,CAAC,OAAO,CAAC,WAAW,CAAA,
EAAG,IAAI,CAAA,CAAE,CAAC;QAErE,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC3C,MAAM,gBAAgB,GAA8B,EAAE,CAAC;AACvD,QAAA,IAAI,OAAO,EAAE;YACX,MAAM,SAAS,GAAa,EAAE,CAAC;AAC/B,YAAA,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;AACzB,gBAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;AACtD,oBAAA,MAAM,YAAY,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC;oBACvC,gBAAgB,CAAC,YAAY,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;AAC9C,oBAAA,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AAC9B,iBAAA;AACF,aAAA;YAED,SAAS,CAAC,IAAI,EAAE,CAAC;AACjB,YAAA,KAAK,MAAM,GAAG,IAAI,SAAS,EAAE;AAC3B,gBAAA,2BAA2B,IAAI,CAAA,EAAA,EAAK,GAAG,CAAA,CAAA,EAAI,kBAAkB,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAA,CAAE,CAAC;AACxF,aAAA;AACF,SAAA;AAED,QAAA,OAAO,2BAA2B,CAAC;KACpC;AACF;;AChLD;AASA;;;;AAIG;AACG,MAAO,0BAA2B,SAAQ,UAAU,CAAA;AAWxD;;;;AAIG;IACH,WAAY,CAAA,WAAmB,EAAE,UAAkB,EAAA;AACjD,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;KACrD;AAED;;;;;AAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;QAE7B,OAAO,IAAI,gCAAgC,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;KACxE;AAED;;;;AAIG;AACI,IAAA,iBAAiB,CAAC,YAAoB,EAAA;QAC3C,OAAOC,iBAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;KAC5F;AACF;;ACzDD;;;;;;AAMG;AAKH,MAAM,WAAW,GAAG,oBAAoB,CAAC;AACzC,MAAM,cAAc,GAAG,SAAS,CAAC;AAEpB,MAAA,oBAAqB,SAAQvU,mBAAQ,CAAC,aAAa,CAAA;AAI9D;;;;;AAKG;IACH,WAAY,CAAA,GAAW,EAAE,OAAqC,EAAA;QAC5D,IAAI,GAAG,KAAK,SAAS,EAAE;AACrB,YAAA,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;AACzC,SAAA;;QAGD,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,YAAA,MAAM,gBAAgB,GAAGA,mBAAQ,CAAC,wBAAwB,EAAE,CAAC;YAC7D,OAAO,CAAC,SAAS,GAAG,CAAG,EAAA,WAAW,IAAI,cAAc,CAAA,CAAA,EAAI,gBAAgB,CAAA,CAAE,CAAC;AAC5E,SAAA;AAED,QAAA,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AAE1B,QAAA,IAAI,CAAC,kBAAkB,GAAG,iCAAiC,CAAC;QAE5D,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC;;AAG3C,QAAA,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;;QAGf,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,YAAY,CAAC;KAChD;AACF;;ACnDD;AAqBA;;;AAGG;MACmB,aAAa,CAAA;AAyBjC;;;;AAIG;IACH,WAAsB,CAAA,GAAW,EAAE,QAAsB,EAAA;;AAEvD,QAAA,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AAC9B,QAAA,IAAI,CAAC,WAAW,GAAG,qBAAqB,CAAC,GAAG,CAAC,CAAC;AAC9C,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,oBAAoB,GAAG,IAAI,oBAAoB,CAClD,IAAI,CAAC,GAAG,EACR,QAAQ,CAAC,sBAAsB,EAAE,CAClC,CAAC;AAEF,QAAA,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;AAE7D,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAC5C,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE;AAC7C,YAAA,IACE,CAACuT,eAAM,IAAI,OAAO,YAAY,0BAA0B;gBACxD,OAAO,YAAY,mBAAmB,EACtC;AACA,gBAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC;AAC3B,aAAA;AAAM,iBAAA,IAAIe,0BAAiB,CAAE,OAAe,CAAC,UAAU,CAAC,EAAE;;;AAGzD,gBAAA,IAAI,CAAC,UAAU,GAAI,OAAe,CAAC,UAAU,CAAC;AAC/C,aAAA;AACF,SAAA;;AAGD,QAAA,MAAM,oBAAoB,GAAG,IAAI,CAAC,oBAA2B,CAAC;AAC9D,QAAA,oBAAoB,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AACF;;ACrFD;AAMA;;;AAGG;AACI,MAAM,UAAU,GAAGE,8BAAkB,CAAC;AAC3C,IAAA,aAAa,EAAE,oBAAoB;AACnC,IAAA,SAAS,EAAE,mBAAmB;AAC/B,CAAA,CAAC,CAAC;AAEH;;;;;;AAMG;AACG,SAAU,kCAAkC,CAChD,OAA0B,EAAA;;IAE1B,OAAO;;QAEL,WAAW,EAAE,CAAC,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,cAAsB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,WAAW;QAC1D,cAAc,EAAE,CAAA,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,cAAc,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,cAAc;KACxD,CAAC;AACJ;;AC9BA;AACA;AAEA;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAA/B,IAAA,WAAA,GAAA;AAiGE;;AAEG;QACI,IAAI,CAA
A,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;KA6CzC;AAlMC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEpD,QAAA,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;AAC9B,YAAA,QAAQ,IAAI;AACV,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;oBACxC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBAChD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC1C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,uBAAuB,IAAI,CAAA,CAAE,CAAC,CAAC;AACvD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAAsC,EAAA;AACvD,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QACpD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;AAChC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;AAC/B,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;AACjC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;AACzC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;AAC/B,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;AAChC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACjD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;AAC3C,SAAA;AACD,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAyDD;;;;;AAKG;IACI,QAAQ,GAAA;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9
B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;AC/MD;AACA;AAEA;;;;;;AAMG;MACU,uBAAuB,CAAA;AAApC,IAAA,WAAA,GAAA;AA6GE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;AAExC;;AAEG;QACI,IAAY,CAAA,YAAA,GAAY,KAAK,CAAC;KAqDtC;AAhOC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAE9D,QAAA,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;AAC9B,YAAA,QAAQ,IAAI;AACV,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACrC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC7C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACrD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC/C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;oBAC5C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,sBAAsB,IAAI,CAAA,CAAE,CAAC,CAAC;AACtD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAA2C,EAAA;AAC5D,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAC9D,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;AACvC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;AACtC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;AACvC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;AAC9C,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;AACxC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACtD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;AAChD,SAAA;QACD,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/B,YAAA,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;AAC7C,SAAA;AACD,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAmED;;;;;;;AAOG;IACI,QAAQ,GAAA;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC
,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,YAAY,EAAE;AACrB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;AC3OD;AAMA;;;;;AAKG;MACU,2BAA2B,CAAA;AAgBtC;;;;AAIG;IACH,WAAY,CAAA,WAAmB,EAAE,iBAAoC,EAAA;AACnE,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;AAC3C,QAAA,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;KAC3D;AAED;;;;AAIG;AACI,IAAA,iBAAiB,CAAC,YAAoB,EAAA;;QAG3C,OAAOD,iBAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;KACrF;AACF;;ACjDD;AACA;AAkBA;;;;;;AAMG;AACG,SAAU,eAAe,CAAC,OAAmB,EAAA;IACjD,OAAO,OAAO,CAAC,GAAG,GAAG,CAAA,EAAG,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,GAAG,CAAE,CAAA,GAAG,OAAO,CAAC,KAAK,CAAC;AACzE;;AC5BA;AAOA;;AAEG;AACSE,6BAUX;AAVD,CAAA,UAAY,WAAW,EAAA;AACrB;;AAEG;AACH,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AAEf;;AAEG;AACH,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,YAA2B,CAAA;AAC7B,CAAC,EAVWA,mBAAW,KAAXA,mBAAW,GAUtB,EAAA,CAAA,CAAA,CAAA;AA4FD;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAsN7B,IAAA,WAAA,CACE,OAAe,EACf,SAAiB,EACjB,oBAAyD,EACzD,QAAiB,EACjB,aAAsB,EACtB,QAAsB,EACtB,QAAe,EACf,SAAgB,EAChB,OAAoB,EACpB,UAAmB,EACnB,QAAiB,EACjB,YAAqB,EACrB,kBAA2B,EAC3B,eAAwB,EACxB,eAAwB,EACxB,WAAoB,EACpB,iBAAqC,EACrC,0BAAmC,EACnC,aAAsB,EACtB,eAAwB,EAAA;AAExB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,oBAAoB,KAAK,SAAS,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;;AAElF,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;AACpD,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;AACxD,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;AAChD,YAAA,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,OAAO,CAAC;AACjD,YAAA,IAAI,CAAC,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;AAClD,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,YAAY,CAAC;AACtD,YAAA,IAAI,CAAC,kBAAkB,GAAG,oBAAoB,CAAC,kBAAkB,CAAC;AAClE,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YAEpD,IAAI,oBAAoB,CAAC,iBAAiB,EAAE;gBAC1C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBACvE,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,eAAe,CAAC;gBAC9E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAC1E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAA
iB,CAAC,aAAa,CAAC;AAE1E,gBAAA,IAAI,CAAC,0BAA0B,GAAG,oBAAoB,CAAC,0BAA0B,CAAC;AAClF,gBAAA,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;AACzD,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACnC,YAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;AAC3B,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC;AACxC,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC;AAC5B,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;AACjC,YAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAE/B,YAAA,IAAI,iBAAiB,EAAE;AACrB,gBAAA,IAAI,CAAC,SAAS,GAAG,iBAAiB,CAAC,cAAc,CAAC;AAClD,gBAAA,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;AACvD,gBAAA,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;AACvD,gBAAA,IAAI,CAAC,eAAe,GAAG,iBAAiB,CAAC,eAAe,CAAC;AACzD,gBAAA,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;AACrD,gBAAA,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;AAErD,gBAAA,IAAI,CAAC,0BAA0B,GAAG,0BAA0B,CAAC;AAC7D,gBAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACpC,aAAA;AACF,SAAA;KACF;AA1JD;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;QAChB,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,OAAO;AACL,gBAAA,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,GAAG;AAC1B,gBAAA,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,KAAK;aAC/B,CAAC;AACH,SAAA;AACD,QAAA,OAAO,SAAS,CAAC;KAClB;AA+ID;;;AAGG;IACI,QAAQ,GAAA;AACb,QAAA,MAAM,MAAM,GAAa;YACvB,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,IAAI;YACJ,KAAK;YACL,OAAO;YACP,OAAO;YACP,KAAK;YACL,KAAK;YACL,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,OAAO;YACP,MAAM;SACP,CAAC;QACF,MAAM,OAAO,GAAa,EAAE,CAAC;AAE7B,QAAA,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;AAC1B,YAAA,QAAQ,KAAK;AACX,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;oBAC3D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAG,SAAS,CACvE,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,GAAG,SAAS,CACzE,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,OAAO,GAAG,eAAe,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,SAAS,CACzD,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;oBAC9D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;oBAClE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,IAAI,CAAC,cAAc,EAAE,KAAK,CAAC,GAAG,SAAS,CACnF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,GAAG,SAAS,CACrF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,C
AAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;oBAChE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,kBAAkB,CAAC,CAAC;oBACtE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;AACR,gBAAA,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,0BAA0B,CAAC,CAAC;oBAC9E,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACT,aAAA;AACF,SAAA;AACD,QAAA,OAAO,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC1B;AAED;;;;;;AAMG;AACK,IAAA,uBAAuB,CAAC,OAAiB,EAAE,GAAW,EAAE,KAAc,EAAA;QAC5E,IAAI,CAAC,KAAK,EAAE;YACV,OAAO;AACR,SAAA;AAED,QAAA,GAAG,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC;AAC9B,QAAA,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;QAClC,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,CAAA,EAAG,GAAG,CAAI,CAAA,EAAA,KAAK,CAAE,CAAA,CAAC,CAAC;AACjC,SAAA;KACF;AACF;;AC9jBD;SAsPgB,8BAA8B,CAC5C,sBAA8C,EAC9C,sCAAsF,EACtF,WAAoB,EAAA;AAEpB,IAAA,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC;AAElG,IAAA,MAAM,mBAAmB,GACvB,sCAAsC,YAAY,0BAA0B;AAC1E,UAAE,sCAAsC;UACtC,SAAS,CAAC;AAChB,IAAA,IAAI,2BAAoE,CAAC;AAEzE,IAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,WAAW,KAAK,SAAS,EAAE;QAClE,2BAA2B,GAAG,IAAI,2BAA2B,CAC3D,WAAW,EACX,sCAA2D,CAC5D,CAAC;AACH,KAAA;AAED,IAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,2BAA2B,KAAK,SAAS,EAAE;AAClF,QAAA,MAAM,SAAS,CAAC,gEAAgE,CAAC,CAAC;AACnF,KAAA;;IAGD,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,SAAA;AACF,KAAA;;;;IAKD,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;;YAEL,IAAI,OAAO,IAAI,YAAY,EAAE;AAC3B,gBAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,aAAA;AAAM,iBAAA;AACL,gBAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;IAED,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,kGAAkG,CACnG,CAAC;AACH,SAAA;AACF,KAAA;AAED,IAAA,MAAM,IAAI,UAAU,CAAC,oCAAoC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;AAChB,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,C
AAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,E
ACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAEtE,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,SAAS,EACT,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,
CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,CAC9C,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;AACT,QAAA,sBAAsB,CAAC,aAAa;AACpC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,
sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,CACrC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;AACT,QAAA,sBAAsB,CAAC,aAAa;AACpC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,gBAAgB,CAAC,WAAmB,EAAE,aAAqB,EAAE,QAAiB,EAAA;;;IAGrF,MAAM,QAAQ,GAAa,CAAC,CAAA,MAAA,EAAS,WAAW,CAAI,CAAA,EAAA,aAAa,CAAE,CAAA,CAAC,CAAC;AACrE,IAAA,IAAI,QAAQ,EAAE;AACZ,QAAA,QAAQ,CAAC,IAAI,CAAC,IAAI,QAAQ,CAAA,CAAE,CAAC,CAAC;AAC/B,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC3B,CAAC;AAED,SAAS,wCAAwC,CAC/C,sBAA8C,EAAA;AAE9C,IAAA,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC;AAClG,IAAA,IAAI,sBAAsB,CAAC,YAAY,IAAI,OAAO,GAAG,YAAY,EAAE;AACjE,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,YAAY,EAAE;AACxF,QAAA,MAAM,UAAU,CAAC,wDAAwD,CAAC,CAAC;AAC5E,KAAA;AAED,IAAA,IAAI,sBAAsB,CAAC,SAAS,
IAAI,OAAO,GAAG,YAAY,EAAE;AAC9D,QAAA,MAAM,UAAU,CAAC,+DAA+D,CAAC,CAAC;AACnF,KAAA;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,SAAS,EAAE;AACrF,QAAA,MAAM,UAAU,CAAC,qDAAqD,CAAC,CAAC;AACzE,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,qBAAqB;QACxD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,aAAa;QAChD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,eAAe;QAClD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,GAAG;QACtC,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,OAAO,GAAG,YAAY;AACtB,QAAA,sBAAsB,CAAC,WAAW;AAClC,SAAC,sBAAsB,CAAC,WAAW,CAAC,IAAI,IAAI,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,EACvF;AACA,QAAA,MAAM,UAAU,CAAC,6EAA6E,CAAC,CAAC;AACjG,KAAA;IAED,IACE,OAAO,GAAG,YAAY;AACtB,QAAA,sBAAsB,CAAC,WAAW;AACjC,QAAA,sBAAsB,CAAC,WAAuC,CAAC,YAAY,EAC5E;AACA,QAAA,MAAM,UAAU,CAAC,sEAAsE,CAAC,CAAC;AAC1F,KAAA;IAED,IACE,OAAO,GAAG,YAAY;SACrB,sBAAsB,CAAC,0BAA0B,IAAI,sBAAsB,CAAC,aAAa,CAAC,EAC3F;AACA,QAAA,MAAM,UAAU,CACd,mGAAmG,CACpG,CAAC;AACH,KAAA;AAED,IAAA,IAAI,sBAAsB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;AACpE,QAAA,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;AAC/F,KAAA;AAED,IAAA,sBAAsB,CAAC,OAAO,GAAG,OAAO,CAAC;AACzC,IAAA,OAAO,sBAAsB,CAAC;AAChC;;AC3iCA;AAgGA;;AAEG;MACU,eAAe,CAAA;AAwB1B;;;;AAIG;IACH,WAAY,CAAA,MAAoC,EAAE,OAAgB,EAAA;AAChE,QAAA,MAAM,aAAa,GAAG,IAAI,oBAAoB,CAC5C,MAAM,CAAC,GAAG,EACT,MAAc,CAAC,QAAQ,CAAC,sBAAsB,EAAE,CAClD,CAAC;AACF,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC;AAEvB,QAAA,IAAK,MAAqB,CAAC,IAAI,KAAK,SAAS,EAAE;AAC7C,YAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;YACzB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,aAAa,CAAC,CAAC;AAC/D,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;YAC1B,IAAI,CAAC,yBAAyB,GAAG,IAAIC,MAAW,CAAC,aAAa,CAAC,CAAC;AACjE,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAGC,qBAAY,EAAE,CAAC;AAC1B,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;KACzB;AA1CD;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;QAChB,OAAO,IAAI,CAAC,QAAQ,CAAC;KACtB;AAED;;;;AAIG;AACH,IAAA,IAAW,GAAG,GAAA;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;KAClB;AA4BD;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,YAAY,CACvB,QAAgB,EAChB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EACR,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAE3C,CAAA,EAAA,eAAe,EAAE,IAAI,CAAC,QAAQ,EAC3B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,WAAW,CACtB,eAAuB,EACvB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAE
pF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC/D,IAAI,CAAC,QAAQ,EACb,eAAe,kBAEb,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;AACF,YAAA,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC;AAChC,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,YAAY,CAAC,OAAA,GAAiC,EAAE,EAAA;;AAC3D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,kBACpE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,UAAU,CAAC,OAAA,GAAiC,EAAE,EAAA;;AACzD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,kBAClE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,UAAU,CACrB,WAAmB,EACnB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAA
A,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,MAAM,gBAAgB,GAAA,MAAA,CAAA,MAAA,CAAA,EACpB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EACX,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CACtD,CAAC;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;AAC1E,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;ACrYD;AAoCA;;;;AAIG;AACG,MAAO,uBAAwB,SAAQC,eAAQ,CAAA;AAWnD;;;;;;;;;AASG;IACH,WACE,CAAA,MAA6B,EAC7B,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,OAAA,GAA0C,EAAE,EAAA;QAE5C,KAAK,CAAC,EAAE,aAAa,EAAE,OAAO,CAAC,aAAa,EAAE,CAAC,CAAC;QAtB1C,IAAO,CAAA,OAAA,GAAW,CAAC,CAAC;AAoDpB,QAAA,IAAA,CAAA,iBAAiB,GAAG,CAAC,IAAY,KAAI;AAC3C,YAAA,IAAI,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;AAClC,gBAAA,IAAI,CAAC,OAAO,CAAC,iBAAiB,GAAG,SAAS,CAAC;AAC3C,gBAAA,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;AACpB,gBAAA,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC,MAAM,CAAC,CAAC;AACvC,gBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACxB,OAAO;AACR,aAAA;;;;AAKD,YAAA,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC;YAC3B,IAAI,IAAI,CAAC,UAAU,EAAE;AACnB,gBAAA,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;AAC5D,aAAA;AACD,YAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AACpB,gBAAA,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;AACrB,aAAA;AACH,SAAC,CAAC;AAEM,QAAA,IAAA,CAAA,uBAAuB,GAAG,CAAC,GAAW,KAAI;AAChD,YAAA,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE;AACpC,gBAAA,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBAClB,OAAO;AACR,aAAA;;;;;;YAOD,IAAI,CAAC,yBAAyB,EAAE,CAAC;YACjC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;AAChC,gBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjB,aAAA;AAAM,iBAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,GAAG,EAAE;;;;AAIlC,gBAAA,IAAI,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACxC,oBAAA,IAAI,CAAC,OAAO,IAAI,CAAC,CAAC;AAClB,oBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;AACrB,yBAAA,IAAI,CAAC,CAAC,SAAS,KAAI;AAClB,wBAAA,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;wBACxB,IAAI,CAAC,sBAAsB,EAAE,CAAC;wBAC9B,OAAO;AACT,qBAAC,CAAC;AACD,yBAAA,KAAK,CAAC,CAAC,KAAK,KAAI;AACf,wBAAA,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACtB,qBAAC,CAAC,CAAC;AACN,iBAAA;AAAM,qBAAA;oBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,CAAA,mHAAA,EACE,IAAI,CAAC,MAAM,GAAG,CAChB,CAAA,sBAAA,EAAyB,IAAI,CAAC,GAAG,CAAA,WAAA,EAAc,IAAI,CAAC,OAAO,CAAA,eAAA,EACzD,IAAI,CAAC,gBACP,CAAA,CAAE,CACH,CACF,CAAC;AACH,iBAAA;AACF,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,4FACE,IAAI,CAAC,GACP,CAAsB,mBAAA,EAAA,IAAI,CAAC,MAAM,GAAG,CAAC,CAAE,CAAA,CACxC,CACF,CAAC;AACH,aAAA;AACH,SAAC,CAAC;AAnGA,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC;AACpB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,GAAG,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC;AAC9B,QAAA,IAAI,CAAC,gBAAgB;AACnB,YAAA,OAAO,CAAC,gBAAgB,IAAI,OAAO,CAAC,gBAAgB,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,GAAG,CAAC,CAAC;AAC3F,QAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;AACrC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,
CAAC;QAEvB,IAAI,CAAC,sBAAsB,EAAE,CAAC;KAC/B;IAEM,KAAK,GAAA;AACV,QAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;KACtB;IAEO,sBAAsB,GAAA;QAC5B,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;KACvD;IAEO,yBAAyB,GAAA;QAC/B,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC3D,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QAChE,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;KACnE;IA0ED,QAAQ,CAAC,KAAmB,EAAE,QAAiC,EAAA;;QAE7D,IAAI,CAAC,yBAAyB,EAAE,CAAC;AAChC,QAAA,IAAI,CAAC,MAAmB,CAAC,OAAO,EAAE,CAAC;AAEpC,QAAA,QAAQ,CAAC,KAAK,KAAK,IAAI,GAAG,SAAS,GAAG,KAAK,CAAC,CAAC;KAC9C;AACF;;AClLD;AAoBA;;;;;;;;;AASG;MACU,oBAAoB,CAAA;AAse/B;;;;;;;;AAQG;IACH,WACE,CAAA,gBAA4C,EAC5C,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,OAAA,GAA0C,EAAE,EAAA;AAE5C,QAAA,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,uBAAuB,CACnD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,MAAM,EACN,MAAM,EACN,KAAK,EACL,OAAO,CACR,CAAC;KACH;AA7fD;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;;;;;;;AAWG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;AACf,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;KACrC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,qBAAqB,GAAA;AAC9B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;KACpD;AAED;;;;;;AAMG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;AAKG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;;AAKG;AACH,IAAA,IAAW,uBAAuB,GAAA;AAChC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;KACtD;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;AAIG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,iBAAiB,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;
KAChD;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,cAAc,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;KAC7C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;KACtC;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;AAIG;AACH,IAAA,IAAW,gBAAgB,GAAA;AACzB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,gBAAgB,CAAC;KAC/C;AAED;;;;;AAKG;AACH,IAAA,IAAW,mBAAmB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;KAClD;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;AAIG;AACH,IAAA,IAAW,oCAAoC,GAAA;AAC7C,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,oCAAoC,CAAC;KACnE;AAED;;;;AAIG;AACH,IAAA,IAAW,iCAAiC,GAAA;AAC1C,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iCAAiC,CAAC;KAChE;AAED;;;;AAIG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,IAAW,2BAA2B,GAAA;AACpC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,2BAA2B,CAAC;KAC1D;AAED;;;;AAIG;AACH,IAAA,IAAW,sBAAsB,GAAA;AAC/B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,sBAAsB,CAAC;KACrD;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,kBAAkB,GAAA;QAC3B,OAAOtB,eAAM,GAAG,IAAI,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AAED;;AAEG;AACH,IAAA,IAAW,SAAS,GAAA;AAGlB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AA8BF;;AC7hBD;AACA;AAEO,MAAM,qBAAqB,GAAW,EAAE,CAAC;AACzC,MAAM,eAAe,GAAe,IAAI,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;AACrE,MAAM,cAAc,GAAW,YAAY,CAAC;AAC5C,MAAM,eAAe,GAAW,aAAa;;ACNpD;AACA;MAqBa,UAAU,CAAA;AACrB;;;;;;AAMG;IACI,aAAa,cAAc,CAChC,MAAoB,EACpB,MAAc,EACd,OAAA,GAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAC9E,QAAA,IAAI,KAAK,CAAC,MAAM,KAAK,MAAM,EAAE;AAC3B,YAAA,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;AACpC,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;AAKG;IACK,aAAa,QAAQ,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAChE,QAAA,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC;KACf;;;;IAKO,aAAa,cAAc,CACjC,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,IAAI,iBAAiB,GAAG,CAAC,CAAC;AAC1B,QAAA,IAAI,IAAI,EAAE,YAAY,EAAE,mBAAmB,CAAC;QAE5C,GAAG;YACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAClD,YAAA,YAAY,GAAG,IAAI,GAAG,IAAI,CAAC;YAC3B,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,KAAK,iBAAiB,CAAC;YACpD,iBAAiB,IAAI,CAAC,CAAC;AACxB,SAAA,QAAQ,YAAY,IAAI,iBAAiB,GAAG,EAAE,EAAE;AAEjD,QAAA,IAAI,YAAY,EAAE;;;YAGhB,aAAa,GAAG,aAAa,CAAC;AAC9B,YAAA,mBAAmB,GAAG,SAAS,CAAC;YAChC,GAAG;gBACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAClD,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,IAAI,mBAAmB,CAAC;AACrD,gBAAA,mBAAmB,IAAI,GAAG,CAAC;aAC5B,QAAQ,IAAI,GAAG,IAAI,EAAE;YAEtB,MAAM,GAAG,GAAG,CAAC,aAAa,GAAG,CAAC,GAAG,EAAE,aAAa,GAAG,CAAC,CAAC,GAAG,aAAa,IAAI,CAAC,CAAC;YAC3E,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB
,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,EAAE;AAClE,gBAAA,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;AACtC,aAAA;AACD,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAED,QAAA,OAAO,CAAC,aAAa,IAAI,CAAC,IAAI,EAAE,aAAa,GAAG,CAAC,CAAC,CAAC;KACpD;IAEM,aAAa,QAAQ,CAC1B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACnD;IAEM,aAAa,OAAO,CACzB,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACnD;IAEM,aAAa,QAAQ,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC;KACb;IAEM,aAAa,WAAW,CAC7B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,CAAC,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,KAAK,CAAC,EAAE;AACX,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;aAAM,IAAI,CAAC,KAAK,CAAC,EAAE;AAClB,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;AAC5C,SAAA;KACF;IAEM,aAAa,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAClE,QAAA,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;KACjC;IAEM,aAAa,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAClE,QAAA,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;KACjC;IAEM,aAAa,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,IAAI,IAAI,GAAG,CAAC,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC7C,SAAA;AAED,QAAA,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;KAChE;IAEM,aAAa,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC1D,QAAA,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;AACtC,QAAA,OAAO,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAClC;IAEO,aAAa,WAAW,CAC9B,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;;QAEzD,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACpD,QAAA,OAAO,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;KACvB;IAEM,aAAa,OAAO,CACzB,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA8B,GAAA,EAAE,KACJ;YAC5B,OAAO,UAAU,CAAC,WAAW,CAAC,CAAC,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;AACzD,SAAC,CAAC;AAEF,QAAA,MAAM,KAAK,GAAsB,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;QAE7F,MAAM,IAAI,GAAsB,EAAE,CAAC;AACnC,QAAA,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;AAC7B,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;IAEO,aAAa,SAAS,CAC5B,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,KAAK,GAAQ,EAAE,CAAC;AACtB,QAAA,KACE,IAAI,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EACtD,KAAK,KAAK,CAAC,EACX,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EAClD;YACA,IAAI,KAAK,GAAG,CAAC,EAAE;;gBAEb,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAC3C,KAAK,GAAG,CAAC,KAAK,CAAC;AAChB,aAAA;YAED,OAAO,KAAK,EAAE,EAAE;gBACd,MAAM,IAAI,GAAM,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACtD,gBAAA,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAClB,aAAA;AACF,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AACF,CAAA;AAOD,IAAK,WAOJ,CAAA;AAPD,CAAA,UAAK,WAAW,EAAA;AACd,IAAA,WAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACjB,IAAA,WAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,WAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,
CAAA;AACjB,CAAC,EAPI,WAAW,KAAX,WAAW,GAOf,EAAA,CAAA,CAAA,CAAA;AAYD,IAAK,aASJ,CAAA;AATD,CAAA,UAAK,aAAa,EAAA;AAChB,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,aAAA,CAAA,SAAA,CAAA,GAAA,SAAmB,CAAA;AACnB,IAAA,aAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,aAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACjB,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,aAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACnB,CAAC,EATI,aAAa,KAAb,aAAa,GASjB,EAAA,CAAA,CAAA,CAAA;MAEqB,QAAQ,CAAA;AAS5B;;AAEG;IACI,OAAO,UAAU,CAAC,MAAuB,EAAA;AAC9C,QAAA,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC9B,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;AAC1C,SAAA;AAAM,aAAA,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;AAChC,YAAA,OAAO,QAAQ,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;AACzC,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAsB,CAAC,CAAC;AAC1D,SAAA;KACF;IAEO,OAAO,gBAAgB,CAAC,MAAc,EAAA;AAC5C,QAAA,QAAQ,MAAM;YACZ,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,OAAO,CAAC;YAC3B,KAAK,aAAa,CAAC,GAAG,CAAC;YACvB,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM,CAAC;YAC1B,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM;AACvB,gBAAA,OAAO,IAAI,iBAAiB,CAAC,MAAuB,CAAC,CAAC;AACxD,YAAA;AACE,gBAAA,MAAM,IAAI,KAAK,CAAC,wBAAwB,MAAM,CAAA,CAAE,CAAC,CAAC;AACrD,SAAA;KACF;IAEO,OAAO,eAAe,CAAC,MAAa,EAAA;AAC1C,QAAA,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;KAC3D;IAEO,OAAO,gBAAgB,CAAC,MAAoB,EAAA;AAClD,QAAA,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;;QAEzB,IAAI;AACF,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;AACxC,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;;AAElB,SAAA;AAED,QAAA,QAAQ,IAAI;YACV,KAAK,WAAW,CAAC,MAAM;gBACrB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,CAAA,CAAE,CAAC,CAAC;AAC1E,iBAAA;AACD,gBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;AAChB,oBAAA,MAAM,IAAI,KAAK,CAAC,sDAAsD,MAAM,CAAA,CAAE,CAAC,CAAC;AACjF,iBAAA;;gBAGD,MAAM,MAAM,GAA6B,EAAE,CAAC;AAC5C,gBAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,CAAA,CAAE,CAAC,CAAC;AACnF,iBAAA;AACD,gBAAA,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,MAAM,EAAE;AACjC,oBAAA,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACtD,iBAAA;gBACD,OAAO,IAAI,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;YACjD,KAAK,WAAW,CAAC,IAAI;gBACnB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,CAAA,CAAE,CAAC,CAAC;AAC1E,iBAAA;AACD,gBAAA,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;AACnB,oBAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,MAAM,CAAA,CAAE,CAAC,CAAC;AACpF,iBAAA;AACD,gBAAA,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YAC1C,KAAK,WAAW,CAAC,GAAG;AAClB,gBAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,CAAA,CAAE,CAAC,CAAC;AACnF,iBAAA;AACD,gBAAA,OAAO,IAAI,WAAW,CAAC,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7D,YAAA,KAAK,WAAW,CAAC,KAAK,CAAC;AACvB,YAAA,KAAK,WAAW,CAAC,KAAK,CAAC;AACvB,YAAA;gBACE,MAAM,IAAI,KAAK,CAAC,CAAA,qBAAA,EAAwB,IAAI,CAAO,IAAA,EAAA,MAAM,CAAE,CAAA,CAAC,CAAC;AAChE,SAAA;KACF;AACF,CAAA;AAED,MAAM,iBAAkB,SAAQ,QAAQ,CAAA;AAGtC,IAAA,WAAA,CAAY,SAAwB,EAAA;AAClC,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;KAC7B;AAEM,IAAA,IAAI,CAAC,MAAoB,EAAE,OAAA,GAAiC,EAAE,EAAA;QACnE,QAAQ,IAAI,CAAC,UAAU;YACrB,KAAK,aAAa,CAAC,IAAI;AACrB,gBAAA,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;YAC/B,KAAK,aAAa,CAAC,OAAO;gBACxB,OAAO,UAAU,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YACjD,KAAK,aAAa,CAAC,GAAG;gBACpB,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC7C,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC9C,KAAK,aAAa,CAAC,KAAK;gBACt
B,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAChD,YAAA;AACE,gBAAA,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;AAC7C,SAAA;KACF;AACF,CAAA;AAED,MAAM,YAAa,SAAQ,QAAQ,CAAA;AAGjC,IAAA,WAAA,CAAY,OAAiB,EAAA;AAC3B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;KACzB;AAEM,IAAA,MAAM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE,EAAA;QACzE,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACxD,QAAA,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;KAC7B;AACF,CAAA;AAED,MAAM,aAAc,SAAQ,QAAQ,CAAA;AAGlC,IAAA,WAAA,CAAY,KAAiB,EAAA;AAC3B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;KACrB;AAEM,IAAA,MAAM,IAAI,CACf,MAAoB,EACpB,UAAiC,EAAE,EAAA;;QAGnC,MAAM,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC5D,QAAA,OAAO,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACrD;AACF,CAAA;AAED,MAAM,WAAY,SAAQ,QAAQ,CAAA;AAGhC,IAAA,WAAA,CAAY,QAAkB,EAAA;AAC5B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;KAC3B;AAEM,IAAA,IAAI,CAAC,MAAoB,EAAE,OAAA,GAAiC,EAAE,EAAA;AACnE,QAAA,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA4B,KACF;YAC1B,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;AACtC,SAAC,CAAC;QACF,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;KAC5D;AACF,CAAA;AAED,MAAM,cAAe,SAAQ,QAAQ,CAAA;IAInC,WAAY,CAAA,MAAgC,EAAE,IAAY,EAAA;AACxD,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;AACtB,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;KACnB;AAEM,IAAA,MAAM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE,EAAA;QACzE,MAAM,MAAM,GAAkC,EAAE,CAAC;AACjD,QAAA,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;AAC/B,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;AAC9B,YAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;AAC3D,gBAAA,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC7D,aAAA;AACF,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AACF;;ACpcD;AACA;AAOgB,SAAA,WAAW,CAAC,CAAa,EAAE,CAAa,EAAA;IACtD,IAAI,CAAC,KAAK,CAAC;AAAE,QAAA,OAAO,IAAI,CAAC;;AAEzB,IAAA,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI;AAAE,QAAA,OAAO,KAAK,CAAC;AACzC,IAAA,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;AAAE,QAAA,OAAO,KAAK,CAAC;AAExC,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACjC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAAE,YAAA,OAAO,KAAK,CAAC;AACjC,KAAA;AACD,IAAA,OAAO,IAAI,CAAC;AACd;;AClBA;MA6Ba,UAAU,CAAA;AAuCrB,IAAA,WAAA,CACE,UAAwB,EACxB,YAA2B,EAC3B,kBAA2B,EAC3B,uBAAgC,EAAA;AAEhC,QAAA,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;AAC9B,QAAA,IAAI,CAAC,aAAa,GAAG,YAAY,IAAI,UAAU,CAAC;AAChD,QAAA,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;AAC1B,QAAA,IAAI,CAAC,YAAY,GAAG,kBAAkB,IAAI,CAAC,CAAC;AAC5C,QAAA,IAAI,CAAC,YAAY,GAAG,uBAAuB,IAAI,CAAC,CAAC;AACjD,QAAA,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,IAAI,CAAC,CAAC;KACpD;AAhCD,IAAA,IAAW,WAAW,GAAA;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;KAC1B;AAGD,IAAA,IAAW,WAAW,GAAA;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;KAC1B;AA2BO,IAAA,MAAM,UAAU,CAAC,OAAA,GAA4B,EAAE,EAAA;AACrD,QAAA,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,MAAM,EAAE;YACzF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;AACH,QAAA,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,eAAe,CAAC,EAAE;AACzC,YAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;AAChD,SAAA;;;AAID,QAAA,IAAI,CAAC,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,UAAU,EAAE;YACnF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;QAGH,MAAM,KAAK
,GAAG,IAAI,CAAC,SAAU,CAAC,cAAc,CAAC,CAAC;AAC9C,QAAA,IAAI,EAAE,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,EAAE;AAChE,YAAA,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC7C,SAAA;;AAGD,QAAA,IAAI,CAAC,WAAW,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,qBAAqB,EAAE;YAC5F,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;AAGH,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;AAE7C,QAAA,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,EAAE;AAC3B,YAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;AAC1E,SAAA;QAED,IAAI,CAAC,sBAAsB,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;YACxE,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;AAEH,QAAA,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAElF,QAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;AAC9C,YAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,EAAE;AAC1C,gBAAA,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;gBAClF,IAAI,CAAC,sBAAuB,EAAE,CAAC;AAChC,aAAA;AACF,SAAA;KACF;IAEM,OAAO,GAAA;QACZ,OAAO,CAAC,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,CAAC;KAC/D;IAEa,YAAY,CACxB,UAA4B,EAAE,EAAA;;AAE9B,YAAA,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;AACtB,gBAAA,MAAAuB,aAAA,CAAM,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA,CAAC;AAChC,aAAA;AAED,YAAA,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE;AACrB,gBAAA,MAAM,MAAM,GAAG,MAAMA,aAAA,CAAA,IAAI,CAAC,SAAU,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;oBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,iBAAA,CAAC,CAAA,CAAC;gBAEH,IAAI,CAAC,sBAAuB,EAAE,CAAC;gBAC/B,IAAI,CAAC,YAAa,EAAE,CAAC;AAErB,gBAAA,IAAI,IAAI,CAAC,sBAAsB,KAAK,CAAC,EAAE;AACrC,oBAAA,MAAM,MAAM,GAAG,MAAMA,aAAA,CAAA,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,WAAW,EAAE,qBAAqB,EAAE;wBACtF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,qBAAA,CAAC,CAAA,CAAC;AAEH,oBAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;AACzE,oBAAA,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;oBAEtB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,WAAY,EAAE,MAAM,CAAC,EAAE;AAC3C,wBAAA,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;AACrD,qBAAA;oBAED,IAAI;wBACF,IAAI,CAAC,sBAAsB,GAAG,MAAMA,aAAA,CAAA,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;4BACxE,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,yBAAA,CAAC,CAAA,CAAC;AACJ,qBAAA;AAAC,oBAAA,OAAO,GAAQ,EAAE;;AAEjB,wBAAA,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC;AACjC,qBAAA;AAED,oBAAA,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,EAAE;;AAEpC,wBAAA,MAAAA,aAAA,CAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAA,CAAC;AACnF,qBAAA;AACF,iBAAA;gBACD,MAAM,MAAAA,aAAA,CAAA,MAAM,CAAA,CAAC;AACd,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AACF;;AClLD;AACA;MAesB,YAAY,CAAA;AAGjC;;ACnBD;AAMA,MAAM,WAAW,GAAG,IAAIpB,0BAAU,CAAC,2CAA2C,CAAC,CAAC;AAE1E,MAAO,sBAAuB,SAAQ,YAAY,CAAA;AAWtD,IAAA,WAAA,CAAY,QAA+B,EAAA;AACzC,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;AAC1B,QAAA,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;KACpB;AAXO,IAAA,YAAY,CAAC,IAAqB,EAAA;AACxC,QAAA,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AAC5B,YAAA,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAOD,IAAA,IAAW,QAAQ,GAAA;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;KACvB;AACM,IAAA,MAAM,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE,EAAA;;AACnE,QAAA,IAAI,MAAA,OAAO,CAAC,WAAW,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,OAAO,EAAE;AAChC,YAAA,MAAM,WAAW,CAAC;AACnB,SAAA;QAED,IAAI,IAAI,GAAG,CAAC,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,sCAAsC,IAAI,CAAA,CAAE,CAAC,CAAC;AAC/D,SAAA;QAED,IAAI,IAAI,KAAK,CAAC,EAAE;YACd,OAAO,IAAI,UAAU,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,IAA
I,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE;AAC5B,YAAA,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;AAC/C,SAAA;;QAED,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACxC,QAAA,IAAI,KAAK,EAAE;AACT,YAAA,IAAI,CAAC,SAAS,IAAI,KAAK,CAAC,MAAM,CAAC;;AAE/B,YAAA,OAAO,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;AACjC,SAAA;AAAM,aAAA;;YAEL,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAI;;gBAErC,MAAM,OAAO,GAAe,MAAK;oBAC/B,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;oBAC5D,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBACvD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;oBACrD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBAEvD,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACjE,qBAAA;AACH,iBAAC,CAAC;gBAEF,MAAM,gBAAgB,GAAe,MAAK;oBACxC,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAChD,oBAAA,IAAI,aAAa,EAAE;AACjB,wBAAA,IAAI,CAAC,SAAS,IAAI,aAAa,CAAC,MAAM,CAAC;AACvC,wBAAA,OAAO,EAAE,CAAC;;wBAEV,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC,CAAC;AAC3C,qBAAA;AACH,iBAAC,CAAC;gBAEF,MAAM,cAAc,GAAe,MAAK;AACtC,oBAAA,OAAO,EAAE,CAAC;AACV,oBAAA,MAAM,EAAE,CAAC;AACX,iBAAC,CAAC;gBAEF,MAAM,YAAY,GAAe,MAAK;AACpC,oBAAA,OAAO,EAAE,CAAC;oBACV,MAAM,CAAC,WAAW,CAAC,CAAC;AACtB,iBAAC,CAAC;gBAEF,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAChD,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;gBAC3C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AAC9D,iBAAA;;AAEH,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;AACF;;AC/FD;AA6BA;;;;AAIG;AACG,MAAO,oBAAqB,SAAQmB,eAAQ,CAAA;AAQhD;;;;;AAKG;IACH,WAAmB,CAAA,MAA6B,EAAE,OAAA,GAAuC,EAAE,EAAA;AACzF,QAAA,KAAK,EAAE,CAAC;QAXF,IAAU,CAAA,UAAA,GAAY,IAAI,CAAC;AAYjC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;AACrC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AAC/B,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,sBAAsB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AAC1E,QAAA,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;KACpF;IAEM,KAAK,GAAA;QACV,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,KAAI;AAChC,gBAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;AAC1B,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;AAEO,IAAA,MAAM,YAAY,GAAA;AACxB,QAAA,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;AACxB,QAAA,IAAI,QAAQ,CAAC;QACb,GAAG;YACD,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACtC,IAAI,QAAQ,CAAC,IAAI,EAAE;gBACjB,MAAM;AACP,aAAA;AACD,YAAA,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC;AAC3B,YAAA,MAAM,MAAM,GAAI,GAAW,CAAC,OAAO,CAAC;AACpC,YAAA,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC9B,gBAAA,MAAM,KAAK,CAAC,gCAAgC,CAAC,CAAC;AAC/C,aAAA;AAED,YAAA,QAAQ,MAAM;AACZ,gBAAA,KAAK,0DAA0D;AAC7D,oBAAA;AACE,wBAAA,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;AAC/B,wBAAA,IAAI,IAAI,YAAY,UAAU,KAAK,KAAK,EAAE;AACxC,4BAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACpD,yBAAA;AACD,wBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE;AACjC,4BAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;AACxB,yBAAA;AACF,qBAAA;oBACD,MAAM;AACR,gBAAA,KAAK,wDAAwD;AAC3D,oBAAA;AACE,wBAAA,MAAM,YAAY,GAAI,GAAW,CAAC,YAAY,CAAC;AAC/C,wBAAA,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;AACpC,4BAAA,MAAM,KAAK,CAAC,+CAA+C,CAAC,CAAC;AAC9D,yBAAA;wBACD,IAAI,IAAI,CAAC,UAAU,EAAE;4BACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC,CAAC;AAChD,yBAAA;AACF,qBAAA;oBACD,MAAM;AACR,gBAAA,KAAK,mDAAmD;oBACtD,IAAI,IAAI,CAAC,UAAU,EAAE;AACnB,wBAAA,MAAM,UAAU,GAAI,GAAW,CAAC,UAAU,CAAC;AAC3C,wBAAA,IAAI,OAAO,UA
AU,KAAK,QAAQ,EAAE;AAClC,4BAAA,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;AACvD,yBAAA;wBACD,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC,CAAC;AAC9C,qBAAA;AACD,oBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;AACR,gBAAA,KAAK,qDAAqD;oBACxD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,wBAAA,MAAM,KAAK,GAAI,GAAW,CAAC,KAAK,CAAC;AACjC,wBAAA,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;AAC9B,4BAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACpD,yBAAA;AACD,wBAAA,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;AAC/B,wBAAA,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AAC5B,4BAAA,MAAM,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACnD,yBAAA;AACD,wBAAA,MAAM,WAAW,GAAI,GAAW,CAAC,WAAW,CAAC;AAC7C,wBAAA,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;AACnC,4BAAA,MAAM,KAAK,CAAC,2CAA2C,CAAC,CAAC;AAC1D,yBAAA;AACD,wBAAA,MAAM,QAAQ,GAAI,GAAW,CAAC,QAAQ,CAAC;AACvC,wBAAA,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAChC,4BAAA,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;AACvD,yBAAA;wBACD,IAAI,CAAC,OAAO,CAAC;4BACX,QAAQ;4BACR,IAAI;AACJ,4BAAA,OAAO,EAAE,KAAK;4BACd,WAAW;AACZ,yBAAA,CAAC,CAAC;AACJ,qBAAA;oBACD,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,KAAK,CAAC,CAAA,eAAA,EAAkB,MAAM,CAAA,yBAAA,CAA2B,CAAC,CAAC;AACpE,aAAA;SACF,QAAQ,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;KAC9C;AACF;;AC/ID;AAkBA;;;;;AAKG;MACU,iBAAiB,CAAA;AAkY5B;;;;;AAKG;IACH,WACE,CAAA,gBAAwC,EACxC,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;AACzC,QAAA,IAAI,CAAC,kBAAkB,GAAG,IAAI,oBAAoB,CAChD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,OAAO,CACR,CAAC;KACH;AAhZD;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;;;;;;;AAWG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,SAAS,CAAC;KAClB;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;AACf,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;KACrC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,qBAAqB,GAAA;AAC9B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;KACpD;AAED;;;;;;AAMG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;AAKG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;;AAKG;AACH,IAAA,IAAW,uBAAuB,GAAA;AAChC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;KACtD;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;AAIG;AACH,IAAA
,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,iBAAiB,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;KAChD;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,cAAc,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;KAC7C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;KACtC;AAED;;;;;AAKG;AACH,IAAA,IAAW,mBAAmB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;KAClD;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,SAAS,CAAC;KAClB;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,kBAAkB,GAAA;QAC3B,OAAOtB,eAAM,GAAG,IAAI,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AAED;;AAEG;AACH,IAAA,IAAW,SAAS,GAAA;AAGlB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAqBF;;AC1aD;AAuGA;;;AAGG;AACSwB,+BAkBX;AAlBD,CAAA,UAAY,aAAa,EAAA;AACvB;;AAEG;AACH,IAAA,aAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb;;AAEG;AACH,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb;;;AAGG;AACH,IAAA,aAAA,CAAA,SAAA,CAAA,GAAA,SAAmB,CAAA;AACrB,CAAC,EAlBWA,qBAAa,KAAbA,qBAAa,GAkBxB,EAAA,CAAA,CAAA,CAAA;AAED;;;;AAIG;AACSC,qCA6CX;AA7CD,CAAA,UAAY,mBAAmB,EAAA;AAC7B;;AAEG;AACH,IAAA,mBAAA,CAAA,IAAA,CAAA,GAAA,IAAS,CAAA;AACT;;AAEG;AACH,IAAA,mBAAA,CAAA,IAAA,CAAA,GAAA,IAAS,CAAA;AACT;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACb,CAAC,EA7CWA,2BAAmB,KAAnBA,2BAAmB,GA6C9B,EAAA,CAAA,CAAA,CAAA;AAEK,SAAU,YAAY,CAC1B,IAA8D,EAAA;IAE9D,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,OAAO,IAAkB,CAAC;AAC5B,CAAC;AAEe,SAAA,oBAAoB,CAAC,GAAwB,EAAE,OAAgB,EAAA;AAC7E,IAAA,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE;AACnB,QAAA,MAAM,IAAI,UAAU,CAAC,2DAA2D,CAAC,CAAC;AACnF,KAAA;AAED,IAAA,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,mBAAmB,EAAE;AACnC,QAAA,GAAG,CAAC,mBAAmB,GAAG,wBAAwB,CAAC;AACpD,KAAA;AACH,CAAC;AA2HD;;AAEG;AACSC,qCASX;AATD,CAAA,UAAY,mBAAmB,EAAA;AAC7B;;AAEG;AACH,IAAA,mBAAA,CAAA,oBAAA,CAAA,GAAA,oCAAyD,CAAA;AACzD;;AAEG;AACH,IAAA,mBAAA,CAAA,wBAAA,CAAA,GAAA,yCAAkE,CAAA;AACpE,CAAC,EATWA,2BAAmB,KAAnBA,2BAAmB,GAS9B,EAAA,CAAA,CAAA,CAAA;AAEK,SAAU,6BAA6B,CAAC,kBAA0B,EAAA;IACtE,OAAO,CAAA,QAAA,EAAW,kBAAkB,CAAA,+BAAA,CAAiC,CAAC;AACxE;;AChVA;AACA;AA8EA;;;;;AAKG;AACG,SAAU,sBAAsB,CACpC,QAAqF,EAAA;IAErF,MAAM,SAAS,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,MAAM;QAC5E,MAAM,EAAE,CAAC,CAAC,KAAK;AACf,QAAA,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;AACvB,KAAA,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,UAAU,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,MAAM;QAC9E,MAAM,EAAE,CAAC,CAAC,KAAK;AACf,QAAA,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;AACvB,KAAA,CAAC,CAAC,CAAC;IAEJ,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAA
A,EAAA,QAAQ,KACX,SAAS;QACT,UAAU,EACV,SAAS,EACJ,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CAAC,SAAS,CAAA,EAAA,EACrB,UAAU,EAAE;gBACV,SAAS;gBACT,UAAU;aACX,EAEH,CAAA,EAAA,CAAA,CAAA;AACJ;;AC9GA;AA0EA;;;;;AAKG;AACG,MAAO,0BAA2B,SAAQC,cAG/C,CAAA;AAGC,IAAA,WAAA,CAAY,OAA0C,EAAA;AACpD,QAAA,MAAM,EACJ,UAAU,EACV,UAAU,EACV,YAAY,GAAG,KAAK,EACpB,UAAU,EACV,UAAU,EACV,uBAAuB,GACxB,GAAG,OAAO,CAAC;AAEZ,QAAA,IAAI,KAAgD,CAAC;AAErD,QAAA,IAAI,UAAU,EAAE;YACd,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC;AACtC,SAAA;AAED,QAAA,MAAM,SAAS,GAAG,qCAAqC,CAClD,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,KAAK,KACR,UAAU;YACV,UAAU;AACV,YAAA,uBAAuB,IACvB,CAAC;QAEH,KAAK,CAAC,SAAS,CAAC,CAAC;AAEjB,QAAA,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;AACpC,YAAA,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;AAC7B,SAAA;AAED,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,KAAK,GAAA;AACV,QAAA,OAAOpB,cAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;KACjC;AACF,CAAA;AAED;;;;;AAKG;AACH,MAAM,MAAM,GAAgD,eAAe,MAAM,CAE/E,OAAO,GAAG,EAAE,EAAA;AAEZ,IAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;AACzB,IAAA,MAAM,EAAE,MAAM,EAAE,GAAG,KAAK,CAAC;IACzB,IAAI,KAAK,CAAC,WAAW,EAAE;AACrB,QAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACrD,KAAA;IAED,IAAI,CAAC,MAAM,EAAE;AACX,QAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AACzB,QAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACrD,KAAA;;AAGD,IAAA,MAAM,KAAK,CAAC,UAAU,CAAC,gBAAgB,CAAC,MAAM,EAAE;QAC9C,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,KAAA,CAAC,CAAC;AACH,IAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAEzB,IAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;AAKG;AACH,MAAM,MAAM,GAAgD,eAAe,MAAM,CAE/E,OAAO,GAAG,EAAE,EAAA;AAEZ,IAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,uBAAuB,EAAE,GAAG,KAAK,CAAC;AAElE,IAAA,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;AACpB,QAAA,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,gBAAgB,CAAC,UAAU,EAAE,uBAAuB,CAAC,CAAC;;AAGtF,QAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;AAC7B,QAAA,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,EAAE;AACnC,YAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;AACtB,YAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,SAAA;AACF,KAAA;AAAM,SAAA,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;QAC7B,IAAI;AACF,YAAA,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAC1F,YAAA,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,GAAG,MAAM,CAAC;AAC5C,YAAA,MAAM,gBAAgB,GAAG,KAAK,CAAC,YAAY,CAAC;AAC5C,YAAA,IAAI,YAAY,EAAE;AAChB,gBAAA,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC;AACnC,aAAA;YACD,IACE,UAAU,KAAK,SAAS;AACxB,gBAAA,YAAY,KAAK,gBAAgB;AACjC,gBAAA,OAAO,OAAO,CAAC,YAAY,KAAK,UAAU,EAC1C;;AAEA,gBAAA,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;AAC7B,aAAA;iBAAM,IAAI,UAAU,KAAK,SAAS,EAAE;AACnC,gBAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;AACtB,gBAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,aAAA;iBAAM,IAAI,UAAU,KAAK,QAAQ,EAAE;AAClC,gBAAA,KAAK,CAAC,KAAK,GAAG,IAAI,KAAK,CACrB,CAAA,+BAAA,EAAkC,MAAM,CAAC,qBAAqB,IAAI,SAAS,CAAA,CAAA,CAAG,CAC/E,CAAC;AACF,gBAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC;AAClB,YAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,SAAA;AACF,KAAA;AAED,IAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;AAKG;AACH,MAAM,QAAQ,GAAkD,SAAS,QAAQ,GAAA;AAG/E,IAAA,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC,GAAG,EAAE,KAAK,KAAI;;QAE1D,IAAI,GAAG,KAAK,YAAY,EAAE;AACxB,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;AACf,KAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;AAGG;AACH,SAAS,qCAAqC,CAC5C,KAAoC,EAAA;IAEpC,OAAO;QACL,KAAK,EAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAO,KAAK,CAAE;QACnB,MAAM;QACN,QAAQ;QACR,MAAM;KACP,CAAC;AACJ;;AChPA;AACA;AAkBA;;;;;;AAMG;AACG,SAAU,aAAa,CAAC,MAAa,EAAA;AACzC,IAAA,I
AAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;AACrB,QAAA,MAAM,IAAI,UAAU,CAAC,CAAA,sCAAA,CAAwC,CAAC,CAAC;AAChE,KAAA;IACD,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,EAAE;AACrC,QAAA,MAAM,IAAI,UAAU,CAClB,CAAA,iGAAA,CAAmG,CACpG,CAAC;AACH,KAAA;IACD,OAAO,MAAM,CAAC,KAAK;AACjB,UAAE,CAAA,MAAA,EAAS,MAAM,CAAC,MAAM,CAAI,CAAA,EAAA,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,CAAE,CAAA;AAC9D,UAAE,CAAS,MAAA,EAAA,MAAM,CAAC,MAAM,GAAG,CAAC;AAChC;;ACtCA;AAYA;;AAEG;AACH,IAAK,WAGJ,CAAA;AAHD,CAAA,UAAK,WAAW,EAAA;AACd,IAAA,WAAA,CAAA,WAAA,CAAA,MAAA,CAAA,GAAA,CAAA,CAAA,GAAA,MAAI,CAAA;AACJ,IAAA,WAAA,CAAA,WAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AACP,CAAC,EAHI,WAAW,KAAX,WAAW,GAGf,EAAA,CAAA,CAAA,CAAA;AAED;;;;AAIG;MACU,KAAK,CAAA;AAqChB;;;AAGG;AACH,IAAA,WAAA,CAAmB,cAAsB,CAAC,EAAA;AAnC1C;;AAEG;QACK,IAAO,CAAA,OAAA,GAAW,CAAC,CAAC;AAE5B;;AAEG;QACK,IAAS,CAAA,SAAA,GAAW,CAAC,CAAC;AAE9B;;AAEG;QACK,IAAM,CAAA,MAAA,GAAW,CAAC,CAAC;AAE3B;;AAEG;QACK,IAAU,CAAA,UAAA,GAAgB,EAAE,CAAC;AAErC;;;AAGG;AACK,QAAA,IAAA,CAAA,KAAK,GAAgB,WAAW,CAAC,IAAI,CAAC;QAY5C,IAAI,WAAW,GAAG,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,mCAAmC,CAAC,CAAC;AAC3D,SAAA;AACD,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,OAAO,GAAG,IAAIqB,mBAAY,EAAE,CAAC;KACnC;AAED;;;;AAIG;AACI,IAAA,YAAY,CAAC,SAAoB,EAAA;AACtC,QAAA,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,YAAW;YAC9B,IAAI;gBACF,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,MAAM,SAAS,EAAE,CAAC;gBAClB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,IAAI,CAAC,SAAS,EAAE,CAAC;gBACjB,IAAI,CAAC,eAAe,EAAE,CAAC;AACxB,aAAA;AAAC,YAAA,OAAO,KAAU,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;AACnC,aAAA;AACH,SAAC,CAAC,CAAC;KACJ;AAED;;;AAGG;AACI,IAAA,MAAM,EAAE,GAAA;AACb,QAAA,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAChC,YAAA,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;AAC1B,SAAA;QAED,IAAI,CAAC,eAAe,EAAE,CAAC;QAEvB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;YAC3C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAEnC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,KAAI;AACjC,gBAAA,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;gBAC/B,MAAM,CAAC,KAAK,CAAC,CAAC;AAChB,aAAC,CAAC,CAAC;AACL,SAAC,CAAC,CAAC;KACJ;AAED;;;AAGG;IACK,aAAa,GAAA;QACnB,IAAI,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YACxC,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;AACvC,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;;;AAIG;IACK,eAAe,GAAA;AACrB,QAAA,IAAI,IAAI,CAAC,KAAK,KAAK,WAAW,CAAC,KAAK,EAAE;YACpC,OAAO;AACR,SAAA;QAED,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;AAC5C,YAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YAC5B,OAAO;AACR,SAAA;AAED,QAAA,OAAO,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;AACtC,YAAA,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;AACvC,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,SAAS,EAAE,CAAC;AACb,aAAA;AAAM,iBAAA;gBACL,OAAO;AACR,aAAA;AACF,SAAA;KACF;AACF;;ACrJD;AAUA;;AAEG;AACG,MAAO,aAAc,SAAQN,eAAQ,CAAA;AAgBzC;;;;;;AAMG;AACH,IAAA,WAAA,CACU,OAAiB,EACjB,UAAkB,EAC1B,OAA8B,EAAA;QAE9B,KAAK,CAAC,OAAO,CAAC,CAAC;QAJP,IAAO,CAAA,OAAA,GAAP,OAAO,CAAU;QACjB,IAAU,CAAA,UAAA,GAAV,UAAU,CAAQ;AAI1B,QAAA,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;AACnC,QAAA,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;AACrB,QAAA,IAAI,CAAC,iBAAiB,GAAG,CAAC,CAAC;;QAG3B,IAAI,aAAa,GAAG,CAAC,CAAC;AACtB,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;AAC9B,YAAA,aAAa,IAAI,GAAG,CAAC,UAAU,CAAC;AACjC,SAAA;AACD,QAAA,IAAI,aAAa,GAAG,IAAI,CAAC,UAAU,EAAE;AACnC,YAAA,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;AACpF,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,KAAK,CAAC,IAAa,EAAA;AACxB,QAAA,IAAI,IAAI,CAAC,iBAAiB,IAAI,IAAI,CAAC,UAAU,EAAE;AAC7C,YAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjB,SAAA;QAED,IAAI,CAAC,IAAI,EAAE;AACT,YAAA,IAAI,GAAG,IAAI,CAAC,qBAAqB,CAAC;AACnC,SAAA;QA
ED,MAAM,UAAU,GAAa,EAAE,CAAC;QAChC,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC,UAAU,EAAE;;YAE3D,MAAM,yBAAyB,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC;AAC3E,YAAA,MAAM,6BAA6B,GACjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,UAAU,GAAG,IAAI,CAAC,yBAAyB,CAAC;YAC7E,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,6BAA6B,EAAE,yBAAyB,CAAC,CAAC;AACrF,YAAA,IAAI,SAAS,GAAG,IAAI,GAAG,CAAC,EAAE;;gBAExB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,IAAI,GAAG,CAAC,CAAC;gBACtD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3F,gBAAA,IAAI,CAAC,iBAAiB,IAAI,IAAI,GAAG,CAAC,CAAC;AACnC,gBAAA,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;gBACrC,CAAC,GAAG,IAAI,CAAC;gBACT,MAAM;AACP,aAAA;AAAM,iBAAA;;AAEL,gBAAA,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,SAAS,CAAC;gBACvD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,SAAS,KAAK,6BAA6B,EAAE;;AAE/C,oBAAA,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;oBACnC,IAAI,CAAC,WAAW,EAAE,CAAC;AACpB,iBAAA;AAAM,qBAAA;AACL,oBAAA,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;AACtC,iBAAA;AACD,gBAAA,IAAI,CAAC,iBAAiB,IAAI,SAAS,CAAC;gBACpC,CAAC,IAAI,SAAS,CAAC;AAChB,aAAA;AACF,SAAA;AAED,QAAA,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AACtC,SAAA;AAAM,aAAA,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,SAAA;KACF;AACF;;AC5GD;AAMA;;AAEG;AACH;AACA,MAAM,eAAe,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC;AAE/D;;;;;;;AAOG;MACU,YAAY,CAAA;AA4CvB,IAAA,WAAA,CAAY,QAAgB,EAAE,OAAkB,EAAE,WAAoB,EAAA;AA3CtE;;;AAGG;QACK,IAAO,CAAA,OAAA,GAAa,EAAE,CAAC;AAwC7B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;;QAGf,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC;QACxD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;AAClC,YAAA,IAAI,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,GAAG,QAAQ,GAAG,eAAe,GAAG,eAAe,CAAC;YAC7E,IAAI,GAAG,KAAK,CAAC,EAAE;gBACb,GAAG,GAAG,eAAe,CAAC;AACvB,aAAA;AACD,YAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,SAAA;AAED,QAAA,IAAI,OAAO,EAAE;AACX,YAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,WAAY,CAAC,CAAC;AAClC,SAAA;KACF;AA5CD;;AAEG;AACH,IAAA,IAAW,IAAI,GAAA;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAyCD;;;;;;;;AAQG;IACI,IAAI,CAAC,OAAiB,EAAE,WAAmB,EAAA;AAChD,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;AAElD,QAAA,IAAI,CAAC,GAAG,CAAC,EACP,CAAC,GAAG,CAAC,EACL,YAAY,GAAG,CAAC,EAChB,YAAY,GAAG,CAAC,EAChB,cAAc,GAAG,CAAC,CAAC;AACrB,QAAA,OAAO,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE;AAClC,YAAA,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC/B,YAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,YAAY,EAAE,YAAY,CAAC,CAAC;YAElE,cAAc,IAAI,SAAS,CAAC;YAC5B,YAAY,IAAI,SAAS,CAAC;YAC1B,YAAY,IAAI,SAAS,CAAC;AAC1B,YAAA,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;AAClC,gBAAA,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;AAClB,aAAA;AACD,YAAA,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;AAClC,gBAAA,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;AAClB,aAAA;AACF,SAAA;;AAGD,QAAA,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACrB,QAAA,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AACtB,YAAA,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AAC7C,SAAA;KACF;AAED;;;AAGG;IACI,iBAAiB,GAAA;QACtB,OAAO,IAAI,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;KACnD;AACF;;ACpID;AAgBA;;;;;;;;;;;;;;;;;;;;;AAqBG;MACU,eAAe,CAAA;AAuF1B;;;;;;;;;;;AAWG;IACH,WACE,CAAA,QAAkB,EAClB,UAAkB,E
AClB,UAAkB,EAClB,eAAgC,EAChC,WAAmB,EACnB,QAAyB,EAAA;AAlF3B;;AAEG;AACc,QAAA,IAAA,CAAA,OAAO,GAAiB,IAAIM,mBAAY,EAAE,CAAC;AAO5D;;AAEG;QACK,IAAM,CAAA,MAAA,GAAW,CAAC,CAAC;AAE3B;;AAEG;QACK,IAAW,CAAA,WAAA,GAAY,KAAK,CAAC;AAErC;;AAEG;QACK,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEjC;;AAEG;QACK,IAAyB,CAAA,yBAAA,GAAW,CAAC,CAAC;AAO9C;;AAEG;QACK,IAAU,CAAA,UAAA,GAAW,CAAC,CAAC;AAE/B;;;;;;AAMG;QACK,IAAmB,CAAA,mBAAA,GAAa,EAAE,CAAC;AAE3C;;AAEG;QACK,IAAgB,CAAA,gBAAA,GAAW,CAAC,CAAC;AAErC;;AAEG;QACK,IAAQ,CAAA,QAAA,GAAmB,EAAE,CAAC;AAEtC;;AAEG;QACK,IAAQ,CAAA,QAAA,GAAmB,EAAE,CAAC;QAsBpC,IAAI,UAAU,IAAI,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,CAAA,CAAE,CAAC,CAAC;AACpF,SAAA;QAED,IAAI,UAAU,IAAI,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,CAAA,CAAE,CAAC,CAAC;AACpF,SAAA;QAED,IAAI,WAAW,IAAI,CAAC,EAAE;AACpB,YAAA,MAAM,IAAI,UAAU,CAAC,iDAAiD,WAAW,CAAA,CAAE,CAAC,CAAC;AACtF,SAAA;AAED,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;KAC1B;AAED;;;;AAIG;AACI,IAAA,MAAM,EAAE,GAAA;QACb,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;YAC3C,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,KAAI;gBAChC,IAAI,GAAG,OAAO,IAAI,KAAK,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC;AAC1E,gBAAA,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC;AAEhC,gBAAA,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;AACvB,oBAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;AACvB,iBAAA;AACH,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,KAAI;gBAChC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;AAClC,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;AAC3B,gBAAA,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;AACxB,gBAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAChC,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,KAAI;AAC/B,gBAAA,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;AACpB,gBAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;gBACtB,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;AAC/B,gBAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,IAAI,CAAC,uBAAuB,EAAE,CAAC;oBAC/B,OAAO;AACR,iBAAA;gBAED,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,yBAAyB,KAAK,CAAC,EAAE;AAC5D,oBAAA,IAAI,IAAI,CAAC,gBAAgB,GAAG,CAAC,IAAI,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,UAAU,EAAE;AACxE,wBAAA,MAAM,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;AACzD,wBAAA,IAAI,CAAC,eAAe,CAAC,MAAM,MAAM,CAAC,iBAAiB,EAAE,EAAE,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC;6BAC7E,IAAI,CAAC,OAAO,CAAC;6BACb,KAAK,CAAC,MAAM,CAAC,CAAC;AAClB,qBAAA;AAAM,yBAAA,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnD,OAAO;AACR,qBAAA;AAAM,yBAAA;AACL,wBAAA,OAAO,EAAE,CAAC;AACX,qBAAA;AACF,iBAAA;AACH,aAAC,CAAC,CAAC;AACL,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;AACK,IAAA,oBAAoB,CAAC,IAAY,EAAA;AACvC,QAAA,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACpC,QAAA,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,MAAM,CAAC;KACtC;AAED;;;;AAIG;AACK,IAAA,kCAAkC,CAAC,MAAqB,EAAA;QAC9D,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,MAAM,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;AAC7F,SAAA;AAAM,aAAA;YACL,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;AAC9D,SAAA;AAED,QAAA,IAAI,CAAC,gBAAgB,IAAI,MAAM,CAAC,IAAI,CAAC;AACrC,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;;;;;;;AAQG;IACK,WAAW,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;AAC/C,YAAA,IAAI,MAAoB,CAAC;AAEzB,YAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;AAC5B,gBAAA,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAG,CAAC;AACh
C,gBAAA,IAAI,CAAC,kCAAkC,CAAC,MAAM,CAAC,CAAC;AACjD,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE;AACrC,oBAAA,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;oBACnD,IAAI,CAAC,UAAU,EAAE,CAAC;AACnB,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,OAAO,KAAK,CAAC;AACd,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,uBAAuB,EAAE,CAAC;AAChC,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;;AAGG;AACK,IAAA,MAAM,uBAAuB,GAAA;AACnC,QAAA,IAAI,MAAgC,CAAC;QACrC,GAAG;AACD,YAAA,IAAI,IAAI,CAAC,yBAAyB,IAAI,IAAI,CAAC,WAAW,EAAE;gBACtD,OAAO;AACR,aAAA;AAED,YAAA,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;AAC/B,YAAA,IAAI,MAAM,EAAE;AACV,gBAAA,IAAI,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC;AACrC,aAAA;AACF,SAAA,QAAQ,MAAM,EAAE;KAClB;AAED;;;;AAIG;IACK,MAAM,sBAAsB,CAAC,MAAoB,EAAA;AACvD,QAAA,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC;QAEjC,IAAI,CAAC,yBAAyB,EAAE,CAAC;AACjC,QAAA,IAAI,CAAC,MAAM,IAAI,YAAY,CAAC;QAE5B,IAAI;YACF,MAAM,IAAI,CAAC,eAAe,CACxB,MAAM,MAAM,CAAC,iBAAiB,EAAE,EAChC,YAAY,EACZ,IAAI,CAAC,MAAM,GAAG,YAAY,CAC3B,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAChC,OAAO;AACR,SAAA;QAED,IAAI,CAAC,yBAAyB,EAAE,CAAC;AACjC,QAAA,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;AACzB,QAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;KAC/B;AAED;;;;AAIG;AACK,IAAA,WAAW,CAAC,MAAoB,EAAA;AACtC,QAAA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;AAC5D,YAAA,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;AACxB,SAAA;KACF;AACF;;AC5UD;AAOA;;;;;;;;AAQG;AACI,eAAe,cAAc,CAClC,MAA6B,EAC7B,MAAc,EACd,MAAc,EACd,GAAW,EACX,QAAyB,EAAA;AAEzB,IAAA,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAA,MAAM,KAAK,GAAG,GAAG,GAAG,MAAM,CAAC;IAE3B,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;AAC3C,QAAA,MAAM,OAAO,GAAG,UAAU,CACxB,MAAM,MAAM,CAAC,IAAI,KAAK,CAAC,CAA+C,6CAAA,CAAA,CAAC,CAAC,EACxE,eAAe,CAChB,CAAC;AAEF,QAAA,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;YACzB,IAAI,GAAG,IAAI,KAAK,EAAE;gBAChB,YAAY,CAAC,OAAO,CAAC,CAAC;AACtB,gBAAA,OAAO,EAAE,CAAC;gBACV,OAAO;AACR,aAAA;AAED,YAAA,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;AACR,aAAA;AACD,YAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;;YAGD,MAAM,WAAW,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,GAAG,KAAK,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC;YAE5E,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,WAAW,CAAC,CAAC;YACnF,GAAG,IAAI,WAAW,CAAC;AACrB,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;YACpB,YAAY,CAAC,OAAO,CAAC,CAAC;YACtB,IAAI,GAAG,GAAG,KAAK,EAAE;gBACf,MAAM,CACJ,IAAI,KAAK,CACP,CAAA,4DAAA,EAA+D,GAAG,CAAA,aAAA,EAAgB,KAAK,CAAA,CAAE,CAC1F,CACF,CAAC;AACH,aAAA;AACD,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,KAAI;YACzB,YAAY,CAAC,OAAO,CAAC,CAAC;YACtB,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,SAAC,CAAC,CAAC;AACL,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;AAQG;AACI,eAAe,eAAe,CACnC,MAA6B,EAC7B,MAAc,EACd,QAAyB,EAAA;AAEzB,IAAA,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAA,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC;IAEjC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,KAAI;AAC7C,QAAA,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;AACzB,YAAA,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;AACR,aAAA;AACD,YAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;AAED,YAAA,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,UAAU,EAAE;gBACnC,MAAM,CAAC,IAAI,KAAK,CAAC,4CAA4C,UAAU,CAAA,CAAE,CAAC,CAAC,CAAC;gBAC5E,OAAO;AACR,aAAA;AAED,Y
AAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,EAAE,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;AAC5C,YAAA,GAAG,IAAI,KAAK,CAAC,MAAM,CAAC;AACtB,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;YACpB,OAAO,CAAC,GAAG,CAAC,CAAC;AACf,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;AAC7B,KAAC,CAAC,CAAC;AACL,CAAC;AAyBD;;;;;;;AAOG;AACI,eAAe,qBAAqB,CACzC,EAAyB,EACzB,IAAY,EAAA;IAEZ,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;QAC3C,MAAM,EAAE,GAAGC,aAAE,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAEtC,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,KAAI;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,SAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,KAAI;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,SAAC,CAAC,CAAC;AAEH,QAAA,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAExB,QAAA,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AACd,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;AAIG;AACI,MAAM,MAAM,GAAGC,eAAI,CAAC,SAAS,CAACD,aAAE,CAAC,IAAI,CAAC,CAAC;AAEvC,MAAM,kBAAkB,GAAGA,aAAE,CAAC,gBAAgB;;AC0qBrD;;;AAGG;AACG,MAAO,UAAW,SAAQ,aAAa,CAAA;AAqF3C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;AAEhC,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAAC7B,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;AAC5B,YAAA,IAAI,iBAAiB,IAAI,OAAO,iBAAiB,KAAK,QAAQ,EAAE;gBAC9D,OAAO,GAAG,iBAA2C,CAAC;AACvD,aAAA;YACD,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AAED,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AACrB,QAAA,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,aAAa,EAAE,IAAI,CAAC,cAAc,EAAE;AAC3D,YAAA,IAAI,CAAC,+BAA+B,EAAE,EAAE;QAC1C,IAAI,CAAC,WAAW,GAAG,IAAIZ,MAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AAE9D,QAAA,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAW,CAAC;AACvF,QAAA,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAW,CAAC;KAC1F;AAxKD;;AAEG;AACH,IAAA,IAAW,IAAI,GAAA;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;AAEG;AACH,IAAA,IAAW,aAAa,GAAA;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC
;KAC5B;AA8JD;;;;;;AAMG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;AAMG;AACI,IAAA,WAAW,CAAC,SAAiB,EAAA;AAClC,QAAA,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,SAAS,EACjC,SAAS,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,SAAS,CAC/C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;AAGG;IACI,mBAAmB,GAAA;QACxB,OAAO,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACtD;AAED;;;AAGG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AAED;;;AAGG;IACI,iBAAiB,GAAA;QACtB,OAAO,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACpD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0DG;IACI,MAAM,QAAQ,CACnB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAA+B,EAAE,EAAA;;QAEjC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAE5E,IAAI;AACF,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,CACzC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,cAAc,EAAE;AACd,oBAAA,kBAAkB,EAAEnB,eAAM,GAAG,SAAS,GAAG,OAAO,CAAC,UAAU;iBAC5D,EACD,KAAK,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,SAAS,GAAG,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAC5E,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB,EAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACjC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;YAEH,MAAM,UAAU,GACX,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,EAAA,CAC5F,CAAC;;YAEF,IAAI,CAACA,eAAM,EAAE;AACX,gBAAA,OAAO,UAAU,CAAC;AACnB,aAAA;;;;;;YAOD,IAAI,OAAO,CAAC,gBAAgB,KAAK,SAAS,IAAI,OAAO,CAAC,gBAAgB,GAAG,CAAC,EAAE;;AAE1E,gBAAA,OAAO,CAAC,gBAAgB,GAAG,mCAAmC,CAAC;AAChE,aAAA;AAED,YAAA,IAAI,GAAG,CAAC,aAAa,KAAK,SAAS,EAAE;AACnC,gBAAA,MAAM,IAAI,UAAU,CAAC,CAAA,kEAAA,CAAoE,CAAC,CAAC;AAC5F,aAAA;AAED,YAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;AACb,gBAAA,MAAM,IAAI,UAAU,CAAC,CAAA,wDAAA,CAA0D,CAAC,CAAC;AAClF,aAAA;YAED,OAAO,IAAI,oBAAoB,CAC7B,UAAU,EACV,OAAO,KAAa,KAAoC;;AACtD,gBAAA,MAAM,sBAAsB,GAA+B;oBACzD,qBAAqB,EAAE,OAAO,CAAC,UAAU;AACzC,oBAAA,wBAAwB,EAAE;wBACxB,OAAO,EAAE,OAAO,CAAC,UAAW,CAAC,OAAO,IAAI,GAAG,CAAC,IAAI;AAChD,wBAAA,eAAe,EAAE,OAAO,CAAC,UAAW,CAAC,eAAe;AACpD,wBAAA,WAAW,EAAE,OAAO,CAAC,UAAW,CAAC,WAAW;AAC5C,wBAAA,iBAAiB,EAAE,OAAO,CAAC,UAAW,CAAC,iBAAiB;AACxD,wBAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa;AAC1C,qBAAA;oBACD,KAAK,EAAE,aAAa,CAAC;AACnB,wBAAA,KAAK,EAAE,MAAM,GAAG,GAAG,CAAC,aAAc,GAAG,KAAK;AAC1C,wBAAA,MAAM,EAAE,KAAK;qBACd,CAAC;oBACF,kBAAkB,EAAE,OAAO,CAAC,kBAAkB;oBAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB;oBAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ;oBAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB;iBACrC,CAAC;;;;;;;AASF,gBAAA,OAAO,CACL,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,CAC7B,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,sBAAsB,EACzB,EACF,kBAAmB,CAAC;AACxB,aAAC,EACD,MAAM,EACN,GAAG,CAAC,aAAc,EAClB;gBACE,gBAAgB,EAAE,OAAO,CAAC,gBAAgB;gBAC1C,UAAU,EAAE,OAAO,CAAC,UAAU;AAC/B,aAAA,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb
,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAA6B,EAAE,EAAA;AACjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;gBAChD,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,aAAA,CAAC,CAAC;AACH,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;AACf,YAAA,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;;AAExB,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAAM,iBAAA,IACL,CAAC,CAAC,UAAU,KAAK,GAAG;AACpB,iBAAC,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,sCAAsC;AAC7D,oBAAA,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,yCAAyC,CAAC,EACpE;;AAEA,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAoC,EAAE,EAAA;;AAEtC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;YAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,aAAa,CAC9C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EAAA,EACjC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;YAEH,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,EAC3F,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAA6B,EAAE,EAAA;;AACjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,EAClC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAA6B,EAAE,EAAA;;AAE/B,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA
,EAAA,CAAA,SAAS,MAAK,cAAc,EAAE;gBAC3C,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,wEAAwE;AAClF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,QAAQ,CAAC,OAAA,GAA+B,EAAE,EAAA;AACrD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAC5E,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACpC,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;AAcG;AACI,IAAA,MAAM,cAAc,CACzB,eAAiC,EACjC,UAAqC,EAAE,EAAA;;AAEvC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,eAAe,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAGxC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,WAAW,CACtB,QAAmB,EACnB,UAAkC,EAAE,EAAA;;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CACvC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,OAAO,CAAC,IAAU,EAAE,UAA8B,EAAE,EAAA;;AAC/D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACnC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,KACrD,IAAI,EAAE,UAAU,CAAC,IAAI,CAAC,EAAA,CAAA,CACtB,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CA
AM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,OAAO,CAAC,OAAA,GAA8B,EAAE,EAAA;;AACnD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAA,MAAA,CAAA,MAAA,CAAA,EAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CACX,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,IAAI,EAAE,MAAM,CAAC,EAAE,UAAU,EAAE,QAAQ,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE,EAAA,CACxD,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,kBAAkB,CAAC,cAAuB,EAAA;AAC/C,QAAA,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;KAClD;AAED;;;;;AAKG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAqC,EAAE,EAAA;;AAEvC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,CAAA,MAAA,CAAA,MAAA,CAAA,EAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuEG;AACI,IAAA,MAAM,gBAAgB,CAC3B,UAAkB,EAClB,UAAuC,EAAE,EAAA;AAOzC,QAAA,MAAM,MAAM,GAAyB;AACnC,YAAA,gBAAgB,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;AAC7D,YAAA,aAAa,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;AACvD,YAAA,gBAAgB,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;SAC9D,CAAC;AACF,QAAA,MAAM,MAAM,GAAG,IAAI,0BAA0B,CAAC;AAC5C,YAAA,UAAU,EAAE,MAAM;YAClB,UAAU;YACV,YAAY,EAAE,OAAO,CAAC,YAAY;YAClC,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,UAAU,EAAE,OAAO,CAAC,UAAU;AAC9B,YAAA,uBAAuB,EAAE,OAAO;AACjC,SAAA,CAAC,CAAC;;;AAIH,QAAA,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;AAEpB,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,gBAAgB,CAC3B,MAAc,EACd,UAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,MAAM,EAAA,MAAA,CAAA,MAAA,CAAA,EACnD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACtC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,g
BAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,eAAe,CAC1B,UAAkB,EAClB,UAAsC,EAAE,EAAA;;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EAClD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;AACpE,iBAAA,EACD,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,cAAc,EAAE,OAAO,CAAC,cAAc,IACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,aAAa,CACxB,IAAkD,EAClD,UAA8B,EAAE,EAAA;;AAEhC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,YAAY,CAAC,IAAI,CAAE,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAE3C,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IA8CM,MAAM,gBAAgB,CAC3B,MAAwB,EACxB,MAAe,EACf,MAA6C,EAC7C,MAAA,GAAsC,EAAE,EAAA;AAExC,QAAA,IAAI,MAA0B,CAAC;QAC/B,IAAI,MAAM,GAAG,CAAC,CAAC;QACf,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,MAAM,CAAC;QACrB,IAAI,MAAM,YAAY,MAAM,EAAE;YAC5B,MAAM,GAAG,MAAM,CAAC;AAChB,YAAA,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;AACrB,YAAA,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AACjD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AACjD,YAAA,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AAChD,YAAA,OAAO,GAAI,MAAsC,IAAI,EAAE,CAAC;AACzD,SAAA;AACD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;AACF,YAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,gBAAA,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;AACvB,aAAA;AACD,YAAA,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,EAAE;AACzB,gBAAA,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC;AACvD,aAAA;AACD,YAAA,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;AAC3B,gBAAA,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;AACvD,aAAA;YAED,IAAI,MAAM,GAAG,CAAC,EAAE;AACd,gBAAA,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,IAAI,KAAK,IAAI,KAAK,IAAI,CAAC,EAAE;AACvB,gBAAA,MAAM,IAAI,UAAU,CAAC,qCAAqC,CAAC,CAAC;AAC7
D,aAAA;AAED,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,gBAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,aAAA;;YAGD,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACpC,OAAO,CACV,EAAA,EAAA,cAAc,kCACT,OAAO,CAAC,cAAc,CACtB,EAAA,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;AACH,gBAAA,KAAK,GAAG,QAAQ,CAAC,aAAc,GAAG,MAAM,CAAC;gBACzC,IAAI,KAAK,GAAG,CAAC,EAAE;oBACb,MAAM,IAAI,UAAU,CAClB,CAAU,OAAA,EAAA,MAAM,CAAuC,oCAAA,EAAA,QAAQ,CAAC,aAAc,CAAE,CAAA,CACjF,CAAC;AACH,iBAAA;AACF,aAAA;;YAGD,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI;AACF,oBAAA,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AAC9B,iBAAA;AAAC,gBAAA,OAAO,KAAU,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,CAA0C,uCAAA,EAAA,KAAK,CAAqJ,kJAAA,EAAA,KAAK,CAAC,OAAO,CAAE,CAAA,CACpN,CAAC;AACH,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,MAAM,CAAC,MAAM,GAAG,KAAK,EAAE;AACzB,gBAAA,MAAM,IAAI,UAAU,CAClB,mFAAmF,KAAK,CAAA,CAAE,CAC3F,CAAC;AACH,aAAA;YAED,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;AAC7C,YAAA,KAAK,IAAI,GAAG,GAAG,MAAM,EAAE,GAAG,GAAG,MAAM,GAAG,KAAK,EAAE,GAAG,GAAG,GAAG,GAAG,OAAO,CAAC,SAAS,EAAE;AAC1E,gBAAA,KAAK,CAAC,YAAY,CAAC,YAAW;;AAE5B,oBAAA,IAAI,QAAQ,GAAG,MAAM,GAAG,KAAM,CAAC;AAC/B,oBAAA,IAAI,GAAG,GAAG,OAAO,CAAC,SAAU,GAAG,QAAQ,EAAE;AACvC,wBAAA,QAAQ,GAAG,GAAG,GAAG,OAAO,CAAC,SAAU,CAAC;AACrC,qBAAA;AACD,oBAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,QAAQ,GAAG,GAAG,EAAE;wBACxD,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,gBAAgB,EAAE,OAAO,CAAC,wBAAwB;wBAClD,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;wBAChD,cAAc,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,CACtD;AACF,qBAAA,CAAC,CAAC;AACH,oBAAA,MAAM,MAAM,GAAG,QAAQ,CAAC,kBAAmB,CAAC;AAC5C,oBAAA,MAAM,cAAc,CAAC,MAAM,EAAE,MAAO,EAAE,GAAG,GAAG,MAAM,EAAE,QAAQ,GAAG,MAAM,CAAC,CAAC;;;;AAIvE,oBAAA,gBAAgB,IAAI,QAAQ,GAAG,GAAG,CAAC;oBACnC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;AACvD,qBAAA;AACH,iBAAC,CAAC,CAAC;AACJ,aAAA;AACD,YAAA,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;AACjB,YAAA,OAAO,MAAM,CAAC;AACf,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;IACI,MAAM,cAAc,CACzB,QAAgB,EAChB,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,OAAA,GAA+B,EAAE,EAAA;AAEjC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,KAAK,EAC7C,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,KACV,cAAc,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,OAAO,CAAC,cAAc,CAAA,EACtB,kCAAkC,CAAC,cAAc,CAAC,CAAA,EAAA,CAAA,CAEvD,CAAC;YACH,IAAI,QAAQ,CAAC,kBAAkB,EAAE;gBAC/B,MAAM,qBAAqB,CAAC,QAAQ,CAAC,kBAAkB,EAAE,QAAQ,CAAC,CAAC;AACpE,aAAA;;AAGA,YAAA,QAAgB,CAAC,kBAAkB,GAAG,SAAS,CAAC;AACjD,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IAEO,+BAA+B,GAAA;AACrC,QAAA,IAAI,aAAa,CAAC;AAClB,QAAA,IAAI,QAAQ,CAAC;QACb,IAAI;;;;;;;;YASF,MAAM,SAAS,GAAGtB,mBAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAE7C,YAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;;gBAGjD,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;AACtE,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe
,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;gBAIvC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC9E,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA;;;gBAGL,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;AACtE,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;;AAGD,YAAA,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;AAClD,YAAA,QAAQ,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;;;YAIxC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;YAExC,IAAI,CAAC,aAAa,EAAE;AAClB,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACvD,aAAA;AAED,YAAA,OAAO,EAAE,QAAQ,EAAE,aAAa,EAAE,CAAC;AACpC,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;AAC5F,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACK,IAAA,MAAM,gBAAgB,CAC5B,UAAkB,EAClB,UAAuC,EAAE,EAAA;;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;AACnE,oBAAA,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,aAAa;iBACrD,EACD,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAC5C,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,QAAQ,EAAE,OAAO,CAAC,QAAQ,EACvB,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEsB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,cAAc,CAAC,OAAkC,EAAA;AACtD,QAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;YAC7B,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,gBAAA,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,GAAG,GAAG,8BAA8B,CAEtC,MAAA,CAAA,MAAA,CAAA,EAAA,aAAa,EAAE,IAAI,CAAC,cAAc,EAClC,QAAQ,EAAE,IAAI,CAAC,KAAK,EACpB,YAAY,EAAE,IAAI,CAAC,SAAS,EAC5B,SAAS,EAAE,IAAI,CAAC,UAAU,EAAA,EACvB,OAAO,CAAA,EAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;IACI,MAAM,wBAAwB,CACnC,OAA6C,EAAA;AAE7C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,wBAAwB,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,KAAA,IAAA,IAAP,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,WAAW,EAAA,EAC9B,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,
IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,qBAAqB,CAChC,kBAA0C,EAC1C,OAA0C,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,qBAAqB,CAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EACjC,wBAAwB,EAAE,kBAAkB,CAAC,UAAU,EACvD,sBAAsB,EAAE,kBAAkB,CAAC,UAAU,EACrD,wBAAwB,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,uBAAuB,EACvD,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,YAAY,CACvB,gBAAyB,EACzB,OAAiC,EAAA;AAEjC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,gBAAgB,EACzD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EAAA,EAC9B,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AA4ND;;AAEG;AACG,MAAO,gBAAiB,SAAQ,UAAU,CAAA;AAsE9C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;;YAE5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KACpE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,gBAAgB,CACzB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,C
AAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;;;AAaG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAmC,EAAE,EAAA;;AACvD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC,EAC1C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAAA,EAC3C,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAA,GAA8C,EAAE,EAAA;;AAEhD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oCAAoC,EAAE,OAAO,CAAC,CAAC;AAC3F,QAAA,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;QAC5C,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACxB,cAAc,CAAA,EAAA,EACjB,UAAU,EAAA,CAAA,CACV,CAAC;YACH,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,4EAA4E;AACtF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,IAAI,CAAC,OAAA,GAAiC,EAAE,EAAA;;AACnD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,CAAA,MAAA,CAAA,MAAA,CAAA,EACtC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;AAuBG;IAC
I,MAAM,WAAW,CACtB,IAAqB,EACrB,aAAqB,EACrB,UAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,WAAW,CAAC,aAAa,EAAE,IAAI,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;AACrC,iBAAA,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;IACI,MAAM,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,KAAa,EACb,OAAA,GAA+C,EAAE,EAAA;;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,YAAA,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,kBAAkB,CAAC,SAAS,EAAE,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC3D,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAmkBD;;AAEG;AACG,MAAO,eAAgB,SAAQ,UAAU,CAAA;AA8E7C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;AAC5B,YAAA,IAAI,iBAAiB,IAAI,OAAO,iBAAiB,KAAK,QAAQ,EAAE;gBAC9D,OAAO,GAAG,iBAA2C,CAAC;AACvD,aAAA;YACD,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACj
B,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,IAAIZ,MAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAChE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,eAAe,CACxB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6BG;AACI,IAAA,MAAM,KAAK,CAChB,KAAa,EACb,UAAiC,EAAE,EAAA;;QAEnC,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAE9E,IAAI;YACF,IAAI,CAACnB,eAAM,EAAE;AACX,gBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAC;AAC3E,aAAA;YACD,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,CAC5C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,YAAY,EAAE;AACZ,oBAAA,SAAS,EAAE,KAAK;AAChB,oBAAA,UAAU,EAAE,KAAK;AACjB,oBAAA,kBAAkB,EAAE,oBAAoB,CAAC,OAAO,CAAC,sBAAsB,CAAC;AACxE,oBAAA,mBAAmB,EAAE,oBAAoB,CAAC,OAAO,CAAC,uBAAuB,CAAC;AAC3E,iBAAA,EACD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EAAA,EACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACH,YAAA,OAAO,IAAI,iBAAiB,CAAC,QAAQ,EAAE;gBACrC,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA,CAAC,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;AA0BG;IACI,MAAM,MAAM,CACjB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE,EAAA;;QAEpC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,EAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EAAA,MAAA,CAAA,MAAA,C
AAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;AAiBG;AAEI,IAAA,MAAM,iBAAiB,CAC5B,SAAiB,EACjB,UAA6C,EAAE,EAAA;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EAAE,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACzD,OAAO,CACV,EAAA,EAAA,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,aAAa,EAAA,CAAA,EAE1C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,OAAO;AAChD,oBAAA,qBAAqB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,eAAe;AAChE,oBAAA,iBAAiB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,WAAW;AACxD,oBAAA,uBAAuB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,iBAAiB;AACpE,oBAAA,YAAY,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,aAAa;iBACtD,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,cAAc,EAAE,OAAO,CAAC,cAAc,EACnC,CAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;IACI,MAAM,UAAU,CACrB,OAAe,EACf,IAAqB,EACrB,aAAqB,EACrB,OAAA,GAAsC,EAAE,EAAA;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,EAAE,aAAa,EAAE,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,EACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;AACrC,iBAAA,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;AAoBG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAe,EACf,SAAiB,EACjB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,UAA6C,EAAE,EAAA;AAE/C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,
EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,OAAO,EAAE,CAAC,EAAE,SAAS,EACxE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,WAAW,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,SAAS,GAAG,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAClF,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC5E,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,eAAe,CAC1B,MAAgB,EAChB,UAA2C,EAAE,EAAA;;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,EAAE,MAAM,EAAE,MAAM,EAAE,kBAEhB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,YAAY,CACvB,QAAuB,EACvB,UAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;AACF,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,QAAQ,EAC3D,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,IAAI,CAAC,GAAG,CAAC,eAAe,EAAE;AACxB,gBAAA,GAAG,CAAC,eAAe,GAAG,EAAE,CAAC;AAC1B,aAAA;AAED,YAAA,IAAI,CAAC,GAAG,CAAC,iBAAiB,EAAE;AAC1B,gBAAA,GAAG,CAAC,iBAAiB,GAAG,EAAE,CAAC;AAC5B,aAAA;AAED,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;;AAID;;;;;;;;;;;;;;AAcG;AACI,IAAA,MAAM,UAAU,CACrB,IAAmD,EACnD,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;AACF,YAAA,IAAIrB,eAAM,EAAE;AACV,gBAAA,IAAI,MAAc,CAAC;gBACnB,IAAI,IAAI,YAAY,MAAM,EAAE;oBAC1B,MAAM,GAAG,IAAI,CAAC;AACf,iBAAA;qBAAM,IAAI,IA
AI,YAAY,WAAW,EAAE;AACtC,oBAAA,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5B,iBAAA;AAAM,qBAAA;oBACL,IAAI,GAAG,IAAuB,CAAC;AAC/B,oBAAA,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;AACrE,iBAAA;AAED,gBAAA,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,KAAa,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAC7E,MAAM,CAAC,UAAU,EACjB,cAAc,CACf,CAAC;AACH,aAAA;AAAM,iBAAA;gBACL,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACrC,gBAAA,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,KAAW,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;AAkBG;AACI,IAAA,MAAM,iBAAiB,CAC5B,WAAiD,EACjD,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAC5C,YAAA,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAc,EAAE,IAAY,KAAW,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;AAcG;IACK,MAAM,sBAAsB,CAClC,WAA8D,EAC9D,IAAY,EACZ,UAA0C,EAAE,EAAA;AAE5C,QAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,YAAA,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,GAAG,gCAAgC,EAAE;AACjF,YAAA,MAAM,IAAI,UAAU,CAClB,wCAAwC,gCAAgC,CAAA,CAAE,CAC3E,CAAC;AACH,SAAA;QAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;AACjE,YAAA,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,CAAC;AAC9D,SAAA;AACD,QAAA,IACE,OAAO,CAAC,iBAAiB,GAAG,CAAC;AAC7B,YAAA,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,EAC5D;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,gDAAgD,gCAAgC,CAAA,CAAE,CACnF,CAAC;AACH,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;AAC3B,YAAA,IAAI,IAAI,GAAG,gCAAgC,GAAG,qBAAqB,EAAE;AACnE,gBAAA,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,CAAA,yCAAA,CAA2C,CAAC,CAAC;AAC1E,aAAA;AACD,YAAA,IAAI,IAAI,GAAG,OAAO,CAAC,iBAAiB,EAAE;gBACpC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC,CAAC;AAC5D,gBAAA,IAAI,OAAO,CAAC,SAAS,GAAG,iCAAiC,EAAE;AACzD,oBAAA,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;AACvD,iBAAA;AACF,aAAA;AACF,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;AAC5B,YAAA,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;AAC9B,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;AACF,YAAA,IAAI,IAAI,IAAI,OAAO,CAAC,iBAAiB,EAAE;AACrC,gBAAA,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,CAAC,CAAC;AACtE,aAAA;AAED,YAAA,MAAM,SAAS,GAAW,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;YACzE,IAAI,SAAS,GAAG,qBAAqB,EAAE;gBACrC,MAAM,IAAI,UAAU,CAClB,CAA6D,2DAAA,CAAA;oBAC3D,CAAmC,gCAAA,EAAA,qBAAqB,CAAE,CAAA,CAC7D,CAAC;AACH,aAAA;YAED,MAAM,SAAS,GAAa,EAAE,CAAC;AAC/B,YAAA,MAAM,aAAa,GAAGD,qBAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YAEjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;AAClC,gBAAA,KAAK,CAAC,YAAY,CAAC,YAAyB;oBAC1C,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,E
AAE,CAAC,CAAC,CAAC;AAClD,oBAAA,MAAM,KAAK,GAAG,OAAO,CAAC,SAAU,GAAG,CAAC,CAAC;AACrC,oBAAA,MAAM,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,KAAK,GAAG,OAAO,CAAC,SAAU,CAAC;AACpE,oBAAA,MAAM,aAAa,GAAG,GAAG,GAAG,KAAK,CAAC;AAClC,oBAAA,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACxB,oBAAA,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,CAAC,EAAE,aAAa,EAAE;wBAC/E,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;wBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,qBAAA,CAAC,CAAC;;;oBAGH,gBAAgB,IAAI,aAAa,CAAC;oBAClC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAW,CAAC;AAClB,4BAAA,WAAW,EAAE,gBAAgB;AAC9B,yBAAA,CAAC,CAAC;AACJ,qBAAA;AACH,iBAAC,CAAC,CAAC;AACJ,aAAA;AACD,YAAA,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YAEjB,OAAO,IAAI,CAAC,eAAe,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;AACxD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACI,IAAA,MAAM,UAAU,CACrB,QAAgB,EAChB,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,MAAM,IAAI,GAAG,CAAC,MAAM,MAAM,CAAC,QAAQ,CAAC,EAAE,IAAI,CAAC;YAC3C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAM,EAAE,KAAK,KAAI;AAChB,gBAAA,OAAO,MACL,kBAAkB,CAAC,QAAQ,EAAE;AAC3B,oBAAA,SAAS,EAAE,IAAI;AACf,oBAAA,GAAG,EAAE,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,GAAG,QAAQ;AAC1C,oBAAA,KAAK,EAAE,MAAM;AACd,iBAAA,CAAC,CAAC;AACP,aAAC,EACD,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAEC,OAAO,CAAA,EAAA,EACV,cAAc,EACT,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAG1D,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACI,IAAA,MAAM,YAAY,CACvB,MAAgB,EAChB,UAAA,GAAqB,+BAA+B,EACpD,cAAyB,GAAA,CAAC,EAC1B,OAAA,GAAwC,EAAE,EAAA;AAE1C,QAAA,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;AAC5B,YAAA,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;AAC9B,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IAAI;YACF,IAAI,QAAQ,GAAG,CAAC,CAAC;AACjB,YAAA,MAAM,aAAa,GAAGD,qBAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,SAAS,GAAa,EAAE,CAAC;AAE/B,YAAA,MAAM,SAAS,GAAG,IAAI,eAAe,CACnC,MAAM,EACN,UAAU,EACV,cAAc,EACd,OAAO,IAAI,EAAE,MAAM,KAAI;gBACrB,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACzD,gBAAA,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACxB,gBAAA,QAAQ,EAAE,CAAC;gBAEX,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE;oBAC3C,UAAU,EAAE,OAAO,CAAC,UAAU;oBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;oBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,iBAAA,CAAC,CAAC;;gBAGH,gBAAgB,IAAI,MAAM,CAAC;gBAC3B,IAAI,OAAO,CAAC,UAAU,EAAE;oBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;AACvD,iBAAA;aACF;;;;;AAKD,YAAA,IAAI,CAAC,IAAI,CAAC,CAAC,cAAc,GAAG,CAAC,IAAI,CAAC,CAAC,CACpC,CAAC;AACF,YAAA,MAAM,SAAS,CAAC,EAAE,EAAE,CAAC;YAErB,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACtC,OAAO,CACV,EAAA,EAAA,cAAc,kCACT,OAAQ,CAAC,cAAc,CACvB,EAAA,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI
,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAoaD;;AAEG;AACG,MAAO,cAAe,SAAQ,UAAU,CAAA;AA8D5C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,eAAe,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAChE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,cAAc,CACvB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE,EAAA;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAC9C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI
,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,iBAAiB,CAC5B,IAAY,EACZ,UAA4C,EAAE,EAAA;;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;AACF,YAAA,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;AAC5C,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAC7B,OAAO,CACV,EAAA,EAAA,UAAU,EACV,cAAc,EAAE,cAAc,CAAC,cAAc,IAC7C,CAAC;YACH,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,4EAA4E;AACtF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;IACI,MAAM,WAAW,CACtB,IAAqB,EACrB,MAAc,EACd,KAAa,EACb,OAAA,GAAsC,EAAE,EAAA;;QAExC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,KAAK,EAAE,IAAI,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,UAAkB,EAClB,KAAa,EACb,OAAA,GAA6C,EAAE,EAAA;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;AAC1D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAClD,SAAS,EACT,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC9C,CAAC,EACD,aAAa,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EAE1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,
oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC5E,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,UAAU,CACrB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAAqC,EAAE,EAAA;;QAEvC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,aAAa,CACxB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAAwC,EAAE,EAAA;;QAE1C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;AAC9B,iBAAA,aAAa,iBACZ,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAAA,EACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;IACK,MAAM,qBAAqB,CACjC,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,MAAe,EACf,OAAA,GAAgD,EAAE,EAAA;;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,aAAa,CAC7C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACD;;;;;;;;;;;;;AAaG;IACY,yBAAyB,CACtC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,OAAA,GAAgD,EAAE,EAAA;;AAElD,YAAA,IAAI,gCAAgC,CAAC;AACrC,YAAA,IAAI,CAAC,CA
AC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,gCAAgC,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,qBAAqB,CACjE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;AAC5D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,gCAAgC,CAAA,CAAA,CAAC;AAC9C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;AAMG;AACY,IAAA,kBAAkB,CAC/B,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,UAAgD,EAAE,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAyC,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBALU,MAAM,oBAAoB,WAAA,CAAA;AAMnC,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;AACxD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsEG;AACI,IAAA,cAAc,CACnB,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,UAAyC,EAAE,EAAA;QAE3C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;;AAE9C,QAAA,MAAM,IAAI,GAAG,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;QAC7D,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;gBACtC,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,CAAC,iBAAiB,EAC7E,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,OAAO,EACV,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;AASG;IACI,MAAM,iBAAiB,CAC5B,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,OAAA,GAA4C,EAAE,EAAA;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAE3C,CAAA,EAAA,YAAY,EAAE,YAAY,EAC1B,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;IACK,MAAM,yBAAyB,CACrC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD,EAAA;;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAChG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EACjC,qBAAqB,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,UAAU,EAC1C,wBAAwB,kCACnB,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,UAAU,CAAA,EAAA,EACtB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE5C,CAAA,EAAA,YAAY,EAAE,iBAAiB,EAC/B,KAAK,EAAE,aAAa,CAAC;AACnB,oBAAA,MAAM,EAAE,MAAM;AACd,oBAAA,KAAK,EAAE,KAAK;iBACb,CAAC,EACF,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,EAC9B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BA
Ac,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACD;;;;;;;;;;;;;;;AAeG;IACY,6BAA6B,CAC1C,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD,EAAA;;AAElD,YAAA,IAAI,gCAAgC,CAAC;AACrC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,gCAAgC,GAAG,MAAME,aAAA,CAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;AAC5D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,gCAAgC,CAAA,CAAA,CAAC;AAC9C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;AAOG;AACY,IAAA,sBAAsB,CACnC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,OAAkD,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAyC,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,6BAA6B,CACzE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBANU,MAAM,oBAAoB,WAAA,CAAA;AAOnC,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;AACxD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuEG;IACI,kBAAkB,CACvB,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA6C,EAAE,EAAA;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;;AAG9C,QAAA,MAAM,IAAI,GAAG,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,KAAK,EAAE,YAAY,EAC/D,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,EACV,CAAC;QACH,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;gBACtC,OAAO,IAAI,CAAC,6BAA6B,CACvC,MAAM,EACN,KAAK,EACL,YAAY,EACZ,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EAExB,WAAW,EAAE,QAAQ,CAAC,WAAW,EAAA,EAC9B,OAAO,CAAA,CAEb,CAAC;aACH;SACF,CAAC;KACH;AAED;;;;;;;;;AASG;IACI,MAAM,gCAAgC,CAC3C,MAAc,EACd,KAAa,EACb,eAAuB,EACvB,OAAA,GAA4C,EAAE,EAAA;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,iDAAiD,EACjD,OAAO,CACR,CAAC;QAEF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,eAAe,EACf,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE,EAAA;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,IAAI,EAC3C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAA
O,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,oBAAoB,CAC/B,oBAA8C,EAC9C,cAAuB,EACvB,UAA+C,EAAE,EAAA;;QAEjD,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,oBAAoB,CAAC,oBAAoB,EACzE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,kBAAkB,EAAE,cAAc,EAClC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACI,IAAA,MAAM,oBAAoB,CAC/B,UAAkB,EAClB,UAA+C,EAAE,EAAA;;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;ACv3LD;AAOO,eAAe,aAAa,CACjC,aAA8C,EAAA;IAE9C,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAEtD,MAAM,cAAc,GAAG,MAAM,eAAe,CAC1C,aAAa,CAAC,kBAA2C,EACzD,MAAM,CACP,CAAC;;IAGF,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAEzC,IAAA,OAAO,MAAM,CAAC,QAAQ,EAAE,CAAC;AAC3B,CAAC;AAEK,SAAU,cAAc,CAAC,GAAW,EAAA;AACxC,IAAA,OAAO,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChC;;ACzBA;AAiBA,MAAM,qBAAqB,GAAG,IAAI,CAAC;AACnC,MAAM,eAAe,GAAG,GAAG,CAAC;AAC5B,MAAM,SAAS,GAAG,CAAC,CAAC,CAAC;AAErB;;AAEG;MACU,mBAAmB,CAAA;IAO9B,WACE,CAAA,aAA8C,EAC9C,WAAyC,EAAA;AAEzC,QAAA,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE;;AAEhD,YAAA,MAAM,IAAI,UAAU,CAAC,mEAAmE,CAAC,CAAC;AAC3F,SAAA;QAED,IAAI,CAAC,WAAW,IAAI,WAAW,CAAC,IAAI,KAAK,CAAC,EAAE;;AAE1C,YAAA,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;AAClF,SAAA;AAED,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACnC,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,aAAa,CAAC,WAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3E,IAAI,CAAC,iBAAiB,GAAG,CAAK,EAAA,EAAA,IAAI,CAAC,qBAAqB,CAAA,EAAG,gBAAgB,CAAA,CAAE,CAAC;QAC9E,IAAI,CAAC,mBAAmB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,qBAAqB,IAAI,CAAC;KAChE;;AAGM,IAAA,MAAM,kBAAkB,GAAA;;;QAG7B,IAAI,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,KAAK,iBAAiB,CAAC,aAAa,EAAE;AAC3E,YAAA,MAAM,IAAI,KAAK,CACb,CAAA,kDAAA,EAAqD,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,CAAA,EAAA,CAAI,CAC7F,CAAC;AACH,SAAA;QAED,MAAM,kBAAkB,GAAG,MAAM,aAAa,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnE,MAAM,YAAY,GAAG,kBAAkB;aACpC,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC;AAClC,aAAA,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;AAC7B,aAAA,KAAK,CAAC,CAAC,CAAC,CAAC;AACZ,QAAA,MAAM,gBAAgB,GAAG,YAAY,CAAC,MAAM,CAAC;;;;;QAM7C,IAAI,gBAAgB,KAAK,IAAI,CAAC,WAAW,CAAC,IAAI,IAAI,gBAAgB,KAAK,CAAC,EAAE;AACxE,YAAA,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;AAC7F,SAAA;AAED,QAAA,MAAM,wBAAwB,GAA4B,IAAI,KAAK,CAAC
,gBAAgB,CAAC,CAAC;QACtF,IAAI,0BAA0B,GAAW,CAAC,CAAC;QAC3C,IAAI,uBAAuB,GAAW,CAAC,CAAC;;QAGxC,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,gBAAgB,EAAE,KAAK,EAAE,EAAE;AACrD,YAAA,MAAM,WAAW,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;YACxC,MAAM,uBAAuB,GAAG,EAAsB,CAAC;AACvD,YAAA,uBAAuB,CAAC,OAAO,GAAG,IAAIjB,oBAAW,EAAE,CAAC;YAEpD,MAAM,aAAa,GAAG,WAAW,CAAC,KAAK,CAAC,CAAG,EAAA,gBAAgB,CAAE,CAAA,CAAC,CAAC;YAC/D,IAAI,uBAAuB,GAAG,KAAK,CAAC;YACpC,IAAI,qBAAqB,GAAG,KAAK,CAAC;YAClC,IAAI,aAAa,GAAG,KAAK,CAAC;YAC1B,IAAI,SAAS,GAAG,SAAS,CAAC;AAE1B,YAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAI,CAAC,uBAAuB,EAAE;;oBAE5B,IAAI,YAAY,CAAC,UAAU,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;AACvD,wBAAA,SAAS,GAAG,QAAQ,CAAC,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpE,qBAAA;;;AAID,oBAAA,IAAI,YAAY,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;wBAC7C,uBAAuB,GAAG,IAAI,CAAC;wBAE/B,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;wBACnD,uBAAuB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAA,uBAAuB,CAAC,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;AAC/E,qBAAA;AAED,oBAAA,SAAS;AACV,iBAAA;AAED,gBAAA,IAAI,YAAY,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;;oBAE9B,IAAI,CAAC,qBAAqB,EAAE;wBAC1B,qBAAqB,GAAG,IAAI,CAAC;AAC9B,qBAAA;AAED,oBAAA,SAAS;AACV,iBAAA;;gBAGD,IAAI,CAAC,qBAAqB,EAAE;oBAC1B,IAAI,YAAY,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;;wBAEtD,MAAM,IAAI,KAAK,CACb,CAAA,oCAAA,EAAuC,YAAY,CAAoC,iCAAA,EAAA,qBAAqB,CAAI,EAAA,CAAA,CACjH,CAAC;AACH,qBAAA;;oBAGD,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC;AACzD,oBAAA,uBAAuB,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,eAAe,CAAC,eAAe,EAAE;AACjD,wBAAA,uBAAuB,CAAC,SAAS,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;wBAC9C,aAAa,GAAG,IAAI,CAAC;AACtB,qBAAA;AACF,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,IAAI,CAAC,uBAAuB,CAAC,UAAU,EAAE;AACvC,wBAAA,uBAAuB,CAAC,UAAU,GAAG,EAAE,CAAC;AACzC,qBAAA;AAED,oBAAA,uBAAuB,CAAC,UAAU,IAAI,YAAY,CAAC;AACpD,iBAAA;AACF,aAAA;;;;;YAMD,IACE,SAAS,KAAK,SAAS;AACvB,gBAAA,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC;AAC3B,gBAAA,SAAS,IAAI,CAAC;AACd,gBAAA,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI;AACjC,gBAAA,wBAAwB,CAAC,SAAS,CAAC,KAAK,SAAS,EACjD;gBACA,uBAAuB,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC;AACpE,gBAAA,wBAAwB,CAAC,SAAS,CAAC,GAAG,uBAAuB,CAAC;AAC/D,aAAA;AAAM,iBAAA;gBACL,MAAM,CAAC,KAAK,CACV,CAAA,aAAA,EAAgB,KAAK,CAAuE,oEAAA,EAAA,SAAS,CAAE,CAAA,CACxG,CAAC;AACH,aAAA;AAED,YAAA,IAAI,aAAa,EAAE;AACjB,gBAAA,uBAAuB,EAAE,CAAC;AAC3B,aAAA;AAAM,iBAAA;AACL,gBAAA,0BAA0B,EAAE,CAAC;AAC9B,aAAA;AACF,SAAA;QAED,OAAO;AACL,YAAA,YAAY,EAAE,wBAAwB;AACtC,YAAA,0BAA0B,EAAE,0BAA0B;AACtD,YAAA,uBAAuB,EAAE,uBAAuB;SACjD,CAAC;KACH;AACF;;ACrLD;AACA;AAEA,IAAK,eAGJ,CAAA;AAHD,CAAA,UAAK,eAAe,EAAA;AAClB,IAAA,eAAA,CAAA,eAAA,CAAA,QAAA,CAAA,GAAA,CAAA,CAAA,GAAA,QAAM,CAAA;AACN,IAAA,eAAA,CAAA,eAAA,CAAA,UAAA,CAAA,GAAA,CAAA,CAAA,GAAA,UAAQ,CAAA;AACV,CAAC,EAHI,eAAe,KAAf,eAAe,GAGnB,EAAA,CAAA,CAAA,CAAA;AAID;;AAEG;MACU,KAAK,CAAA;AAChB;;;;;AAKG;AACI,IAAA,aAAa,IAAI,CAAC,GAAW,EAAA;AAClC,QAAA,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,KAAI;AACnC,YAAA,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,QAAQ,EAAE;gBAC/E,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;AACxC,gBAAA,OAAO,EAAE,CAAC;AACX,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,CAAC,aAAa,CAAC,GAAG,EAAE,MAAK;oBAC3B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;AACxC,oBAAA,OAAO,EAAE,CAAC;AACZ,iBAAC,CAAC,CAAC;AACJ,aAAA;AACH,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;AACI,IAAA,aAAa,MAAM,CAAC,GAAW,EAAA;AACpC,QAAA,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,KAAI;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC
,MAAM,EAAE;AAC7C,gBAAA,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;AAC3B,aAAA;AACD,YAAA,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACtB,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;KACJ;AAKO,IAAA,OAAO,aAAa,CAAC,GAAW,EAAE,OAAiB,EAAA;QACzD,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;YACrC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;AACjC,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACnC,SAAA;KACF;IAEO,OAAO,eAAe,CAAC,GAAW,EAAA;AACxC,QAAA,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YACvE,MAAM,OAAO,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;YAC5C,YAAY,CAAC,MAAK;AAChB,gBAAA,OAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACtB,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;;AAlBc,KAAI,CAAA,IAAA,GAAuC,EAAE,CAAC;AAC9C,KAAS,CAAA,SAAA,GAAkC,EAAE;;AClD9D;AAoDA;;;AAGG;MACU,SAAS,CAAA;AAKpB,IAAA,WAAA,GAAA;QAHiB,IAAK,CAAA,KAAA,GAAW,OAAO,CAAC;AAIvC,QAAA,IAAI,CAAC,YAAY,GAAG,IAAI,iBAAiB,EAAE,CAAC;KAC7C;AAED;;;;AAIG;IACI,uBAAuB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,uBAAuB,EAAE,CAAC;KACpD;AAED;;AAEG;IACI,kBAAkB,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,CAAC;KAC/C;AAED;;AAEG;IACI,cAAc,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,CAAC;KAC3C;AAEO,IAAA,MAAM,qBAAqB,CACjC,UAA2B,EAC3B,sBAA2C,EAAA;QAE3C,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAE7B,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,gBAAgB,CAAC,UAAU,CAAC,CAAC;YAC/C,MAAM,sBAAsB,EAAE,CAAC;AAC/B,YAAA,IAAI,CAAC,YAAY,CAAC,iBAAiB,CAAC,UAAU,CAAC,CAAC;AACjD,SAAA;AAAS,gBAAA;YACR,MAAM,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AAChC,SAAA;KACF;AAEO,IAAA,YAAY,CAAC,SAAqC,EAAA;AACxD,QAAA,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;AACnB,YAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;AAC5B,SAAA;AACD,QAAA,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS,EAAE;YAChC,MAAM,IAAI,UAAU,CAClB,CAAA,sFAAA,EAAyF,IAAI,CAAC,SAAS,CAAc,YAAA,CAAA,CACtH,CAAC;AACH,SAAA;KACF;AAqCM,IAAA,MAAM,UAAU,CACrB,eAAoC,EACpC,mBAKa,EACb,OAA2B,EAAA;AAE3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,UAA8E,CAAC;QAEnF,IACE,OAAO,eAAe,KAAK,QAAQ;AACnC,aAAC,CAACJ,eAAM,IAAI,mBAAmB,YAAY,0BAA0B;AACnE,gBAAA,mBAAmB,YAAY,mBAAmB;AAClD,gBAAAe,0BAAiB,CAAC,mBAAmB,CAAC,CAAC,EACzC;;YAEA,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,mBAAmB,CAAC;AAClC,SAAA;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;;AAEhD,YAAA,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;AAC1B,YAAA,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,OAAO,GAAG,mBAAwC,CAAC;AACpD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;AACH,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;YAC5B,MAAM,IAAI,CAAC,qBAAqB,CAC9B;AACE,gBAAA,GAAG,EAAE,GAAG;AACR,gBAAA,UAAU,EAAE,UAAU;aACvB,EACD,YAAW;AACT,gBAAA,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAC5E,cAAc,CACf,CAAC;AACJ,aAAC,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEM,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IAgDM,MAAM,iBAAiB,CAC5B,eAAoC,EACpC,gBAIc,EACd,aAA+C,EAC/C,OAA4B,EAAA;AAE5B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,UAA8E,CAAC;AACnF,QAAA,IAAI,IAAgB,CAAC;QAErB,IACE,OAAO,eAAe,KAAK,QAAQ;AACnC,aAAC,CAACrB,eAAM,IAAI,gBAAgB,YAAY,0BAA0B;AAChE,gBAAA,gBAAgB,YAAY,mBAAmB;AAC/C,gBAAAe,0BAAiB,CAAC,gBAAgB,CAAC,CAAC,EACtC;;YAEA,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,gBAGM,CAAC;YACpB,IAAI,GAAG,aAA2B,CAAC;AACpC,SAAA;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;;AAEhD,YAAA,GAAG,GAAG,eAAe,CAAC,GAAG
,CAAC;AAC1B,YAAA,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,IAAI,GAAG,gBAA8B,CAAC;YACtC,OAAO,GAAG,aAAmC,CAAC;AAC/C,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;AACH,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAE1F,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,eAAe,CAAC,CAAC;YACnC,MAAM,IAAI,CAAC,qBAAqB,CAC9B;AACE,gBAAA,GAAG,EAAE,GAAG;AACR,gBAAA,UAAU,EAAE,UAAU;aACvB,EACD,YAAW;gBACT,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,aAAa,CACnF,IAAI,EACJ,cAAc,CACf,CAAC;AACJ,aAAC,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEM,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAED;;;AAGG;AACH,MAAM,iBAAiB,CAAA;AASrB,IAAA,WAAA,GAAA;AACE,QAAA,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;AACxB,QAAA,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;AAEf,QAAA,MAAM,QAAQ,GAAGD,qBAAY,EAAE,CAAC;;AAGhC,QAAA,IAAI,CAAC,QAAQ,GAAG,CAAS,MAAA,EAAA,QAAQ,EAAE,CAAC;;;;QAIpC,IAAI,CAAC,gBAAgB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,QAAQ,GAAG,gBAAgB,CAAA,EAAG,eAAe,CAAC,YAAY,qBAAqB,gBAAgB,CAAA,EAAG,eAAe,CAAC,yBAAyB,UAAU,CAAC;;QAExL,IAAI,CAAC,oBAAoB,GAAG,CAAA,0BAAA,EAA6B,IAAI,CAAC,QAAQ,EAAE,CAAC;;QAEzE,IAAI,CAAC,kBAAkB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,QAAQ,IAAI,CAAC;AAEjD,QAAA,IAAI,CAAC,WAAW,GAAG,IAAI,GAAG,EAAE,CAAC;KAC9B;AAED;;;;;;AAMG;AACI,IAAA,cAAc,CACnB,UAA8E,EAAA;AAE9E,QAAA,MAAM,gBAAgB,GAAG,UAAU,YAAY,mBAAmB,CAAC;AACnE,QAAA,MAAM,mBAAmB,GAAG,CAAC,IAAI,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3D,QAAA,MAAM,SAAS,GAA2B,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAEzE,SAAS,CAAC,CAAC,CAAC,GAAGT,8BAAqB,EAAE,CAAC;QACvC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,8BAA8B,EAAE,CAAC;QACpD,IAAI,CAAC,gBAAgB,EAAE;AACrB,YAAA,SAAS,CAAC,CAAC,CAAC,GAAGI,0BAAiB,CAAC,UAAU,CAAC;kBACxC,gBAAgB,CACdmB,wCAA+B,CAAC,UAAU,EAAE,kBAAkB,CAAC,EAC/D,UAAU,CACX;kBACD,UAAU,CAAC;AAChB,SAAA;AACD,QAAA,SAAS,CAAC,mBAAmB,GAAG,CAAC,CAAC,GAAG,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC;AAEjF,QAAA,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;KACpC;AAEM,IAAA,sBAAsB,CAAC,OAAoB,EAAA;;QAEhD,IAAI,CAAC,IAAI,IAAI;AACX,YAAA,IAAI,CAAC,gBAAgB;AACrB,YAAA,CAAA,EAAG,eAAe,CAAC,UAAU,KAAK,IAAI,CAAC,cAAc,CAAE,CAAA;YACvD,EAAE;AACF,YAAA,CAAA,EAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,kBAAkB,CAChD,OAAO,CAAC,GAAG,CACZ,CAAI,CAAA,EAAA,gBAAgB,GAAG,gBAAgB,CAAA,CAAE;AAC3C,SAAA,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAEzB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;AACnD,YAAA,IAAI,CAAC,IAAI,IAAI,CAAA,EAAG,MAAM,CAAC,IAAI,CAAK,EAAA,EAAA,MAAM,CAAC,KAAK,CAAG,EAAA,gBAAgB,EAAE,CAAC;AACnE,SAAA;AAED,QAAA,IAAI,CAAC,IAAI,IAAI,gBAAgB,CAAC;;;KAG/B;AAEM,IAAA,gBAAgB,CAAC,UAA2B,EAAA;AACjD,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,iBAAiB,EAAE;AAC5C,YAAA,MAAM,IAAI,UAAU,CAAC,iBAAiB,iBAAiB,CAAA,+BAAA,CAAiC,CAAC,CAAC;AAC3F,SAAA;;QAGD,MAAM,IAAI,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AACxC,QAAA,IAAI,CAAC,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACxB,MAAM,IAAI,UAAU,CAAC,CAAA,8BAAA,EAAiC,UAAU,CAAC,GAAG,CAAG,CAAA,CAAA,CAAC,CAAC;AAC1E,SAAA;KACF;AAEM,IAAA,iBAAiB,CAAC,UAA2B,EAAA;QAClD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;QACtD,IAAI,CAAC,cAAc,EAAE,CAAC;KACvB;;IAGM,kBAAkB,GAAA;QACvB,OAAO,CAAA,EAAG,IAAI,CAAC,IAAI,CAAA,EAAG,IAAI,CAAC,kBAAkB,CAAA,EAAG,gBAAgB,CAAA,CAAE,CAAC;KACpE;IAEM,uBAAuB,GAAA;QAC5B,OAAO,IAAI,CAAC,oBAAoB,CAAC;KAClC;IAEM,cAAc,GAAA;QACnB,OAAO,IAAI,CAAC,WAAW,CAAC;KACzB;AACF,CAAA;AAED,MAAM,0BAA2B,SAAQjC,0BAAiB,CAAA;AAQxD,IAAA,WAAA,CACE,YAA+B,EAC/B,UAAyB,EACzB,OAA6B,EAAA;AAE7B,QAAA,KAAK,CAAC,UAAU
,EAAE,OAAO,CAAC,CAAC;AAXZ,QAAA,IAAA,CAAA,aAAa,GAA0B;YACtD,OAAO,EAAE,IAAIkC,oBAAW,EAAE;AAC1B,YAAA,MAAM,EAAE,GAAG;YACX,OAAO,EAAE,IAAI/B,oBAAW,EAAE;SAC3B,CAAC;AASA,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,MAAM,IAAI,CAAC,YAAY,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC;AAExD,QAAA,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;AACF,CAAA;AAED,MAAM,iCAAiC,CAAA;AAGrC,IAAA,WAAA,CAAY,YAA+B,EAAA;AACzC,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;QAE7B,OAAO,IAAI,0BAA0B,CAAC,IAAI,CAAC,YAAY,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;KAC/E;AACF,CAAA;AAED,MAAM,uBAAwB,SAAQH,0BAAiB,CAAA;;;IAGrD,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;IAEM,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,IAAI,aAAa,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,eAAe,CAAC,YAAY,CAAC,EAAE;AACrD,gBAAA,aAAa,GAAG,MAAM,CAAC,IAAI,CAAC;AAC7B,aAAA;AACF,SAAA;QAED,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;AACvC,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF,CAAA;AAED,MAAM,8BAA8B,CAAA;IAC3B,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;AACpE,QAAA,OAAO,IAAI,uBAAuB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KACzD;AACF;;AC9fD;AAuDA;;;;AAIG;MACU,eAAe,CAAA;IA8B1B,WACE,CAAA,GAAW,EACX,oBAIgB;;;IAGhB,OAAgC,EAAA;AAEhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;AACjC,SAAA;aAAM,IAAI,CAAC,oBAAoB,EAAE;;YAEhC,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA;AACL,YAAA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;AACvD,SAAA;AAED,QAAA,MAAM,oBAAoB,GAAG,IAAI,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,sBAAsB,EAAE,CAAC,CAAC;AAE9F,QAAA,MAAM,IAAI,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;AAC7B,QAAA,IAAI,IAAI,IAAI,IAAI,KAAK,GAAG,EAAE;;YAExB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,yBAAyB,GAAG,IAAI,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACpE,SAAA;KACF;AAED;;;AAGG;IACI,WAAW,GAAA;QAChB,OAAO,IAAI,SAAS,EAAE,CAAC;KACxB;AAsCM,IAAA,MAAM,WAAW,CACtB,iBAA0C,EAC1C,mBAKa;;;IAGb,OAA2B,EAAA;AAE3B,QAAA,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;AAC9B,QAAA,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;AAC/C,YAAA,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAsC,EAAE,OAAO,CAAC,CAAC;AAC1F,aAAA;AAAM,iBAAA;gBACL,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAwC,CAAC,CAAC;AACnF,aAAA;AACF,SAAA;AACD,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;KAChC;AAkDM,IAAA,MAAM,kBAAkB,CAC7B,iBAA0C,EAC1C,gBAIc,EACd,aAA+C;;;IAG/C,OAA4B,EAAA;AAE5B,QAAA,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;AAC9B,QAAA,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;AAC/C,YAAA,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;AACvC,gBAAA,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAAmC,EACnC,aAA2B,EAC3B,OAAO,CACR,CAAC;AACH,aAAA;AAAM,iBAAA;gBACL,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAA8B,EAC9B,aAAmC,CACpC,CAAC;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;KAChC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCG;AACI,IAAA,MAAM,WAAW,CACtB,YAAuB,EACvB,UAA8C,EAAE,EAAA;QAEhD,IAAI,CAAC,YAAY,IAAI,YAAY,CAAC,cAAc,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE;AAC7D,YAAA,MAAM,IAAI,UAAU,CAAC,wDAAwD,CAAC,CAAC;AAChF,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;AACF,YAAA,MAAM,gBAAgB,GAAG,YAAY,CAAC,kBAAkB,EAAE,CAAC;;AAG3D,YAAA,MAAM,gBAAgB,GACpB,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC9C,cAAc,CAAC,gBAAgB,CAAC,EAChC,YAAY,CAAC,uBAAuB,EAAE,EACtC,gBAAgB,EAEX,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;;AAGJ,YAAA,MAAM,mBAAmB,GAAG,
IAAI,mBAAmB,CACjD,gBAAgB,EAChB,YAAY,CAAC,cAAc,EAAE,CAC9B,CAAC;AACF,YAAA,MAAM,eAAe,GAAG,MAAM,mBAAmB,CAAC,kBAAkB,EAAE,CAAC;AAEvE,YAAA,MAAM,GAAG,GAAiC;gBACxC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,WAAW,EAAE,gBAAgB,CAAC,WAAW;gBACzC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,eAAe,EAAE,gBAAgB,CAAC,eAAe;gBACjD,OAAO,EAAE,gBAAgB,CAAC,OAAO;gBACjC,YAAY,EAAE,eAAe,CAAC,YAAY;gBAC1C,0BAA0B,EAAE,eAAe,CAAC,0BAA0B;gBACtE,uBAAuB,EAAE,eAAe,CAAC,uBAAuB;aACjE,CAAC;AAEF,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEoB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;AC+QD;;AAEG;AACG,MAAO,eAAgB,SAAQ,aAAa,CAAA;IAgEhD,WACE,CAAA,qBAA6B,EAC7B,mCAKgB;;;IAGhB,OAAgC,EAAA;AAEhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;AAC5B,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;AAE1D,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;AACF,oBAAA,GAAG,GAAG,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,CAAC;AAE7E,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC;wBACtE,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAC;AAC5E,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AACrB,QAAA,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,uBAAuB,EAAE,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAClE;AApID;;AAEG;AACH,IAAA,IAAW,aAAa,GAAA;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;KAC5B;AAiID;;;;;;;;;;;;;;;;AAgBG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAkC,EAAE,EAAA;AACtD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;;;AAGF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACpC,OAAO,CAAA,EACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAA,GAAkC,EAAE,EAAA;;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,S
AAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,wBAAwB,EAAE;gBACrD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EACL,iFAAiF;AACpF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAkC,EAAE,EAAA;AACtD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,aAAA,CAAC,CAAC;AACH,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;AACf,YAAA,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,sDAAsD;AAChE,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,aAAa,CAAC,QAAgB,EAAA;AACnC,QAAA,OAAO,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACvF;AAED;;;;AAIG;AACI,IAAA,mBAAmB,CAAC,QAAgB,EAAA;AACzC,QAAA,OAAO,IAAI,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC7F;AAED;;;;;;;;;;;;;;AAcG;AACI,IAAA,kBAAkB,CAAC,QAAgB,EAAA;AACxC,QAAA,OAAO,IAAI,eAAe,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC5F;AAED;;;;AAIG;AACI,IAAA,iBAAiB,CAAC,QAAgB,EAAA;AACvC,QAAA,OAAO,IAAI,cAAc,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC3F;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAyC,EAAE,EAAA;AAE3C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;QACtF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,OAAO,CAAC,UAAU,CAAA,EAClB,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,MAAM,CACjB,OAAA,GAAwC,EAAE,EAAA;AAE1C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CACvC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CA
AC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,gCAAgC,EAAE,OAAO,CAAC,CAAC;QAEvF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,iEAAiE;AAC3E,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,WAAW,CACtB,QAAmB,EACnB,UAAuC,EAAE,EAAA;AAEzC,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,UAAU,CAAC,iBAAiB,EAAE;AACxC,YAAA,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;AACH,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EAAE,OAAO,CAAC,UAAU,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,eAAe,CAC1B,OAAA,GAA2C,EAAE,EAAA;AAE7C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QAExF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAA,MAAA,CAAA,MAAA,CAAA,EAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACtC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AAEH,YAAA,MAAM,GAAG,GAAqC;gBAC5C,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,gBAAgB,EAAE,QAAQ,CAAC,gBAAgB;gBAC3C,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,YAAY,EAAE,QAAQ,CAAC,YAAY;gBACnC,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe;AACzC,gBAAA,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,QAAQ,CAAC,OAAO;aAC1B,CAAC;AAEF,YAAA,KAAK,MAAM,UAAU,IAAI,QAAQ,EAAE;gBACjC,IAAI,YAAY,GAAQ,SAAS,CAAC;gBAClC,IAAI,UAAU,CAAC,YAAY,EAAE;AAC3B,oBAAA,YAAY,GAAG;AACb,wBAAA,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;qBACjD,CAAC;AAEF,oBAAA,IAAI,UAAU,CAAC,YAAY,CAAC,SAAS,EAAE;AACrC,wBAAA,YAAY,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;AACtE,qBAAA;AAED,oBAAA,IAAI,UAAU,CAAC,YAAY,CAAC,QAAQ,EAAE;AACpC,wBAAA,YAAY,CAAC,QAAQ,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;AACpE,qBAAA;AACF,iBAAA;AAED,gBAAA,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC;oBACzB,YAAY;oBACZ,EAAE,EAAE,UAAU,CAAC,EAAE;AAClB,iBAAA,CAAC,CAAC;AACJ,aAAA;AAED,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAA
C,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;AAgBG;IACI,MAAM,eAAe,CAC1B,MAAyB,EACzB,YAAiC,EACjC,UAA2C,EAAE,EAAA;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,GAAG,GAA4B,EAAE,CAAC;AACxC,YAAA,KAAK,MAAM,UAAU,IAAI,YAAY,IAAI,EAAE,EAAE;gBAC3C,GAAG,CAAC,IAAI,CAAC;AACP,oBAAA,YAAY,EAAE;AACZ,wBAAA,SAAS,EAAE,UAAU,CAAC,YAAY,CAAC,SAAS;8BACxC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC;AACzD,8BAAE,EAAE;AACN,wBAAA,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;AAChD,wBAAA,QAAQ,EAAE,UAAU,CAAC,YAAY,CAAC,QAAQ;8BACtC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;AACxD,8BAAE,EAAE;AACP,qBAAA;oBACD,EAAE,EAAE,UAAU,CAAC,EAAE;AAClB,iBAAA,CAAC,CAAC;AACJ,aAAA;AAED,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,EACN,YAAY,EAAE,GAAG,EACjB,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,kBAAkB,CAAC,cAAuB,EAAA;AAC/C,QAAA,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;KAClD;AAED;;;;;;;;;;;;;;;;;;;;;AAqBG;IACI,MAAM,eAAe,CAC1B,QAAgB,EAChB,IAAqB,EACrB,aAAqB,EACrB,OAAA,GAAkC,EAAE,EAAA;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,CAAC;AAC1D,YAAA,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,IAAI,EAAE,aAAa,EAAE,cAAc,CAAC,CAAC;YACnF,OAAO;gBACL,eAAe;gBACf,QAAQ;aACT,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,UAAU,CACrB,QAAgB,EAChB,UAAsC,EAAE,EAAA;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YAC9C,IAAI,OAAO,CAAC,SAAS,EAAE;gBACrB,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;AACxD,aAAA;AACD,YAAA,OAAO,MAAM,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AAChD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACK,IAAA,MAAM,mBAAmB,CAC/B,MAAe,EACf,UAA4C,EAAE,EAAA;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAC9D,MAAM,EAAA,EACH,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,KACX,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACJ,QAAQ,CAAC,SAAS,CACrB,EAAA,EAAA,UAAU,EAAE,qCAAqC,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,EAElF,CAAA,EAAA,OAAO,kCACF,QAAQ,CAAC,OAAO,CACnB,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AAC5D,wBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;AACF,wBAAA,OAAO,QAAQ,CAAC;qBACjB,CAAC,MA
EL,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;IACK,MAAM,wBAAwB,CACpC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE,EAAA;;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QACF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,wBAAwB,CAAC,SAAS,gCAC7E,MAAM,EAAA,EACH,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,KACX,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACJ,QAAQ,CAAC,SAAS,CACrB,EAAA,EAAA,UAAU,EAAE,0CAA0C,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,EAEvF,CAAA,EAAA,OAAO,kCACF,QAAQ,CAAC,OAAO,CACnB,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AAC5D,wBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;AACF,wBAAA,OAAO,QAAQ,CAAC;AAClB,qBAAC,CAAC,EACF,YAAY,EAAE,CAAA,EAAA,GAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,KAAI;AACtE,wBAAA,MAAM,UAAU,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACX,kBAAkB,CAAA,EAAA,EACrB,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC,GAChD,CAAC;AACF,wBAAA,OAAO,UAAU,CAAC;qBACnB,CAAC,MAEL,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACY,IAAA,YAAY,CACzB,MAAe,EACf,OAAA,GAA4C,EAAE,EAAA;;AAE9C,YAAA,IAAI,4BAA4B,CAAC;AACjC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,4BAA4B,GAAG,MAAME,aAAA,CAAA,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;AAC/E,oBAAA,MAAM,GAAG,4BAA4B,CAAC,iBAAiB,CAAC;AACxD,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,4BAA4B,CAAA,CAAA,CAAC;AAC1C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;AAIG;IACY,SAAS,CACtB,UAA4C,EAAE,EAAA;;;AAE9C,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAiD,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAAxE,MAAM,4BAA4B,WAAA,CAAA;AAC3C,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,4BAA4B,CAAC,OAAO,CAAC,SAAS,CAAA,CAAA,CAAA,CAAC;AACvD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqEG;IACI,aAAa,CAClB,UAAqC,EAAE,EAAA;QAEvC,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;AAClC,YAAA,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAClC,SAAA;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;AACtC,YAAA,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;AACrC,SAAA;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;AACrC,YAAA,OAAO,CAA
C,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACpC,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;AACD,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,GAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,EACnD,CAAC;;QAGF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;QAC5C,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,cAAc,EACjB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;;AAYG;AACY,IAAA,qBAAqB,CAClC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE,EAAA;;AAE9C,YAAA,IAAI,iCAAiC,CAAC;AACtC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,iCAAiC,GAAG,MAAAT,aAAA,CAAM,IAAI,CAAC,wBAAwB,CACrE,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,iCAAiC,CAAC,iBAAiB,CAAC;AAC7D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,iCAAiC,CAAA,CAAA,CAAC;AAC/C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;AAKG;AACY,IAAA,oBAAoB,CACjC,SAAiB,EACjB,OAAA,GAA4C,EAAE,EAAA;;;AAE9C,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAsD,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,qBAAqB,CAC9E,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,iCAAiC,WAAA,CAAA;AAKhD,oBAAA,MAAM,OAAO,GAAG,iCAAiC,CAAC,OAAO,CAAC;oBAC1D,IAAI,OAAO,CAAC,YAAY,EAAE;AACxB,wBAAA,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,YAAY,EAAE;AACzC,4BAAA,MAAA,MAAAA,aAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,IAAI,EAAE,QAAQ,EACX,EAAA,MAAM,EACV,CAAC;AACH,yBAAA;AACF,qBAAA;AACD,oBAAA,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,SAAS,EAAE;AACpC,wBAAA,MAAA,MAAAA,aAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAQ,IAAI,EAAE,MAAM,EAAK,EAAA,IAAI,EAAE,CAAC;AACjC,qBAAA;AACF,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6EG;AACI,IAAA,oBAAoB,CACzB,SAAiB,EACjB,OAAA,GAAqC,EAAE,EAAA;QAKvC,IAAI,SAAS,KAAK,EAAE,EAAE;AACpB,YAAA,MAAM,IAAI,UAAU,CAAC,iDAAiD,CAAC,CAAC;AACzE,SAAA;QAED,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;AAClC,YAAA,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAClC,SAAA;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;AACtC,YAAA,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;AACrC,SAAA;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;AACrC,YAAA,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACpC,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;AACD,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,GAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,EACnD,CAAC;;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;QAClE,OAAO;AAC
L;;AAEG;AACH,YAAA,MAAM,IAAI,GAAA;AACR,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,qBAAqB,CAAC,SAAS,EAAE,QAAQ,CAAC,iBAAiB,EACrE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,cAAc,EACjB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;;;;;;AAgBG;IACK,MAAM,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE,EAAA;AAEpD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,EAAA,EAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,CAAA,EAAA,EACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,CAAA,EAAA,GAAA,IAAI,CAAC,IAAI,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1C,qBAAA;AACD,oBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAY,IAAI,CAAA,EAAA,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAG,CAAA,CAAA;iBACvD,CAAC,GACH,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEF,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACY,IAAA,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE,EAAA;;AAEpD,YAAA,IAAI,QAAQ,CAAC;AACb,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,QAAQ,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;AACtC,oBAAA,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,MAAM,MAAAA,aAAA,CAAA,QAAQ,CAAA,CAAC;AAChB,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;AAQG;AACY,IAAA,oBAAoB,CACjC,sBAA8B,EAC9B,OAAA,GAAkD,EAAE,EAAA;;;AAEpD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;AACtB,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA4EG;AACI,IAAA,eAAe,CACpB,sBAA8B,EAC9B,OAAA,GAA0C,EAAE,EAAA;;AAG5C,QAAA,MAAM,kBAAkB,GAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,EACpF,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;IAEO,uBAAuB,GAAA;AAC7B,QAAA,IAAI,aAAa,CAAC;QAClB,IAAI;;;;;;YAOF,MAAM,SAAS,GAAGjC,mBAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAE7C,YAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;;;AAIjD,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,
aAAA;AAAM,iBAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;AAIvC,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,aAAA;AAAM,iBAAA;;;AAGL,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,aAAA;;AAGD,YAAA,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAElD,IAAI,CAAC,aAAa,EAAE;AAClB,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACvD,aAAA;AAED,YAAA,OAAO,aAAa,CAAC;AACtB,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;AAC/E,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,cAAc,CAAC,OAAuC,EAAA;AAC3D,QAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;YAC7B,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,gBAAA,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,IAC/B,OAAO,CAAA,EAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;KACJ;AAED;;;;;;AAMG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AACF;;AC9lED;AACA;AAEA;;;;;;;;AAQG;MACU,qBAAqB,CAAA;AAAlC,IAAA,WAAA,GAAA;AA4GE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;KA0DzC;AApOC;;;;AAIG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;AAE1D,QAAA,KAAK,MAAM,CAAC,IAAI,WAAW,EAAE;AAC3B,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC3C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACrC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACnD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC7C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAA,CAAE,CAAC,CAAC;AAC9D,aAAA;AACF,SAAA;AAED,QAAA,OAAO,qBAAqB,CAAC;KAC9B;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAAyC,EAAA;AAC1D,QAAA,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAC1D,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;AAC5C,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,
CAAC,GAAG,EAAE;AACtB,YAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;AACtC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACpD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;AAC9C,SAAA;AACD,QAAA,OAAO,qBAAqB,CAAC;KAC9B;AAmED;;;;;;;;;AASG;IACI,QAAQ,GAAA;;;;QAIb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;ACjPD;AACA;AAEA;;;;;;;;AAQG;MACU,uBAAuB,CAAA;AAApC,IAAA,WAAA,GAAA;AA6BE;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAS,CAAA,SAAA,GAAY,KAAK,CAAC;AAElC;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;KAqBhC;AA9DC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,aAAqB,EAAA;AACvC,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAE9D,QAAA,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;AAC7B,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,SAAS,GAAG,IAAI,CAAC;oBACzC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,0BAA0B,CAAC,CAAA,CAAE,CAAC,CAAC;AACvD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAiBD;;;;;AAKG;IACI,QAAQ,GAAA;QACb,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,IAAI,CAAC,SAAS,EAAE;AAClB,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;AACD,QAAA,OAAO,aAAa,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC/B;AACF;;AC3ED;AACA;AAEA;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAA/B,IAAA,WAAA,GAAA;AAgCE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;KAsB/B;AAvEC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,QAAgB,EAAA;AAClC,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEpD,QAAA,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE;AACxB,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;A
ACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,CAAA,CAAE,CAAC,CAAC;AAC3D,aAAA;AACF,SAAA;AAED,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAsBD;;;AAGG;IACI,QAAQ,GAAA;QACb,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;AACD,QAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC1B;AACF;;ACpFD;AA6EA;;;;;;;;;;AAUG;AACa,SAAA,iCAAiC,CAC/C,yBAAoD,EACpD,mBAA+C,EAAA;AAE/C,IAAA,MAAM,OAAO,GAAG,yBAAyB,CAAC,OAAO;UAC7C,yBAAyB,CAAC,OAAO;UACjC,eAAe,CAAC;IAEpB,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,qBAAqB;QAC3D,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,aAAa;QACnD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,eAAe;QACrD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,GAAG;QACzC,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,MAAM;QAC5C,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;AAED,IAAA,IAAI,yBAAyB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;AACvE,QAAA,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;AAC/F,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,qBAAqB,CAAC,KAAK,CACnD,yBAAyB,CAAC,WAAW,CAAC,QAAQ,EAAE,CACjD,CAAC;AACF,IAAA,MAAM,cAAc,GAAG,kBAAkB,CAAC,KAAK,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;AAC/F,IAAA,MAAM,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACvD,yBAAyB,CAAC,aAAa,CACxC,CAAC,QAAQ,EAAE,CAAC;AAEb,IAAA,IAAI,YAAoB,CAAC;IAEzB,IAAI,OAAO,IAAI,YAAY,EAAE;AAC3B,QAAA,YAAY,GAAG;AACb,YAAA,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;AACnB,YAAA,yBAAyB,CAAC,QAAQ;kBAC9B,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;AACjE,kBAAE,EAAE;AACN,YAAA,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;AAChE,YAAA,yBAAyB,CAAC,OAAO,GAAG,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,GAAG,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,GAAG,yBAAyB,CAAC,QAAQ,GAAG,EAAE;YAC5E,OAAO;YACP,yBAAyB,CAAC,eAAe,GAAG,yBAAyB,CAAC,eAAe,GAAG,EAAE;AAC1F,YAAA,EAAE;AACH,SAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACd,KAAA;AAAM,SAAA;AACL,QAAA,YAAY,GAAG;AACb,YAAA,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;AACnB,YAAA,yBAAyB,CAAC,QAAQ;kBAC9B,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;AACjE,kBAAE,EAAE;AACN,YAAA,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;AAChE,YAAA,yBAAyB,CAAC,OAAO,GAAG,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,GAAG,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,GAAG,yBAAyB,CAAC,QAAQ,GAAG,EAAE;YAC5E,OAAO;AACP,YAAA,EAAE;AACH,SAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACd,KAAA;IAED,MAAM,SAAS,GAAW,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAE9E,OAAO,IAAI,kBAAkB,CAC3B,OAAO,EACP,SAAS,EACT,iBAAiB,CAAC,QAAQ,EAAE,EAC5B,cAAc,EACd,mBAAmB,EACnB,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,SAAS,EACnC,yBAAyB,CAAC,OAAO,EACjC,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,yBAAyB,CAAC,eAAe,CAC1C,CAAC;AACJ;;ACqJA;;;AAGG;AACG,MAAO,iBAAkB,SAAQ,aAAa,CAAA;IAuGlD,WACE,CAAA,GAAW,EACX,oBAIgB;;;IAGhB,OAAgC,EAAA;AAEhC
,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;AACjC,SAAA;AAAM,aAAA,IACL,CAACC,eAAM,IAAI,oBAAoB,YAAY,0BAA0B;AACrE,YAAA,oBAAoB,YAAY,mBAAmB;YACnDe,0BAAiB,CAAC,oBAAoB,CAAC,EACvC;AACA,YAAA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;AACvD,SAAA;AAAM,aAAA;;YAEL,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAC9D;AA3HD;;;;;;;;;;;AAWG;IACI,OAAO,oBAAoB,CAChC,gBAAwB;;;IAGxB,OAAgC,EAAA;AAEhC,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;AACtE,QAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,YAAA,IAAIf,eAAM,EAAE;AACV,gBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;AAEF,gBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;oBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,iBAAA;gBAED,MAAM,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;gBAC3D,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,aAAA;AACF,SAAA;AAAM,aAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;YAClD,MAAM,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AACjE,YAAA,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,GAAG,GAAG,GAAG,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;AAC9F,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,SAAA;KACF;AAiFD;;;;;;;;;;;AAWG;AACI,IAAA,kBAAkB,CAAC,aAAqB,EAAA;AAC7C,QAAA,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EAC5D,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;AAMG;AACI,IAAA,MAAM,eAAe,CAC1B,aAAqB,EACrB,UAAkC,EAAE,EAAA;AAKpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,MAAM,uBAAuB,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC7E,OAAO;gBACL,eAAe;gBACf,uBAAuB;aACxB,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,eAAe,CAC1B,aAAqB,EACrB,UAAwC,EAAE,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;AAC/D,YAAA,OAAO,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AACrD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,iBAAiB,CAC5B,oBAA4B,EAC5B,uBAA+B,EAC/B,UAA2C,EAAE,EAAA;AAK7C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAC7C,OAAO,CAAC,wBAAwB,IAAI,oBAAoB,CACzD,CAAC;;YAEF,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;AAChF,YAAA,MAAM,yBAAyB,GAAG,MAAM,gBAAgB,CAAC,OAAO,iBAC9D,oBAAoB;gBACpB,uBAAuB,EAAA,EACpB,cAAc,CAAA,CACjB,CAAC;AACH,YAAA,OAAO,EAAE,eAAe,EAAE,yBAAyB,EAAE,CAAC;AACvD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;;;IAGK,MAAM,eAAe,CAC3B,mBAA2B,EAC3B,wBAAgC,EAChC,UAAyC,EAAE,EAAA;;AAK3C,QAAA,MAAM,EAAE,IAAI,EAAE,cA
Ac,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,wBAAwB,CAAC,CAAC;;YAE1E,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,uBAAuB,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,mBAAmB,EAC5E,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,cAAc,KACjB,aAAa,EAAE,MAAA,OAAO,CAAC,eAAe,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,OAAO,IAC/C,CAAC;AACH,YAAA,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,CAAC;AACrD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,aAAa,CACxB,UAAiC,EACjC,UAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,CAAC,UAAU,EACvD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAwC,EAAE,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,cAAc,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;AACK,IAAA,MAAM,qBAAqB,CACjC,MAAe,EACf,UAA+C,EAAE,EAAA;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAEhG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,qBAAqB,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,EACH,EAAA,OAAO,KACV,OAAO,EAAE,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,OAAO,EAAA,CAAA,EAC/E,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;AAi
BG;IACK,MAAM,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE,EAAA;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QAEF,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,EAAA,EAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,CAAA,EAAA,EACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,CAAA,EAAA,GAAA,IAAI,CAAC,IAAI,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1C,qBAAA;AACD,oBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAY,IAAI,CAAA,EAAA,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAG,CAAA,CAAA;iBACvD,CAAC,GACH,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACY,IAAA,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE,EAAA;;AAElD,YAAA,IAAI,QAAQ,CAAC;AACb,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,QAAQ,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;AACtC,oBAAA,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,MAAM,MAAAA,aAAA,CAAA,QAAQ,CAAA,CAAC;AAChB,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;AAQG;AACY,IAAA,oBAAoB,CACjC,sBAA8B,EAC9B,OAAA,GAAgD,EAAE,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;AACtB,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8EG;AACI,IAAA,eAAe,CACpB,sBAA8B,EAC9B,OAAA,GAAwC,EAAE,EAAA;;AAG1C,QAAA,MAAM,kBAAkB,GAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,EACpF,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;AAWG;AACY,IAAA,YAAY,CACzB,MAAe,EACf,OAAA,GAA+C,EAAE,EAAA;;AAEjD,YAAA,IAAI,6BAA6B,CAAC;AAClC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,6BAA6B,GAAG,MAAMT,aAAA,CAAA,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;AAClF,oBAAA,6BAA6B,CAAC,cAAc;AAC1C,wBAAA,6BAA6B,CAAC,cAAc,IAAI,EAAE,CAAC;AACrD,oBAAA,MAAM,GAAG,6BAA6B,CAAC,iBAAiB,CAAC;AACzD,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,6BAA6B,CAAA,CAAA,CAAC;AAC3C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;AAIG;IACY,SAAS,CACtB,UAA+C,EAAE,EAAA;;;AAEjD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,C
AAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAAnD,MAAM,OAAO,WAAA,CAAA;oBACtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,cAAc,CAAA,CAAA,CAAA,CAAC;AAC/B,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyEG;IACI,cAAc,CACnB,UAAwC,EAAE,EAAA;AAE1C,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,OAAO,GAAgC,EAAE,CAAC;QAChD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,aAAa,EAAE;AACzB,YAAA,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACxB,SAAA;;QAGD,MAAM,kBAAkB,mCACnB,OAAO,CAAA,GACN,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,GAAG,EAAE,EAC1C,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC;QAChD,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;AAUG;IACI,MAAM,oBAAoB,CAC/B,QAAc,EACd,SAAe,EACf,UAA8C,EAAE,EAAA;AAEhD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAC/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,oBAAoB,CAC7D;AACE,gBAAA,QAAQ,EAAE,oBAAoB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC/C,gBAAA,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE,KAAK,CAAC;aAClD,EAEC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AAEF,YAAA,MAAM,iBAAiB,GAAG;gBACxB,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,QAAQ,CAAC,cAAc;AACvC,gBAAA,cAAc,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC;AACjD,gBAAA,eAAe,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC;gBACnD,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,KAAK,EAAE,QAAQ,CAAC,KAAK;aACtB,CAAC;AAEF,YAAA,MAAM,GAAG,GACP,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe,EACzC,OAAO,EAAE,QAAQ,CAAC,OAAO,EACzB,IAAI,EAAE,QAAQ,CAAC,IAAI,EACnB,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC1B,EAAA,iBAAiB,CACrB,CAAC;AAEF,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AAED;;;;;;;;;;;;;AAaG;AACI,IAAA,qBAAqB,CAC1B,SAAgB,EAChB,WAAA,GAAqC,qBAAqB,CAAC,KAAK,CAAC,GAAG,CAAC,EACrE,aAAA,GAAwB,KAAK,EAC7B,UAA+C,EAAE,EAAA;QAEjD,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,YAAA,MAAM,UAAU,CACd,+FAA+F,CAChG,CAAC;AACH,SAAA;QAED,IAAI,SAAS,KAAK,SAAS,EAAE;AAC3B,YAAA,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;AACvB,YAAA,SAAS,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC;AACnD,SAAA;AAED,QAAA,MAAM,GAAG,GAAG,iCAAiC,CAAA,MAAA,CAAA,MAAA,CAAA,EAEzC,WAAW;YACX,SAAS;YACT,aAAa,EACb,QAAQ,EAAE,kBAAkB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAC/C,EAAA,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;QAEb,OAAO,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;KACxC;AACF;;ACnyCD;AACA;AAkQA;AACYe,8CAEX;AAFD,CAAA,UAAY,4BAA4B,EAAA;AACtC,IAAA,4BAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACnB,CAAC,EAFWA,oCAA4B,KAA5BA,oCAA4B,GAEvC,EAAA,CAAA,CAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/package.json b/node_modules/@azure/storage-blob/package.json new file mode 100644 index 0000000..cb8d8af --- /dev/null +++ b/node_modules/@azure/storage-blob/package.json @@ -0,0 +1,187 @@ +{ + "name": "@azure/storage-blob", + "sdk-type": "client", + "version": "12.17.0", + "description": "Microsoft Azure Storage SDK for JavaScript - Blob", + "main": "./dist/index.js", + "module": "./dist-esm/storage-blob/src/index.js", + "browser": { + "./dist-esm/storage-blob/src/index.js": "./dist-esm/storage-blob/src/index.browser.js", + "./dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js": "./dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js", + "./dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js": "./dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js", + "./dist-esm/storage-blob/src/utils/utils.node.js": "./dist-esm/storage-blob/src/utils/utils.browser.js", + "./dist-esm/storage-blob/test/utils/index.js": "./dist-esm/storage-blob/test/utils/index.browser.js", + "./dist-esm/storage-blob/src/BatchUtils.js": "./dist-esm/storage-blob/src/BatchUtils.browser.js", + "./dist-esm/storage-blob/src/BlobDownloadResponse.js": "./dist-esm/storage-blob/src/BlobDownloadResponse.browser.js", + "./dist-esm/storage-blob/src/BlobQueryResponse.js": "./dist-esm/storage-blob/src/BlobQueryResponse.browser.js", + "./dist-esm/storage-common/src/BufferScheduler.js": "./dist-esm/storage-common/src/BufferScheduler.browser.js", + "./dist-esm/storage-common/src/index.js": "./dist-esm/storage-common/src/index.browser.js", + "fs": false, + "os": false, + "process": false + }, + "react-native": { + "./dist/index.js": "./dist-esm/storage-blob/src/index.js" + }, + "types": "./types/latest/storage-blob.d.ts", + "typesVersions": { + "<3.6": { + "*": [ + "./types/3.1/storage-blob.d.ts" + ] + } + }, + "engines": { + "node": ">=14.0.0" + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:browser": "tsc -p . && dev-tool run bundle", + "build:node": "tsc -p . && dev-tool run bundle", + "build:test": "tsc -p . && dev-tool run bundle", + "build:types": "downlevel-dts types/latest types/3.1", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "build:samples": "echo Obsolete;", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* types temp statistics.html coverage coverage-browser .nyc_output *.tgz *.log test*.xml TEST*.xml", + "clean:samples": "rimraf samples/v12/javascript/node_modules samples/v12/typescript/node_modules samples/v12/typescript/dist samples/v12/typescript/package-lock.json samples/v12/javascript/package-lock.json", + "extract-api": "tsc -p . 
&& api-extractor run --local", + "execute:samples": "dev-tool samples run samples-dev", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "karma start --single-run", + "integration-test:node": "nyc mocha -r esm --require source-map-support/register --reporter ../../../common/tools/mocha-multi-reporter.js --full-trace -t 300000 \"dist-esm/storage-blob/test/*.spec.js\" \"dist-esm/storage-blob/test/node/*.spec.js\"", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "generate:client": "autorest --typescript ./swagger/README.md", + "lint:fix": "eslint package.json api-extractor.json README.md src test --ext .ts,.javascript,.js --fix", + "lint": "eslint package.json api-extractor.json README.md src test --ext .ts,.javascript,.js", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser", + "test:node": "npm run clean && npm run build:test && npm run unit-test:node", + "test": "npm run clean && npm run build:test && npm run unit-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm --require ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace \"test/{,!(browser)/**/}*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "emulator-tests": "cross-env STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true && npm run test:node" + }, + "files": [ + "BreakingChanges.md", + "dist/", + "dist-esm/storage-blob/src/", + "dist-esm/storage-internal-avro/src/", + "dist-esm/storage-common/src/", + "types/latest/storage-blob.d.ts", + "types/3.1/storage-blob.d.ts", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "storage", + "blob", + "cloud", + "node.js", + "typescript", + "javascript", + "browser" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/", + "sideEffects": false, + "//metadata": { + "constantPaths": [ + { + "path": "src/generated/src/storageClientContext.ts", + "prefix": "packageVersion" + }, + { + "path": "src/utils/constants.ts", + "prefix": "SDK_VERSION" + }, + { + "path": "swagger/README.md", + "prefix": "package-version" + } + ] + }, + "//sampleConfiguration": { + "skip": [ + "advancedRequestOptions.js", + "anonymousAuth.js", + "azureAdAuth.js", + "customPipeline.js", + "customizedClientHeaders.js", + "listBlobsByHierarchy.js", + "listBlobs.js", + "listContainers.js", + "snapshots.js", + "sharedKeyAuth.js" + ], + "productName": "Azure Storage Blob", + "productSlugs": [ + "azure", + "azure-storage" + ], + "requiredResources": { + "Azure Storage Account": "https://docs.microsoft.com/azure/storage/common/storage-account-overview" + } + }, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^3.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/identity": "^2.0.1", + "@azure/test-utils": "^1.0.0", + 
"@azure-tools/test-recorder": "^1.0.0", + "@azure/test-utils-perf": "^1.0.0", + "@microsoft/api-extractor": "^7.31.1", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^14.0.0", + "@types/node-fetch": "^2.5.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "dotenv": "^16.0.0", + "downlevel-dts": "^0.10.0", + "es6-promise": "^4.2.5", + "eslint": "^8.0.0", + "esm": "^3.2.18", + "inherits": "^2.0.3", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-json-preprocessor": "^0.3.3", + "karma-json-to-file-reporter": "^1.0.1", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "puppeteer": "^19.2.2", + "rimraf": "^3.0.0", + "source-map-support": "^0.5.9", + "ts-node": "^10.0.0", + "typescript": "~4.8.0", + "util": "^0.12.1" + } +} diff --git a/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts b/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts new file mode 100644 index 0000000..880d2f4 --- /dev/null +++ b/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts @@ -0,0 +1,9430 @@ +/// +import { AbortSignalLike } from '@azure/abort-controller'; +import { AzureLogger } from '@azure/logger'; +import { BaseRequestPolicy } from '@azure/core-http'; +import { CancelOnProgress } from '@azure/core-lro'; +import * as coreHttp from '@azure/core-http'; +import { deserializationPolicy } from '@azure/core-http'; +import { HttpHeaders } from '@azure/core-http'; +import { HttpOperationResponse } from '@azure/core-http'; +import { HttpRequestBody } from '@azure/core-http'; +import { HttpResponse } from '@azure/core-http'; +import { HttpClient as IHttpClient } from '@azure/core-http'; +import { KeepAliveOptions } from '@azure/core-http'; +import { OperationTracingOptions } from '@azure/core-tracing'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; +import { PollerLike } from '@azure/core-lro'; +import { PollOperationState } from '@azure/core-lro'; +import { ProxyOptions } from '@azure/core-http'; +import { Readable } from 'stream'; +import { RequestPolicy } from '@azure/core-http'; +import { RequestPolicyFactory } from '@azure/core-http'; +import { RequestPolicyOptions } from '@azure/core-http'; +import { RestError } from '@azure/core-http'; +import { ServiceClientOptions } from '@azure/core-http'; +import { TokenCredential } from '@azure/core-http'; +import { TransferProgressEvent } from '@azure/core-http'; +import { UserAgentOptions } from '@azure/core-http'; +import { WebResource } from '@azure/core-http'; +/** An Access policy */ +export declare interface AccessPolicy { + /** the date-time the policy is active */ + startsOn?: string; + /** the date-time the policy expires */ + expiresOn?: string; + /** the permissions for the acl policy */ + permissions?: string; +} +/** Defines values for AccessTier. */ +export declare type AccessTier = "P4" | "P6" | "P10" | "P15" | "P20" | "P30" | "P40" | "P50" | "P60" | "P70" | "P80" | "Hot" | "Cool" | "Archive" | "Cold"; +/** Defines values for AccountKind. */ +export declare type AccountKind = "Storage" | "BlobStorage" | "StorageV2" | "FileStorage" | "BlockBlobStorage"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
+ * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class AccountSASPermissions { + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions: string): AccountSASPermissions; + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions; + /** + * Permission to read resources and list queues and tables granted. + */ + read: boolean; + /** + * Permission to write resources granted. + */ + write: boolean; + /** + * Permission to create blobs and files granted. + */ + delete: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add: boolean; + /** + * Permission to create blobs and files granted. + */ + create: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update: boolean; + /** + * Permission to get and delete messages granted. + */ + process: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Permission to filter blobs. + */ + filter: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} +/** + * A type that looks like an account SAS permission. + * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface AccountSASPermissionsLike { + /** + * Permission to read resources and list queues and tables granted. + */ + read?: boolean; + /** + * Permission to write resources granted. + */ + write?: boolean; + /** + * Permission to delete blobs and files granted. + */ + delete?: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion?: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list?: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add?: boolean; + /** + * Permission to create blobs and files granted. + */ + create?: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update?: boolean; + /** + * Permission to get and delete messages granted. 
+ */ + process?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Permission to filter blobs. + */ + filter?: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export declare class AccountSASResourceTypes { + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes: string): AccountSASResourceTypes; + /** + * Permission to access service level APIs granted. + */ + service: boolean; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container: boolean; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object: boolean; + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +export declare class AccountSASServices { + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services: string): AccountSASServices; + /** + * Permission to access blob resources granted. + */ + blob: boolean; + /** + * Permission to access file resources granted. + */ + file: boolean; + /** + * Permission to access queue resources granted. + */ + queue: boolean; + /** + * Permission to access table resources granted. + */ + table: boolean; + /** + * Converts the given services to a string. + * + */ + toString(): string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once + * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation + * of the SAS which can actually be applied to blob urls. 
Note: that both this class and {@link SASQueryParameters} + * exist because the former is mutable and a logical representation while the latter is immutable and used to generate + * actual REST requests. + * + * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1 + * for more conceptual information on SAS + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * for descriptions of the parameters, including which are required + */ +export declare interface AccountSASSignatureValues { + /** + * If not provided, this defaults to the service version targeted by this version of the library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * The time after which the SAS will no longer work. + */ + expiresOn: Date; + /** + * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help + * constructing the permissions string. + */ + permissions: AccountSASPermissions; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to + * construct this value. + */ + services: string; + /** + * The values that indicate the resource types accessible with this SAS. Please refer + * to {@link AccountSASResourceTypes} to construct this value. + */ + resourceTypes: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export declare class AnonymousCredential extends Credential_2 { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): AnonymousCredentialPolicy; +} +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +export declare class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); +} +/** Defines headers for AppendBlob_appendBlockFromUrl operation. */ +export declare interface AppendBlobAppendBlockFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. 
The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation. + */ +export declare interface AppendBlobAppendBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. 
+ */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the appendBlockFromUrl operation. */ +export declare type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockFromUrlHeaders; + }; +}; +/** Defines headers for AppendBlob_appendBlock operation. */ +export declare interface AppendBlobAppendBlockHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link AppendBlobClient.appendBlock} operation. + */ +export declare interface AppendBlobAppendBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Callback to receive events on the progress of append block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the appendBlock operation. */ +export declare type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockHeaders; + }; +}; +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export declare class AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. + */ + private appendBlobContext; + /** + * + * Creates an instance of AppendBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". 
You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): AppendBlobClient; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + create(options?: AppendBlobCreateOptions): Promise; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + createIfNotExists(options?: AppendBlobCreateIfNotExistsOptions): Promise; + /** + * Seals the append blob, making it read only. 
+ * + * @param options - + */ + seal(options?: AppendBlobSealOptions): Promise; + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + appendBlock(body: HttpRequestBody, contentLength: number, options?: AppendBlobAppendBlockOptions): Promise; + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + appendBlockFromURL(sourceURL: string, sourceOffset: number, count: number, options?: AppendBlobAppendBlockFromURLOptions): Promise; +} +/** Defines headers for AppendBlob_create operation. */ +export declare interface AppendBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. 
*/ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link AppendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * HTTP headers to set when creating append blobs. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; +} +/** + * Contains response data for the {@link appendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure {@link AppendBlobClient.create} operation. + */ +export declare interface AppendBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating append blobs. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when creating append blobs. A common header + * to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. 
+ * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the create operation. */ +export declare type AppendBlobCreateResponse = AppendBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobCreateHeaders; + }; +}; +/** + * Conditions to add to the creation of this append blob. + */ +export declare interface AppendBlobRequestConditions extends BlobRequestConditions, AppendPositionAccessConditions { +} +/** + * Options to configure {@link AppendBlobClient.seal} operation. + */ +export declare interface AppendBlobSealOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet. + */ + conditions?: AppendBlobRequestConditions; +} +/** Parameter group */ +export declare interface AppendPositionAccessConditions { + /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + maxSize?: number; + /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + appendPosition?: number; +} +/** Defines values for ArchiveStatus. */ +export declare type ArchiveStatus = "rehydrate-pending-to-hot" | "rehydrate-pending-to-cool" | "rehydrate-pending-to-cold"; +export { BaseRequestPolicy }; +/** + * A request associated with a batch operation. + */ +export declare interface BatchSubRequest { + /** + * The URL of the resource to request operation. + */ + url: string; + /** + * The credential used for sub request. + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. + * You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used. + */ + credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; +} +/** + * The response data associated with a single request within a batch operation. + */ +export declare interface BatchSubResponse { + /** + * The status code of the sub operation. + */ + status: number; + /** + * The status message of the sub operation. + */ + statusMessage: string; + /** + * The error code of the sub operation, if the sub operation failed. + */ + errorCode?: string; + /** + * The HTTP response headers. + */ + headers: HttpHeaders; + /** + * The body as text. + */ + bodyAsText?: string; + /** + * The batch sub request corresponding to the sub response. + */ + _request: BatchSubRequest; +} +/** Defines headers for Blob_abortCopyFromURL operation. */ +export declare interface BlobAbortCopyFromURLHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.abortCopyFromURL} operation. + */ +export declare interface BlobAbortCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** Contains response data for the abortCopyFromURL operation. */ +export declare type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobAbortCopyFromURLHeaders; + }; +}; +/** + * Options to configure Blob - Acquire Lease operation. + */ +export declare interface BlobAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export declare class BlobBatch { + private batchRequest; + private readonly batch; + private batchType; + constructor(); + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType(): string; + /** + * Get assembled HTTP request body for sub requests. 
+ */ + getHttpRequestBody(): string; + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests(): Map; + private addSubRequestInternal; + private setBatchType; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlob(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param options - + */ + deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobAccessTier(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. 
+ * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param tier - + * @param options - + */ + setBlobAccessTier(blobClient: BlobClient, tier: AccessTier, options?: BlobSetTierOptions): Promise; +} +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export declare class BlobBatchClient { + private serviceOrContainerContext; + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch(): BlobBatch; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operations will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resources to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
+ * @param options - + */ + deleteBlobs(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation(subrequest) will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs to delete. + * @param options - + */ + deleteBlobs(blobClients: BlobClient[], options?: BlobDeleteOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobsAccessTier(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs which should have a new tier set. + * @param tier - + * @param options - + */ + setBlobsAccessTier(blobClients: BlobClient[], tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. 
+ * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + submitBatch(batchRequest: BlobBatch, options?: BlobBatchSubmitBatchOptionalParams): Promise; +} +/** + * Contains response data for the {@link deleteBlobs} operation. + */ +export declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse; +/** + * Contains response data for the {@link setBlobsAccessTier} operation. + */ +export declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse; +/** + * Options to configure the Service - Submit Batch Optional Params. + */ +export declare interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel { +} +/** + * Contains response data for blob batch operations. + */ +export declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse & ServiceSubmitBatchHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions { + /** + * The amount of time in milliseconds the poller should wait between + * calls to the service to determine the status of the Blob copy. + * Defaults to 15 seconds. + */ + intervalInMs?: number; + /** + * Callback to receive the state of the copy progress. + */ + onProgress?: (state: BlobBeginCopyFromUrlPollState) => void; + /** + * Serialized poller state that can be used to resume polling from. + * This may be useful when starting a copy on one process or thread + * and you wish to continue polling on another process or thread. + * + * To get serialized poller state, call `poller.toString()` on an existing + * poller. + */ + resumeFrom?: string; +} +/** + * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}. + * + * This state is passed into the user-specified `onProgress` callback + * whenever copy progress is detected. + */ +export declare interface BlobBeginCopyFromUrlPollState extends PollOperationState { + /** + * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}. + */ + readonly blobClient: CopyPollerBlobClient; + /** + * The copyId that identifies the in-progress blob copy. + */ + copyId?: string; + /** + * the progress of the blob copy as reported by the service. + */ + copyProgress?: string; + /** + * The source URL provided in {@link BlobClient.beginCopyFromURL}. 
+ */ + copySource: string; + /** + * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call. + * This is exposed for the poller and should not be modified directly. + */ + readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions; +} +/** + * Contains response data for the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse { +} +/** + * Options to configure Blob - Break Lease operation. + */ +export declare interface BlobBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Blob - Change Lease operation. + */ +export declare interface BlobChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export declare class BlobClient extends StorageClient { + /** + * blobContext provided by protocol layer. + */ + private blobContext; + private _name; + private _containerName; + private _versionId?; + private _snapshot?; + /* + * The name of the blob. + */ + readonly name: string; + /* + * The name of the storage container the blob is associated with. + */ + readonly containerName: string; + /** + * + * Creates an instance of BlobClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". 
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot: string): BlobClient; + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId: string): BlobClient; + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient(): AppendBlobClient; + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient(): BlockBlobClient; + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient(): PageBlobClient; + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. 
+ * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + download(offset?: number, count?: number, options?: BlobDownloadOptions): Promise; + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + exists(options?: BlobExistsOptions): Promise; + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + getProperties(options?: BlobGetPropertiesOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + delete(options?: BlobDeleteOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. 
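+ *
+ * A minimal usage sketch (added for illustration; not part of the original declarations). It
+ * assumes an existing `blobClient` instance for a blob that may or may not exist:
+ *
+ * ```js
+ * // Delete the blob and its snapshots, tolerating the case where the blob is already gone.
+ * const response = await blobClient.deleteIfExists({ deleteSnapshots: "include" });
+ * if (!response.succeeded) {
+ *   console.log("Blob did not exist, nothing to delete.");
+ * }
+ * ```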
+ */ + deleteIfExists(options?: BlobDeleteOptions): Promise; + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. + */ + undelete(options?: BlobUndeleteOptions): Promise; + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + setHTTPHeaders(blobHTTPHeaders?: BlobHTTPHeaders, options?: BlobSetHTTPHeadersOptions): Promise; + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + setMetadata(metadata?: Metadata, options?: BlobSetMetadataOptions): Promise; + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + setTags(tags: Tags, options?: BlobSetTagsOptions): Promise; + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + getTags(options?: BlobGetTagsOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + createSnapshot(options?: BlobCreateSnapshotOptions): Promise; + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. 
+ * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + beginCopyFromURL(copySource: string, options?: BlobBeginCopyFromURLOptions): Promise, BlobBeginCopyFromURLResponse>>; + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. + */ + abortCopyFromURL(copyId: string, options?: BlobAbortCopyFromURLOptions): Promise; + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + syncCopyFromURL(copySource: string, options?: BlobSyncCopyFromURLOptions): Promise; + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. 
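+ *
+ * A short illustrative sketch (not from the original source), assuming a `blobClient` that
+ * points at an existing block blob in a blob storage account:
+ *
+ * ```js
+ * // Move the block blob to the Cool tier; "Hot", "Cool" and "Archive" are valid block blob tiers.
+ * await blobClient.setAccessTier("Cool");
+ * ```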
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier
+ *
+ * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive.
+ * @param options - Optional options to the Blob Set Tier operation.
+ */
+ setAccessTier(tier: BlockBlobTier | PremiumPageBlobTier | string, options?: BlobSetTierOptions): Promise;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob in parallel to a buffer.
+ * Offset and count are optional; downloads the entire blob if they are not provided.
+ *
+ * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two
+ * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,
+ * consider {@link downloadToFile}.
+ *
+ * @param offset - From which position of the block blob to download (in bytes)
+ * @param count - How much data (in bytes) to download. Will download to the end when passing undefined
+ * @param options - BlobDownloadToBufferOptions
+ */
+ downloadToBuffer(offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob in parallel to a buffer.
+ * Offset and count are optional; downloads the entire blob if they are not provided.
+ *
+ * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two
+ * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,
+ * consider {@link downloadToFile}.
+ *
+ * @param buffer - Buffer to be filled; must have a length larger than count
+ * @param offset - From which position of the block blob to download (in bytes)
+ * @param count - How much data (in bytes) to download. Will download to the end when passing undefined
+ * @param options - BlobDownloadToBufferOptions
+ */
+ downloadToBuffer(buffer: Buffer, offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob to a local file.
+ * Fails if the given file path already exists.
+ * Offset and count are optional; pass 0 and undefined respectively to download the entire blob.
+ *
+ * @param filePath -
+ * @param offset - From which position of the block blob to download.
+ * @param count - How much data to download. Will download to the end when passing undefined.
+ * @param options - Optional options to the Blob download operation.
+ * @returns The response data for the blob download operation,
+ * but with readableStreamBody set to undefined since its
+ * content is already read and written into a local file
+ * at the specified path.
+ */
+ downloadToFile(filePath: string, offset?: number, count?: number, options?: BlobDownloadOptions): Promise;
+ private getBlobAndContainerNamesFromUrl;
+ /**
+ * Asynchronously copies a blob to a destination within the storage account.
+ * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+ * a committed blob in any Azure storage account.
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+ * an Azure file in any Azure storage account.
+ * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+ * operation to copy from another storage account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
+ *
+ * @param copySource - URL to the source Azure Blob/File.
+ * @param options - Optional options to the Blob Start Copy From URL operation.
+ */
+ private startCopyFromURL;
+ /**
+ * Only available for a BlobClient constructed with a shared key credential.
+ *
+ * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties
+ * and parameters passed in. The SAS is signed by the shared key credential of the client.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+ *
+ * @param options - Optional parameters.
+ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+ */
+ generateSasUrl(options: BlobGenerateSasUrlOptions): Promise;
+ /**
+ * Deletes the immutability policy on the blob.
+ *
+ * @param options - Optional options to delete the immutability policy on the blob.
+ */
+ deleteImmutabilityPolicy(options?: BlobDeleteImmutabilityPolicyOptions): Promise;
+ /**
+ * Sets the immutability policy on the blob.
+ *
+ * @param options - Optional options to set the immutability policy on the blob.
+ */
+ setImmutabilityPolicy(immutabilityPolicy: BlobImmutabilityPolicy, options?: BlobSetImmutabilityPolicyOptions): Promise;
+ /**
+ * Sets a legal hold on the blob.
+ *
+ * @param options - Optional options to set a legal hold on the blob.
+ */
+ setLegalHold(legalHoldEnabled: boolean, options?: BlobSetLegalHoldOptions): Promise;
+}
+/** Defines headers for Blob_copyFromURL operation. */
+export declare interface BlobCopyFromURLHeaders {
+ /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */
+ etag?: string;
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */
+ lastModified?: Date;
+ /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
+ clientRequestId?: string;
+ /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
+ requestId?: string;
+ /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
+ version?: string;
+ /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */
+ versionId?: string;
+ /** UTC date/time value generated by the service that indicates the time at which the response was initiated */
+ date?: Date;
+ /** String identifier for this copy operation. */
+ copyId?: string;
+ /** State of the copy operation identified by x-ms-copy-id. */
+ copyStatus?: SyncCopyStatusType;
+ /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */
+ contentMD5?: Uint8Array;
+ /** This response header is returned so that the client can check for the integrity of the copied content. */
+ xMsContentCrc64?: Uint8Array;
+ /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope.
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the copyFromURL operation. */ +export declare type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCopyFromURLHeaders; + }; +}; +/** Defines values for BlobCopySourceTags. */ +export declare type BlobCopySourceTags = "REPLACE" | "COPY"; +/** Defines headers for Blob_createSnapshot operation. */ +export declare interface BlobCreateSnapshotHeaders { + /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */ + snapshot?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.createSnapshot} operation. + */ +export declare interface BlobCreateSnapshotOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet when creating blob snapshots. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the createSnapshot operation. 
*/ +export declare type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCreateSnapshotHeaders; + }; +}; +/** Defines headers for Blob_delete operation. */ +export declare interface BlobDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link BlobClient.deleteIfExists} operation. + */ +export declare interface BlobDeleteIfExistsResponse extends BlobDeleteResponse { + /** + * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place. + */ + succeeded: boolean; +} +/** Defines headers for Blob_deleteImmutabilityPolicy operation. */ +export declare interface BlobDeleteImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} +/** + * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation. + */ +export declare interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the deleteImmutabilityPolicy operation. */ +export declare type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteImmutabilityPolicyHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.delete} operation. + */ +export declare interface BlobDeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting blobs. + */ + conditions?: BlobRequestConditions; + /** + * Specifies options to delete blobs that have associated snapshots. + * - `include`: Delete the base blob and all of its snapshots. + * - `only`: Delete only the blob's snapshots and not the blob itself. 
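+ *
+ * For example (an illustrative sketch, assuming a `blobClient` for a blob that has snapshots):
+ *
+ * ```js
+ * // Delete the base blob together with every snapshot.
+ * await blobClient.delete({ deleteSnapshots: "include" });
+ *
+ * // Or keep the base blob and delete only its snapshots.
+ * await blobClient.delete({ deleteSnapshots: "only" });
+ * ```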
+ */ + deleteSnapshots?: DeleteSnapshotsOptionType; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the delete operation. */ +export declare type BlobDeleteResponse = BlobDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteHeaders; + }; +}; +/** Defines headers for Blob_download operation. */ +export declare interface BlobDownloadHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** Returns the date and time the blob was created. */ + createdOn?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. 
Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** The number of tags associated with the blob */ + tagCount?: number; + /** If this blob has been sealed */ + isSealed?: boolean; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} +/** Optional parameters. */ +export declare interface BlobDownloadOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + leaseAccessConditions?: LeaseAccessConditions; + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** Parameter group */ + cpkInfo?: CpkInfo; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */ + snapshot?: string; + /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */ + versionId?: string; + /** Return only the bytes of the blob in the specified range. */ + range?: string; + /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */ + rangeGetContentMD5?: boolean; + /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. 
*/ + rangeGetContentCRC64?: boolean; +} +/** + * Options to configure the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve. + */ + snapshot?: string; + /** + * When this is set to true and a range of the blob is downloaded, the service returns the MD5 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at the same time. + */ + rangeGetContentMD5?: boolean; + /** + * When this is set to true and a range of the blob is downloaded, the service returns the CRC64 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at the same time. + */ + rangeGetContentCrc64?: boolean; + /** + * Conditions to meet when downloading blobs. + */ + conditions?: BlobRequestConditions; + /** + * Callback to receive events on the progress of the download operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries to perform when the original body download stream unexpectedly ends. + * This kind of ending does not trigger the retry policy defined in a pipeline, + * because it does not emit network errors. + * + * With this option, every additional retry means an additional `FileClient.download()` request will be made + * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached. + * + * The default value is 5; set a larger value when loading large files over a poor network. + */ + maxRetryRequests?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the download operation. */ +export declare type BlobDownloadResponseModel = BlobDownloadHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDownloadHeaders; + }; +}; +/** + * Contains response data for the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadResponseParsed extends BlobDownloadResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} +/** + * Option interface for the {@link BlobClient.downloadToBuffer} operation. + */ +export declare interface BlobDownloadToBufferOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`.
+ */ + abortSignal?: AbortSignalLike; + /** + * blockSize is the amount of data each request tries to download. + * It must be greater than or equal to 0. + * If set to 0 or undefined, blockSize will be calculated automatically according to the blob size. + */ + blockSize?: number; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries to perform when the original block download stream unexpectedly ends. + * This kind of ending does not trigger the retry policy defined in a pipeline, + * because it does not emit network errors. + * + * With this option, every additional retry means an additional FileClient.download() request will be made + * from the broken point, until the requested block has been successfully downloaded or + * maxRetryRequestsPerBlock is reached. + * + * The default value is 5; set a larger value when on a poor network. + */ + maxRetryRequestsPerBlock?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel download. + */ + concurrency?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** + * Options to configure the {@link BlobClient.exists} operation. + */ +export declare interface BlobExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Conditions to meet. + */ + conditions?: BlobRequestConditions; +} +/** + * An interface representing BlobFlatListSegment. + */ +export declare interface BlobFlatListSegment { + blobItems: BlobItem[]; +} +export declare interface BlobFlatListSegmentModel { + blobItems: BlobItemInternal[]; +} +/** + * Options to configure {@link BlobClient.generateSasUrl} operation. + */ +export declare interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: BlobSASPermissions; +} +/** Defines headers for Blob_getProperties operation. */ +export declare interface BlobGetPropertiesHeaders { + /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** Returns the date and time the blob was created. */ + createdOn?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob.
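For orientation, a minimal Node.js sketch of how the download options above are typically used. It assumes an already-constructed `BlobClient` (for example from `containerClient.getBlobClient(...)`) whose `download` and `downloadToBuffer` methods accept these option bags; the `streamToBuffer` helper is defined here for illustration only.

```ts
import type { BlobClient, BlobDownloadResponseParsed } from "@azure/storage-blob";

// Collect a Node.js readable stream into a single Buffer (helper for this sketch).
function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    stream.on("data", (chunk) => chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)));
    stream.on("end", () => resolve(Buffer.concat(chunks)));
    stream.on("error", reject);
  });
}

async function readBlob(blobClient: BlobClient): Promise<Buffer> {
  // Download the whole blob (offset 0), retrying broken body streams up to 5 times.
  const response: BlobDownloadResponseParsed = await blobClient.download(0, undefined, {
    maxRetryRequests: 5,
    onProgress: (p) => console.log(`downloaded ${p.loadedBytes} bytes`),
  });
  // readableStreamBody is only populated in Node.js; blobBody is used in browsers.
  return streamToBuffer(response.readableStreamBody!);
}

async function readBlobToBuffer(blobClient: BlobClient): Promise<Buffer> {
  // downloadToBuffer (Node.js only) parallelizes the transfer in fixed-size blocks.
  return blobClient.downloadToBuffer(0, undefined, {
    blockSize: 4 * 1024 * 1024, // 4 MiB per request
    concurrency: 4,
    maxRetryRequestsPerBlock: 5,
  });
}
```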
This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Included if the blob is incremental copy blob. */ + isIncrementalCopy?: boolean; + /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */ + destinationSnapshot?: string; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */ + contentLength?: number; + /** The content type specified for the blob. The default content type is 'application/octet-stream' */ + contentType?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. 
The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */ + accessTier?: string; + /** For page blobs on a premium storage account only. If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */ + accessTierInferred?: boolean; + /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool/rehydrate-pending-to-cold. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */ + archiveStatus?: string; + /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */ + accessTierChangedOn?: Date; + /** A DateTime value returned by the service that uniquely identifies the blob. 
The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** The number of tags associated with the blob */ + tagCount?: number; + /** The time this blob will expire. */ + expiresOn?: Date; + /** If this blob has been sealed */ + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */ + rehydratePriority?: RehydratePriority; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting blob properties. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** + * Contains response data for the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} +/** Contains response data for the getProperties operation. */ +export declare type BlobGetPropertiesResponseModel = BlobGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobGetPropertiesHeaders; + }; +}; +/** Defines headers for Blob_getTags operation. */ +export declare interface BlobGetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.getTags} operation. + */ +export declare interface BlobGetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} +/** + * Contains response data for the {@link BlobClient.getTags} operation. + */ +export declare type BlobGetTagsResponse = { + tags: Tags; +} & BlobGetTagsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: BlobGetTagsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: BlobTags; + }; +}; +/** + * An interface representing BlobHierarchyListSegment. + */ +export declare interface BlobHierarchyListSegment { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItem[]; +} +export declare interface BlobHierarchyListSegmentModel { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItemInternal[]; +} +/** Parameter group */ +export declare interface BlobHTTPHeaders { + /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */ + blobCacheControl?: string; + /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */ + blobContentType?: string; + /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */ + blobContentMD5?: Uint8Array; + /** Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. */ + blobContentEncoding?: string; + /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */ + blobContentLanguage?: string; + /** Optional. Sets the blob's Content-Disposition header. */ + blobContentDisposition?: string; +} +/** + * Describe immutable policy for blob. + */ +export declare interface BlobImmutabilityPolicy { + /** + * Specifies the date time when the blobs immutability policy is set to expire. + */ + expiriesOn?: Date; + /** + * Specifies the immutability policy mode to set on the blob. + */ + policyMode?: BlobImmutabilityPolicyMode; +} +/** Defines values for BlobImmutabilityPolicyMode. */ +export declare type BlobImmutabilityPolicyMode = "Mutable" | "Unlocked" | "Locked"; +/** + * An Azure Storage blob + */ +export declare interface BlobItem { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + properties: BlobProperties; + metadata?: { + [propertyName: string]: string; + }; + tags?: Tags; + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + hasVersionsOnly?: boolean; +} +/** An Azure Storage blob */ +export declare interface BlobItemInternal { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + /** Properties of a blob */ + properties: BlobProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; + /** Blob tags */ + blobTags?: BlobTags; + /** Dictionary of */ + objectReplicationMetadata?: { + [propertyName: string]: string; + }; + /** Inactive root blobs which have any versions would have such tag with value true. 
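A short sketch of how the property and tag accessors declared above are typically consumed, assuming an existing `BlobClient` whose `exists`, `getProperties`, and `getTags` methods return the response types defined here.

```ts
import type { BlobClient } from "@azure/storage-blob";

async function inspectBlob(blobClient: BlobClient): Promise<void> {
  // exists() resolves to false instead of throwing when the blob is missing.
  if (!(await blobClient.exists())) {
    console.log("blob not found");
    return;
  }

  // BlobGetPropertiesResponse exposes the headers declared in BlobGetPropertiesHeaders.
  const props = await blobClient.getProperties();
  console.log(`type=${props.blobType} size=${props.contentLength} etag=${props.etag}`);
  console.log(`tier=${props.accessTier} lastModified=${props.lastModified?.toISOString()}`);

  // BlobGetTagsResponse adds a parsed `tags` record on top of BlobGetTagsHeaders.
  const tagsResponse = await blobClient.getTags();
  for (const [key, value] of Object.entries(tagsResponse.tags)) {
    console.log(`tag ${key}=${value}`);
  }
}
```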
*/ + hasVersionsOnly?: boolean; +} +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export declare class BlobLeaseClient { + private _leaseId; + private _url; + private _containerOrBlobOperation; + private _isContainer; + /* + * Gets the lease Id. + * + * @readonly + */ + readonly leaseId: string; + /* + * Gets the url. + * + * @readonly + */ + readonly url: string; + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client: ContainerClient | BlobClient, leaseId?: string); + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + acquireLease(duration: number, options?: LeaseOperationOptions): Promise; + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + changeLease(proposedLeaseId: string, options?: LeaseOperationOptions): Promise; + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + releaseLease(options?: LeaseOperationOptions): Promise; + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + renewLease(options?: LeaseOperationOptions): Promise; + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. 
+ */ + breakLease(breakPeriod: number, options?: LeaseOperationOptions): Promise; +} +export declare interface BlobPrefix { + name: string; +} +/** Properties of a blob */ +export declare interface BlobProperties { + createdOn?: Date; + lastModified: Date; + etag: string; + /** Size in bytes */ + contentLength?: number; + contentType?: string; + contentEncoding?: string; + contentLanguage?: string; + contentMD5?: Uint8Array; + contentDisposition?: string; + cacheControl?: string; + blobSequenceNumber?: number; + blobType?: BlobType; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + copyId?: string; + copyStatus?: CopyStatusType; + copySource?: string; + copyProgress?: string; + copyCompletedOn?: Date; + copyStatusDescription?: string; + serverEncrypted?: boolean; + incrementalCopy?: boolean; + destinationSnapshot?: string; + deletedOn?: Date; + remainingRetentionDays?: number; + accessTier?: AccessTier; + accessTierInferred?: boolean; + archiveStatus?: ArchiveStatus; + customerProvidedKeySha256?: string; + /** The name of the encryption scope under which the blob is encrypted. */ + encryptionScope?: string; + accessTierChangedOn?: Date; + tagCount?: number; + expiresOn?: Date; + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */ + rehydratePriority?: RehydratePriority; + lastAccessedOn?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; +} +/** + * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}. + */ +export declare interface BlobQueryArrowConfiguration { + /** + * Kind. + */ + kind: "arrow"; + /** + * List of {@link BlobQueryArrowField} describing the schema of the data. + */ + schema: BlobQueryArrowField[]; +} +/** + * Describes a field in {@link BlobQueryArrowConfiguration}. + */ +export declare interface BlobQueryArrowField { + /** + * The type of the field. + */ + type: BlobQueryArrowFieldType; + /** + * The name of the field. + */ + name?: string; + /** + * The precision of the field. Required if type is "decimal". + */ + precision?: number; + /** + * The scale of the field. Required if type is "decimal". + */ + scale?: number; +} +/** + * The type of a {@link BlobQueryArrowField}. + */ +export declare type BlobQueryArrowFieldType = "int64" | "bool" | "timestamp[ms]" | "string" | "double" | "decimal"; +/** + * Options to query blob with CSV format. + */ +export declare interface BlobQueryCsvTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a CSV format blob. + */ + kind: "csv"; + /** + * Column separator. Default is ",". + */ + columnSeparator?: string; + /** + * Field quote. + */ + fieldQuote?: string; + /** + * Escape character. + */ + escapeCharacter?: string; + /** + * Has headers. Default is false. + */ + hasHeaders?: boolean; +} +/** + * Blob query error type. + */ +export declare interface BlobQueryError { + /** + * Whether the error is fatal. A fatal error will stop the query. + */ + isFatal: boolean; + /** + * Error name. + */ + name: string; + /** + * Position in bytes of the query.
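To illustrate the `BlobLeaseClient` flow declared above, a minimal sketch. It assumes the blob client's `getBlobLeaseClient()` factory and a `delete` method that accepts `conditions` (a `BlobRequestConditions` carrying the lease id), as elsewhere in this package's API.

```ts
import type { BlobClient } from "@azure/storage-blob";

async function deleteWithLease(blobClient: BlobClient): Promise<void> {
  const leaseClient = blobClient.getBlobLeaseClient();

  // Acquire a fixed-duration lease (15-60 seconds, or -1 for infinite).
  const lease = await leaseClient.acquireLease(30);
  try {
    // Operations on a leased blob must present the lease id as an access condition.
    await blobClient.delete({ conditions: { leaseId: lease.leaseId } });
  } catch (err) {
    // If the delete failed, give the lease back so other clients are not blocked.
    await leaseClient.releaseLease();
    throw err;
  }
}
```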
+ */ + position: number; + /** + * Error description. + */ + description: string; +} +/** Defines headers for Blob_query operation. */ +export declare interface BlobQueryHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletionTime?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. 
Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} +/** + * Options to query blob with JSON format. 
+ */ +export declare interface BlobQueryJsonTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a JSON format blob. + */ + kind: "json"; +} +/** + * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}. + */ +export declare interface BlobQueryParquetConfiguration { + /** + * Kind. + */ + kind: "parquet"; +} +/** Contains response data for the query operation. */ +export declare type BlobQueryResponseModel = BlobQueryHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobQueryHeaders; + }; +}; +/** + * Options to configure Blob - Release Lease operation. + */ +export declare interface BlobReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Blob - Renew Lease operation. + */ +export declare interface BlobRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * standard HTTP conditional headers, tags condition and lease condition + */ +export declare interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions { +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): BlobSASPermissions; + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. 
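A hedged sketch of how the query text-configuration interfaces above fit together. It assumes a `BlockBlobClient` whose Node.js-only `query` method takes these configurations through `BlockBlobQueryOptions` (declared elsewhere in this file); the SQL text and separators are placeholders.

```ts
import type { BlockBlobClient } from "@azure/storage-blob";

async function queryCsvBlob(blockBlobClient: BlockBlobClient): Promise<string> {
  // Run a blob query (Node.js only): CSV in, JSON lines out.
  const response = await blockBlobClient.query("SELECT * from BlobStorage", {
    inputTextConfiguration: {
      kind: "csv",
      recordSeparator: "\n",
      columnSeparator: ",",
      hasHeaders: true,
    },
    outputTextConfiguration: {
      kind: "json",
      recordSeparator: "\n",
    },
    // Non-fatal problems are surfaced as BlobQueryError values.
    onError: (err) => console.warn(`query error at byte ${err.position}: ${err.description}`),
  });

  // The result is a BlobDownloadResponseModel; drain its Node.js stream into a string.
  const body = response.readableStreamBody!;
  const chunks: Buffer[] = [];
  await new Promise<void>((resolve, reject) => {
    body.on("data", (chunk) => chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)));
    body.on("end", () => resolve());
    body.on("error", reject);
  });
  return Buffer.concat(chunks).toString("utf8");
}
```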
+ */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specifies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString(): string; +} +/** + * A type that looks like a Blob SAS permission. + * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface BlobSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specifies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobSASSignatureValues is used to help generate Blob service SAS tokens for containers or blobs. + */ +export declare interface BlobSASSignatureValues { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. + */ + expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource + * being accessed for help constructing the permissions string. + */ + permissions?: BlobSASPermissions | ContainerSASPermissions; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * The name of the container the SAS user may access. + */ + containerName: string; + /** + * Optional. The blob name the SAS user may access. Required if snapshotTime or versionId is provided. + */ + blobName?: string; + /** + * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09. + */ + snapshotTime?: string; + /** + * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10. + */ + versionId?: string; + /** + * Optional. The name of the access policy on the container this SAS references if any.
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; + /** + * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user + * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will + * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission + * check for the user specified in this value will be performed. This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to + * correlate SAS generation with storage resource access. This is only used for User Delegation SAS. + */ + correlationId?: string; +} +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export declare class BlobServiceClient extends StorageClient { + /** + * serviceContext provided by protocol layer. + */ + private serviceContext; + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString: string, options?: StoragePipelineOptions): BlobServiceClient; + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. 
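A compact sketch showing how `BlobSASPermissions` and `BlobSASSignatureValues` are typically passed to this package's `generateBlobSASQueryParameters` helper together with a `StorageSharedKeyCredential` (Node.js only); the account values are placeholders.

```ts
import {
  BlobSASPermissions,
  generateBlobSASQueryParameters,
  SASProtocol,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

function createReadOnlyBlobSasUrl(
  accountName: string,
  accountKey: string,
  containerName: string,
  blobName: string
): string {
  const credential = new StorageSharedKeyCredential(accountName, accountKey);

  // "r" grants read-only access; BlobSASPermissions guarantees a valid permission ordering.
  const permissions = BlobSASPermissions.parse("r");

  const sas = generateBlobSASQueryParameters(
    {
      containerName,
      blobName,
      permissions,
      protocol: SASProtocol.Https,
      startsOn: new Date(),
      expiresOn: new Date(Date.now() + 60 * 60 * 1000), // valid for one hour
    },
    credential
  );

  return `https://${accountName}.blob.core.windows.net/${containerName}/${blobName}?${sas.toString()}`;
}
```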
+ * + * Example using DefaultAzureCredential from `@azure/identity`: + * + * ```js + * const account = ""; + * + * const defaultAzureCredential = new DefaultAzureCredential(); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * defaultAzureCredential + * ); + * ``` + * + * Example using an account name/key: + * + * ```js + * const account = "" + * const sharedKeyCredential = new StorageSharedKeyCredential(account, ""); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * sharedKeyCredential + * ); + * ``` + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName: string): ContainerClient; + /** + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + createContainer(containerName: string, options?: ContainerCreateOptions): Promise<{ + containerClient: ContainerClient; + containerCreateResponse: ContainerCreateResponse; + }>; + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + deleteContainer(containerName: string, options?: ContainerDeleteMethodOptions): Promise; + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + undeleteContainer(deletedContainerName: string, deletedContainerVersion: string, options?: ServiceUndeleteContainerOptions): Promise<{ + containerClient: ContainerClient; + containerUndeleteResponse: ContainerUndeleteResponse; + }>; + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. 
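Putting the container operations above together, a minimal sketch assuming an already-constructed `BlobServiceClient`; the container name is a placeholder.

```ts
import type { BlobServiceClient } from "@azure/storage-blob";

async function containerLifecycle(blobServiceClient: BlobServiceClient): Promise<void> {
  const containerName = "sample-container";

  // createContainer returns both the creation response and a ready-to-use ContainerClient.
  const { containerClient, containerCreateResponse } =
    await blobServiceClient.createContainer(containerName);
  console.log(
    `created ${containerClient.containerName}, requestId=${containerCreateResponse.requestId}`
  );

  // ... upload blobs through containerClient ...

  await blobServiceClient.deleteContainer(containerName);

  // If container soft delete is enabled on the account, the container can be restored
  // by name and version via undeleteContainer(deletedContainerName, deletedContainerVersion).
}
```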
+ */ + private renameContainer; + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + getProperties(options?: ServiceGetPropertiesOptions): Promise; + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + setProperties(properties: BlobServiceProperties, options?: ServiceSetPropertiesOptions): Promise; + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + getStatistics(options?: ServiceGetStatisticsOptions): Promise; + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + getAccountInfo(options?: ServiceGetAccountInfoOptions): Promise; + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + private listContainersSegment; + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
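For the service-level accessors above, a brief sketch that reads the current Blob service properties and re-submits them with blob soft delete enabled; it assumes an existing `BlobServiceClient` and that `BlobServiceProperties` carries a `deleteRetentionPolicy` field.

```ts
import type { BlobServiceClient } from "@azure/storage-blob";

async function enableSoftDelete(blobServiceClient: BlobServiceClient): Promise<void> {
  // Read the current service configuration (analytics, CORS, retention, ...).
  const properties = await blobServiceClient.getProperties();

  // Turn on a 7-day delete retention policy (blob soft delete) and write it back.
  properties.deleteRetentionPolicy = { enabled: true, days: 7 };
  await blobServiceClient.setProperties(properties);
}
```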
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/find-blobs-by-tags
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * let i = 1;
+ * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'");
+ * let blobItem = await iter.next();
+ * while (!blobItem.done) {
+ * console.log(`Blob ${i++}: ${blobItem.value.name}`);
+ * blobItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) {
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 blob names
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ * // Passing next marker as continuationToken
+ * iterator = blobServiceClient
+ * .findBlobsByTags("tagkey='tagvalue'")
+ * .byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints blob names
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ * ```
+ *
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param options - Options to find blobs by tags.
+ */
+ findBlobsByTags(tagFilterSqlExpression: string, options?: ServiceFindBlobByTagsOptions): PagedAsyncIterableIterator<FilterBlobItem, ServiceFindBlobsByTagsSegmentResponse>;
+ /**
+ * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses
+ *
+ * @param marker - A string value that identifies the portion of
+ * the list of containers to be returned with the next listing operation. The
+ * operation returns the continuationToken value within the response body if the
+ * listing operation did not return all containers remaining to be listed
+ * with the current page. The continuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param options - Options to list containers operation.
+ */
+ private listSegments;
+ /**
+ * Returns an AsyncIterableIterator for Container Items
+ *
+ * @param options - Options to list containers operation.
+ */
+ private listItems;
+ /**
+ * Returns an async iterable iterator to list all the containers
+ * under the specified account.
+ *
+ * .byPage() returns an async iterable iterator to list the containers in pages.
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options?: ServiceListContainersOptions): PagedAsyncIterableIterator; + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + getUserDelegationKey(startsOn: Date, expiresOn: Date, options?: ServiceGetUserDelegationKeyOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient(): BlobBatchClient; + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. 
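+ *
+ * Example usage (a sketch, assuming the client was constructed with a
+ * `StorageSharedKeyCredential` and that `AccountSASPermissions` is imported from
+ * `@azure/storage-blob`):
+ *
+ * ```js
+ * const sasUrl = blobServiceClient.generateAccountSasUrl(
+ *   new Date(Date.now() + 60 * 60 * 1000), // expires in one hour
+ *   AccountSASPermissions.parse("rl"),     // read + list
+ *   "sco"                                  // service, container and object resource types
+ * );
+ * ```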
+ * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string; +} +/** Storage Service Properties. */ +export declare interface BlobServiceProperties { + /** Azure Analytics Logging settings. */ + blobAnalyticsLogging?: Logging; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + hourMetrics?: Metrics; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + minuteMetrics?: Metrics; + /** The set of CORS rules. */ + cors?: CorsRule[]; + /** The default version to use for requests to the Blob service if an incoming request's version is not specified. Possible values include version 2008-10-27 and all more recent versions */ + defaultServiceVersion?: string; + /** the retention policy which determines how long the associated data should persist */ + deleteRetentionPolicy?: RetentionPolicy; + /** The properties that enable an account to host a static website */ + staticWebsite?: StaticWebsite; +} +/** Stats for the storage service. */ +export declare interface BlobServiceStatistics { + /** Geo-Replication information for the Secondary Storage Service */ + geoReplication?: GeoReplication; +} +/** Defines headers for Blob_setHttpHeaders operation. */ +export declare interface BlobSetHTTPHeadersHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setHTTPHeaders} operation. + */ +export declare interface BlobSetHTTPHeadersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob HTTP headers. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the setHttpHeaders operation. 
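+ *
+ * A short sketch of the call that produces this response, assuming an existing
+ * `blobClient`. Note that header values omitted from the call are cleared on the
+ * service side.
+ *
+ * ```js
+ * await blobClient.setHTTPHeaders({
+ *   blobContentType: "application/json",
+ *   blobCacheControl: "public, max-age=3600",
+ * });
+ * ```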
*/ +export declare type BlobSetHTTPHeadersResponse = BlobSetHTTPHeadersHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetHTTPHeadersHeaders; + }; +}; +/** Defines headers for Blob_setImmutabilityPolicy operation. */ +export declare interface BlobSetImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates the time the immutability policy will expire. */ + immutabilityPolicyExpiry?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; +} +/** + * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation. + */ +export declare interface BlobSetImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + modifiedAccessCondition?: ModificationConditions; +} +/** Contains response data for the setImmutabilityPolicy operation. */ +export declare type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetImmutabilityPolicyHeaders; + }; +}; +/** Defines headers for Blob_setLegalHold operation. */ +export declare interface BlobSetLegalHoldHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates if the blob has a legal hold. */ + legalHold?: boolean; +} +/** + * Options for setting legal hold {@link BlobClient.setLegalHold} operation. + */ +export declare interface BlobSetLegalHoldOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the setLegalHold operation. */ +export declare type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetLegalHoldHeaders; + }; +}; +/** Defines headers for Blob_setMetadata operation. 
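+ *
+ * A short sketch of the corresponding call, assuming an existing `blobClient`
+ * (the metadata keys and values are illustrative):
+ *
+ * ```js
+ * await blobClient.setMetadata({ project: "reports", reviewed: "true" });
+ * ```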
*/ +export declare interface BlobSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setMetadata} operation. + */ +export declare interface BlobSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob metadata. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the setMetadata operation. */ +export declare type BlobSetMetadataResponse = BlobSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetMetadataHeaders; + }; +}; +/** Defines headers for Blob_setTags operation. */ +export declare interface BlobSetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
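+ *
+ * A short sketch of setting and reading tags, assuming an existing `blobClient`
+ * (the tag names and values are illustrative):
+ *
+ * ```js
+ * await blobClient.setTags({ project: "reports", stage: "draft" });
+ * const { tags } = await blobClient.getTags();
+ * ```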
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setTags} operation. + */ +export declare interface BlobSetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} +/** Contains response data for the setTags operation. */ +export declare type BlobSetTagsResponse = BlobSetTagsHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTagsHeaders; + }; +}; +/** Defines headers for Blob_setTier operation. */ +export declare interface BlobSetTierHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setAccessTier} operation. + */ +export declare interface BlobSetTierOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; +} +/** Contains response data for the setTier operation. */ +export declare type BlobSetTierResponse = BlobSetTierHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTierHeaders; + }; +}; +/** Defines headers for Blob_startCopyFromURL operation. */ +export declare interface BlobStartCopyFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
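+ *
+ * A short sketch of the corresponding tier change, assuming an existing `blobClient`:
+ * rehydrate an archived blob back to the Hot tier with high priority.
+ *
+ * ```js
+ * await blobClient.setAccessTier("Hot", { rehydratePriority: "High" });
+ * ```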
*/ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobStartCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the blob that are being copied. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | PremiumPageBlobTier | string; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Overrides the sealed state of the destination blob. Default true. + */ + sealBlob?: boolean; +} +/** Contains response data for the startCopyFromURL operation. */ +export declare type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobStartCopyFromURLHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.syncCopyFromURL} operation. 
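+ *
+ * A sketch of a synchronous, one-shot copy, assuming an existing `blobClient` and a
+ * readable `sourceBlobUrl` (public or carrying a SAS). For large source blobs, the
+ * asynchronous {@link BlobClient.beginCopyFromURL} is the usual alternative.
+ *
+ * ```js
+ * await blobClient.syncCopyFromURL(sourceBlobUrl, {
+ *   tags: { origin: "import" },
+ * });
+ * ```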
+ */ +export declare interface BlobSyncCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | PremiumPageBlobTier | string; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Default 'REPLACE'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} +export declare interface BlobTag { + key: string; + value: string; +} +/** Blob tags */ +export declare interface BlobTags { + blobTagSet: BlobTag[]; +} +/** Defines values for BlobType. */ +export declare type BlobType = "BlockBlob" | "PageBlob" | "AppendBlob"; +/** Defines headers for Blob_undelete operation. */ +export declare interface BlobUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.undelete} operation. + */ +export declare interface BlobUndeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the undelete operation. */ +export declare type BlobUndeleteResponse = BlobUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobUndeleteHeaders; + }; +}; +/** + * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and + * {@link BlockBlobClient.uploadBrowserDate}. + */ +export declare type BlobUploadCommonResponse = BlockBlobUploadHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse; +}; +/** Represents a single block in a block blob. It describes the block's ID and size. */ +export declare interface Block { + /** The base64 encoded block ID. */ + name: string; + /** The block size in bytes. */ + size: number; +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export declare class BlockBlobClient extends BlobClient { + /** + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. + */ + private _blobContext; + /** + * blockBlobContext provided by protocol layer. + */ + private blockBlobContext; + /** + * + * Creates an instance of BlockBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". 
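+ *
+ * A construction sketch for this overload (assumes `@azure/identity` is installed;
+ * the account, container and blob names are placeholders):
+ *
+ * ```js
+ * const { BlockBlobClient } = require("@azure/storage-blob");
+ * const { DefaultAzureCredential } = require("@azure/identity");
+ *
+ * const blockBlobClient = new BlockBlobClient(
+ *   "https://myaccount.blob.core.windows.net/mycontainer/hello.txt",
+ *   new DefaultAzureCredential()
+ * );
+ * ```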
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): BlockBlobClient; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + query(query: string, options?: BlockBlobQueryOptions): Promise; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. 
+ *
+ * This is a non-parallel uploading method; please use {@link uploadFile},
+ * {@link uploadStream} or {@link uploadBrowserData} for better performance
+ * with concurrency uploading.
+ *
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function
+ * which returns a new Readable stream whose offset is from data source beginning.
+ * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
+ * string including non-Base64/Hex-encoded characters.
+ * @param options - Options to the Block Blob Upload operation.
+ * @returns Response data for the Block Blob Upload operation.
+ *
+ * Example usage:
+ *
+ * ```js
+ * const content = "Hello world!";
+ * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
+ * ```
+ */
+ upload(body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise<BlockBlobUploadResponse>;
+ /**
+ * Creates a new Block Blob where the contents of the blob are read from a given URL.
+ * This API is supported beginning with the 2020-04-08 version. Partial updates
+ * are not supported with Put Blob from URL; the content of an existing blob is overwritten with
+ * the content of the new blob. To perform partial updates to a block blob’s contents using a
+ * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.
+ *
+ * @param sourceURL - Specifies the URL of the blob. The value
+ * may be a URL of up to 2 KB in length that specifies a blob.
+ * The value should be URL-encoded as it would appear
+ * in a request URI. The source blob must either be public
+ * or must be authenticated via a shared access signature.
+ * If the source blob is public, no authentication is required
+ * to perform the operation. Here are some examples of source object URLs:
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=<DateTime>
+ * @param options - Optional parameters.
+ */
+ syncUploadFromURL(sourceURL: string, options?: BlockBlobSyncUploadFromURLOptions): Promise<BlockBlobPutBlobFromUrlResponse>;
+ /**
+ * Uploads the specified block to the block blob's "staging area" to be later
+ * committed by a call to commitBlockList.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-block
+ *
+ * @param blockId - A 64-byte value that is base64-encoded
+ * @param body - Data to upload to the staging area.
+ * @param contentLength - Number of bytes to upload.
+ * @param options - Options to the Block Blob Stage Block operation.
+ * @returns Response data for the Block Blob Stage Block operation.
+ */
+ stageBlock(blockId: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobStageBlockOptions): Promise<BlockBlobStageBlockResponse>;
+ /**
+ * The Stage Block From URL operation creates a new block to be committed as part
+ * of a blob where the contents are read from a URL.
+ * This API is available starting in version 2018-03-28.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url
+ *
+ * @param blockId - A 64-byte value that is base64-encoded
+ * @param sourceURL - Specifies the URL of the blob. The value
+ * may be a URL of up to 2 KB in length that specifies a blob.
+ * The value should be URL-encoded as it would appear
+ * in a request URI. The source blob must either be public
+ * or must be authenticated via a shared access signature.
+ * If the source blob is public, no authentication is required
+ * to perform the operation.
Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. + */ + stageBlockFromURL(blockId: string, sourceURL: string, offset?: number, count?: number, options?: BlockBlobStageBlockFromURLOptions): Promise; + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + commitBlockList(blocks: string[], options?: BlockBlobCommitBlockListOptions): Promise; + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. + */ + getBlockList(listType: BlockListType, options?: BlockBlobGetBlockListOptions): Promise; + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + uploadData(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise; + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. 
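+ *
+ * A sketch of the stage-and-commit flow described above, assuming an existing
+ * `blockBlobClient`; the `chunks` array is illustrative data:
+ *
+ * ```js
+ * const chunks = [Buffer.from("hello "), Buffer.from("world")];
+ * const blockIds = [];
+ * for (let i = 0; i < chunks.length; i++) {
+ *   // Block ids within one blob must be base64 strings of equal length.
+ *   const blockId = Buffer.from(String(i).padStart(6, "0")).toString("base64");
+ *   await blockBlobClient.stageBlock(blockId, chunks[i], chunks[i].length);
+ *   blockIds.push(blockId);
+ * }
+ * await blockBlobClient.commitBlockList(blockIds);
+ * ```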
+ * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. + */ + uploadBrowserData(browserData: Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise; + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + private uploadSeekableInternal; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + uploadFile(filePath: string, options?: BlockBlobParallelUploadOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + uploadStream(stream: Readable, bufferSize?: number, maxConcurrency?: number, options?: BlockBlobUploadStreamOptions): Promise; +} +/** Defines headers for BlockBlob_commitBlockList operation. */ +export declare interface BlockBlobCommitBlockListHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** This header is returned so that the client can check for message content integrity. 
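+ *
+ * A sketch of streaming a local file into the blob (Node.js only), assuming an
+ * existing `blockBlobClient`; the file path is a placeholder:
+ *
+ * ```js
+ * const fs = require("fs");
+ *
+ * const stream = fs.createReadStream("./large-file.bin");
+ * await blockBlobClient.uploadStream(stream, 4 * 1024 * 1024, 5, {
+ *   blobHTTPHeaders: { blobContentType: "application/octet-stream" },
+ * });
+ * ```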
This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.commitBlockList} operation. + */ +export declare interface BlockBlobCommitBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when committing the block list. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when committing block list. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when committing block list. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. 
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the commitBlockList operation. */ +export declare type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobCommitBlockListHeaders; + }; +}; +/** Defines headers for BlockBlob_getBlockList operation. */ +export declare interface BlockBlobGetBlockListHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The media type of the body of the response. For Get Block List this is 'application/xml' */ + contentType?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.getBlockList} operation. + */ +export declare interface BlockBlobGetBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; +} +/** Contains response data for the getBlockList operation. */ +export declare type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders & BlockList & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlockList; + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobGetBlockListHeaders; + }; +}; +/** + * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}. + */ +export declare interface BlockBlobParallelUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Destination block blob size in bytes. + */ + blockSize?: number; + /** + * Blob size threshold in bytes to start concurrency uploading. 
+ * Default value is 256MB, blob size less than this option will + * be uploaded via one I/O operation without concurrency. + * You can customize a value less equal than the default value. + */ + maxSingleShotSize?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Blob HTTP Headers. A common header to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel uploading. Must be greater than or equal to 0. + */ + concurrency?: number; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} +/** Defines headers for BlockBlob_putBlobFromUrl operation. */ +export declare interface BlockBlobPutBlobFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
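+ *
+ * A sketch of a parallel upload using these options, assuming an existing
+ * `blockBlobClient` (Node.js only; the file path is a placeholder):
+ *
+ * ```js
+ * await blockBlobClient.uploadFile("./backup.tar", {
+ *   blockSize: 8 * 1024 * 1024,          // 8 MiB blocks once staging kicks in
+ *   maxSingleShotSize: 32 * 1024 * 1024, // single PUT for anything at or below 32 MiB
+ *   concurrency: 4,
+ *   onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`),
+ * });
+ * ```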
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the putBlobFromUrl operation. */ +export declare type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobPutBlobFromUrlHeaders; + }; +}; +/** + * Options to configure {@link BlockBlobClient.query} operation. + */ +export declare interface BlockBlobQueryOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Configurations for the query input. + */ + inputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryParquetConfiguration; + /** + * Configurations for the query output. + */ + outputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryArrowConfiguration; + /** + * Callback to receive events on the progress of query operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback to receive error events during the query operaiton. + */ + onError?: (error: BlobQueryError) => void; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Defines headers for BlockBlob_stageBlockFromURL operation. */ +export declare interface BlockBlobStageBlockFromURLHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation. 
+ */ +export declare interface BlockBlobStageBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Specifies the bytes of the source Blob/File to upload. + * If not specified, the entire content is uploaded as a single block. + */ + range?: Range_2; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the stageBlockFromURL operation. */ +export declare type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockFromURLHeaders; + }; +}; +/** Defines headers for BlockBlob_stageBlock operation. */ +export declare interface BlockBlobStageBlockHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This header is returned so that the client can check for message content integrity. 
The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.stageBlock} operation. + */ +export declare interface BlockBlobStageBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * Callback to receive events on the progress of stage block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the stageBlock operation. */ +export declare type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockHeaders; + }; +}; +/** + * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation. + */ +export declare interface BlockBlobSyncUploadFromURLOptions extends CommonOptions { + /** + * Server timeout in seconds. + * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations + */ + timeoutInSeconds?: number; + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
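
`BlockBlobStageBlockOptions` above pairs with `BlockBlobClient.stageBlock` and `commitBlockList` for chunked uploads. A hedged sketch (connection string, container, and blob names are placeholders):

```ts
import { randomUUID } from "node:crypto";
import { BlobServiceClient } from "@azure/storage-blob";

async function stagedUpload(): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  )
    .getContainerClient("my-container")       // placeholder container
    .getBlockBlobClient("staged-upload.txt"); // placeholder blob

  const parts = [Buffer.from("hello, "), Buffer.from("world")];
  const blockIds: string[] = [];

  for (const part of parts) {
    // Block IDs must be base64 strings of equal length within one blob.
    const blockId = Buffer.from(randomUUID()).toString("base64");
    await blockBlobClient.stageBlock(blockId, part, part.length, {
      onProgress: (p) => console.log(`staged ${p.loadedBytes} bytes`),
    });
    blockIds.push(blockId);
  }

  // Committing the block list (in order) is what makes the blob visible.
  await blockBlobClient.commitBlockList(blockIds);
}

stagedUpload().catch(console.error);
```
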
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value + * pairs are specified, the operation will copy the metadata from the source blob or file to the + * destination blob. If one or more name-value pairs are specified, the destination blob is + * created with the specified metadata, and metadata is not copied from the source blob or file. + * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules + * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + * information. + */ + metadata?: Metadata; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Optional, default is true. Indicates if properties from the source blob should be copied. + */ + copySourceBlobProperties?: boolean; + /** + * HTTP headers to set when uploading to a block blob. + * + * A common header to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Conditions to meet for the destination Azure Blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Conditions to meet for the source Azure Blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export declare enum BlockBlobTier { + /** + * Optimized for storing data that is accessed frequently. + */ + Hot = "Hot", + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + Cool = "Cool", + /** + * Optimized for storing data that is rarely accessed. + */ + Cold = "Cold", + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + Archive = "Archive" +} +/** Defines headers for BlockBlob_upload operation. */ +export declare interface BlockBlobUploadHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. 
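
`BlockBlobSyncUploadFromURLOptions` and `BlockBlobTier` above correspond to `BlockBlobClient.syncUploadFromURL`, which copies a source URL into a block blob in a single synchronous call. A sketch under the assumption that the source URL is readable by the service (public or carrying its own SAS); all names are placeholders:

```ts
import { BlobServiceClient, BlockBlobTier } from "@azure/storage-blob";

async function copyFromUrl(): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  )
    .getContainerClient("my-container")     // placeholder container
    .getBlockBlobClient("copied-blob.bin"); // placeholder blob

  // Placeholder source; must be accessible to the storage service.
  const sourceUrl = "https://example.com/source.bin";

  await blockBlobClient.syncUploadFromURL(sourceUrl, {
    tier: BlockBlobTier.Cool,       // access tier, per the enum above
    tags: { project: "demo" },      // blob tags
    copySourceBlobProperties: true, // default: copy source HTTP properties
  });
}

copyFromUrl().catch(console.error);
```
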
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.upload} operation. + */ +export declare interface BlockBlobUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when uploading to a block blob. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when uploading to a block blob. + */ + metadata?: Metadata; + /** + * Callback to receive events on the progress of upload operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the upload operation. */ +export declare type BlockBlobUploadResponse = BlockBlobUploadHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobUploadHeaders; + }; +}; +/** + * Option interface for the {@link BlockBlobClient.uploadStream} operation. + */ +export declare interface BlockBlobUploadStreamOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Blob HTTP Headers. + * + * A common header to set is `blobContentType`, enabling the + * browser to provide functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} +export declare interface BlockList { + committedBlocks?: Block[]; + uncommittedBlocks?: Block[]; +} +/** Defines values for BlockListType. */ +export declare type BlockListType = "committed" | "uncommitted" | "all"; +declare interface ClearRange { + start: number; + end: number; +} +/** + * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}. + */ +export declare interface CommonGenerateSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. 
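
The `BlockBlobUploadOptions` / `BlockBlobUploadStreamOptions` pair above is consumed by `BlockBlobClient.upload` and `BlockBlobClient.uploadStream` respectively. A sketch with a hypothetical local file path and placeholder blob names:

```ts
import { createReadStream } from "node:fs";
import { BlobServiceClient } from "@azure/storage-blob";

async function uploadExamples(): Promise<void> {
  const containerClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  ).getContainerClient("my-container"); // placeholder container

  // Single-shot upload of an in-memory string (BlockBlobUploadOptions).
  const content = "Hello world!";
  await containerClient.getBlockBlobClient("greeting.txt").upload(content, content.length, {
    blobHTTPHeaders: { blobContentType: "text/plain" },
    metadata: { source: "example" },
  });

  // Streaming upload of a local file (BlockBlobUploadStreamOptions);
  // "./big-file.bin" is a hypothetical path.
  await containerClient.getBlockBlobClient("big-file.bin").uploadStream(
    createReadStream("./big-file.bin"),
    4 * 1024 * 1024, // buffer size per block
    5,               // max concurrency
    { onProgress: (p) => console.log(`uploaded ${p.loadedBytes} bytes`) }
  );
}

uploadExamples().catch(console.error);
```
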
+ */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; +} +/** + * An interface for options common to every remote operation. + */ +export declare interface CommonOptions { + /** + * Options to configure spans created when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options to configure Container - Acquire Lease operation. + */ +export declare interface ContainerAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** Optional parameters. */ +export declare interface ContainerBreakLeaseOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */ + breakPeriod?: number; +} +/** + * Options to configure Container - Break Lease operation. + */ +export declare interface ContainerBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Container - Change Lease operation. + */ +export declare interface ContainerChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export declare class ContainerClient extends StorageClient { + /** + * containerContext provided by protocol layer. + */ + private containerContext; + private _containerName; + /* + * The name of the container. 
+ */ + readonly containerName: string; + /** + * + * Creates an instance of ContainerClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - Options to Container Create operation. 
+ * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + create(options?: ContainerCreateOptions): Promise; + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - + */ + createIfNotExists(options?: ContainerCreateOptions): Promise; + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + exists(options?: ContainerExistsOptions): Promise; + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName: string): BlobClient; + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName: string): AppendBlobClient; + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName: string): BlockBlobClient; + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName: string): PageBlobClient; + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + getProperties(options?: ContainerGetPropertiesOptions): Promise; + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + delete(options?: ContainerDeleteMethodOptions): Promise; + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. 
+ */ + deleteIfExists(options?: ContainerDeleteMethodOptions): Promise; + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. + */ + setMetadata(metadata?: Metadata, options?: ContainerSetMetadataOptions): Promise; + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + getAccessPolicy(options?: ContainerGetAccessPolicyOptions): Promise; + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + setAccessPolicy(access?: PublicAccessType, containerAcl?: SignedIdentifier[], options?: ContainerSetAccessPolicyOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. 
+ * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + uploadBlockBlob(blobName: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise<{ + blockBlobClient: BlockBlobClient; + response: BlockBlobUploadResponse; + }>; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + deleteBlob(blobName: string, options?: ContainerDeleteBlobOptions): Promise; + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + private listBlobFlatSegment; + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. + */ + private listBlobHierarchySegment; + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. 
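
The `ContainerClient` members declared above (`createIfNotExists`, `uploadBlockBlob`, `deleteBlob`) compose into a simple container lifecycle. A sketch with placeholder names, assuming the same connection-string setup used in the JSDoc examples:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function containerLifecycle(): Promise<void> {
  const containerClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  ).getContainerClient("lifecycle-demo"); // placeholder container name

  // Create the container only if it does not already exist.
  const { succeeded } = await containerClient.createIfNotExists();
  console.log(succeeded ? "container created" : "container already existed");

  // Convenience upload that returns both the response and a BlockBlobClient.
  const content = "temporary payload";
  const { blockBlobClient } = await containerClient.uploadBlockBlob(
    "scratch.txt",
    content,
    Buffer.byteLength(content)
  );
  console.log(`uploaded to ${blockBlobClient.url}`);

  // Delete the blob again; the container itself is left in place.
  await containerClient.deleteBlob("scratch.txt");
}

containerLifecycle().catch(console.error);
```
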
+ */ + private listItems; + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options?: ContainerListBlobsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listHierarchySegments; + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + private listItemsByHierarchy; + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter: string, options?: ContainerListBlobsOptions): PagedAsyncIterableIterator<({ + kind: "prefix"; + } & BlobPrefix) | ({ + kind: "blob"; + } & BlobItem), ContainerListBlobHierarchySegmentResponse>; + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. 
+ * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ContainerFindBlobByTagsOptions): PagedAsyncIterableIterator; + private getContainerNameFromUrl; + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient(): BlobBatchClient; +} +/** Defines headers for Container_create operation. */ +export declare interface ContainerCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link ContainerClient.createIfNotExists} operation. + */ +export declare interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse { + /** + * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure {@link ContainerClient.create} operation. + */ +export declare interface ContainerCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the container. + */ + metadata?: Metadata; + /** + * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include: + * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account. + * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request. + */ + access?: PublicAccessType; + /** + * Container encryption scope info. + */ + containerEncryptionScope?: ContainerEncryptionScope; +} +/** Contains response data for the create operation. */ +export declare type ContainerCreateResponse = ContainerCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerCreateHeaders; + }; +}; +/** + * Options to configure the {@link ContainerClient.deleteBlob} operation. + */ +export declare interface ContainerDeleteBlobOptions extends BlobDeleteOptions { + /** + * An opaque DateTime value that, when present, specifies the version + * of the blob to delete. It's for service version 2019-10-10 and newer. + */ + versionId?: string; +} +/** Defines headers for Container_delete operation. */ +export declare interface ContainerDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link ContainerClient.deleteIfExists} operation. + */ +export declare interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse { + /** + * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place. + */ + succeeded: boolean; +} +/** + * Options to configure {@link ContainerClient.delete} operation. + */ +export declare interface ContainerDeleteMethodOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting the container. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the delete operation. */ +export declare type ContainerDeleteResponse = ContainerDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerDeleteHeaders; + }; +}; +/** Parameter group */ +export declare interface ContainerEncryptionScope { + /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */ + defaultEncryptionScope?: string; + /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */ + preventEncryptionScopeOverride?: boolean; +} +/** + * Options to configure {@link ContainerClient.exists} operation. + */ +export declare interface ContainerExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Defines headers for Container_filterBlobs operation. */ +export declare interface ContainerFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ContainerFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; +} +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment & ContainerFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; +/** + * Options to configure {@link ContainerClient.generateSasUrl} operation. + */ +export declare interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: ContainerSASPermissions; +} +/** Defines headers for Container_getAccessPolicy operation. */ +export declare interface ContainerGetAccessPolicyHeaders { + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.getAccessPolicy} operation. + */ +export declare interface ContainerGetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** + * Contains response data for the {@link ContainerClient.getAccessPolicy} operation. + */ +export declare type ContainerGetAccessPolicyResponse = { + signedIdentifiers: SignedIdentifier[]; +} & ContainerGetAccessPolicyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerGetAccessPolicyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: SignedIdentifierModel[]; + }; +}; +/** Defines headers for Container_getProperties operation. 
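
`ContainerGenerateSasUrlOptions` above feeds `ContainerClient.generateSasUrl`, which only works when the client was constructed with a `StorageSharedKeyCredential`. A sketch with a placeholder account name, key, and container:

```ts
import {
  ContainerClient,
  ContainerSASPermissions,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

async function makeReadOnlySasUrl(): Promise<string> {
  // Placeholder account credentials; generateSasUrl requires a shared key.
  const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");
  const containerClient = new ContainerClient(
    "https://myaccount.blob.core.windows.net/my-container",
    credential
  );

  // Read + list permissions, valid for one hour.
  return containerClient.generateSasUrl({
    permissions: ContainerSASPermissions.parse("rl"),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),
  });
}

makeReadOnlySasUrl().then((url) => console.log(url)).catch(console.error);
```
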
*/ +export declare interface ContainerGetPropertiesHeaders { + metadata?: { + [propertyName: string]: string; + }; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** Indicates whether the container has an immutability policy set on it. */ + hasImmutabilityPolicy?: boolean; + /** Indicates whether the container has a legal hold. */ + hasLegalHold?: boolean; + /** The default encryption scope for the container. */ + defaultEncryptionScope?: string; + /** Indicates whether the container's default encryption scope can be overriden. */ + denyEncryptionScopeOverride?: boolean; + /** Indicates whether version level worm is enabled on a container. */ + isImmutableStorageWithVersioningEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.getProperties} operation. + */ +export declare interface ContainerGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** Contains response data for the getProperties operation. */ +export declare type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerGetPropertiesHeaders; + }; +}; +/** An Azure Storage container */ +export declare interface ContainerItem { + name: string; + deleted?: boolean; + version?: string; + /** Properties of a container */ + properties: ContainerProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; +} +/** Defines headers for Container_listBlobFlatSegment operation. */ +export declare interface ContainerListBlobFlatSegmentHeaders { + /** The media type of the body of the response. 
For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the listBlobFlatSegment operation. + */ +export declare type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse & ContainerListBlobFlatSegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobFlatSegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsFlatSegmentResponseModel; + }; +}; +/** Defines headers for Container_listBlobHierarchySegment operation. */ +export declare interface ContainerListBlobHierarchySegmentHeaders { + /** The media type of the body of the response. For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the listBlobHierarchySegment operation. + */ +export declare type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse & ContainerListBlobHierarchySegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobHierarchySegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsHierarchySegmentResponseModel; + }; +}; +/** + * Options to configure Container - List Blobs operations. + * + * See: + * - {@link ContainerClient.listBlobsFlat} + * - {@link ContainerClient.listBlobsByHierarchy} + */ +export declare interface ContainerListBlobsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response. 
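+ *
+ * As a hedged illustration, these include flags are typically passed to
+ * {@link ContainerClient.listBlobsFlat} or {@link ContainerClient.listBlobsByHierarchy}
+ * (a `containerClient` is assumed to be in scope):
+ *
+ * ```js
+ * for await (const blob of containerClient.listBlobsFlat({ includeMetadata: true, includeSnapshots: true })) {
+ *   console.log(blob.name, blob.metadata);
+ * }
+ * ```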
+ */ + includeCopy?: boolean; + /** + * Specifies whether soft deleted blobs should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether blob metadata be returned in the response. + */ + includeMetadata?: boolean; + /** + * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response. + */ + includeSnapshots?: boolean; + /** + * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response. + */ + includeVersions?: boolean; + /** + * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response. + */ + includeUncommitedBlobs?: boolean; + /** + * Specifies whether blob tags be returned in the response. + */ + includeTags?: boolean; + /** + * Specifies whether deleted blob with versions be returned in the response. + */ + includeDeletedWithVersions?: boolean; + /** + * Specifies whether blob immutability policy be returned in the response. + */ + includeImmutabilityPolicy?: boolean; + /** + * Specifies whether blob legal hold be returned in the response. + */ + includeLegalHold?: boolean; +} +/** Properties of a container */ +export declare interface ContainerProperties { + lastModified: Date; + etag: string; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + publicAccess?: PublicAccessType; + hasImmutabilityPolicy?: boolean; + hasLegalHold?: boolean; + defaultEncryptionScope?: string; + preventEncryptionScopeOverride?: boolean; + deletedOn?: Date; + remainingRetentionDays?: number; + /** Indicates if version level worm is enabled on this container. */ + isImmutableStorageWithVersioningEnabled?: boolean; +} +/** + * Options to configure Container - Release Lease operation. + */ +export declare interface ContainerReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** Defines headers for Container_rename operation. */ +export declare interface ContainerRenameHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the rename operation. */ +export declare type ContainerRenameResponse = ContainerRenameHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerRenameHeaders; + }; +}; +/** + * Options to configure Container - Renew Lease operation. 
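+ *
+ * For illustration, a minimal lease-renewal sketch under assumed names (a `containerClient`
+ * is in scope; the 30-second duration is arbitrary):
+ *
+ * ```js
+ * const leaseClient = containerClient.getBlobLeaseClient();
+ * await leaseClient.acquireLease(30);
+ * // ... later, before the lease expires:
+ * await leaseClient.renewLease();
+ * ```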
+ */ +export declare interface ContainerRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Conditions to meet for the container. + */ +export declare interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions { +} +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class ContainerSASPermissions { + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): ContainerSASPermissions; + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specifies List access granted. + */ + list: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString(): string; +} +/** + * A type that looks like a Container SAS permission. + * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface ContainerSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted. 
*/ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specifies List access granted. + */ + list?: boolean; + /** + * Specifies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags?: boolean; +} +/** Defines headers for Container_setAccessPolicy operation. */ +export declare interface ContainerSetAccessPolicyHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.setAccessPolicy} operation. + */ +export declare interface ContainerSetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting the access policy. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the setAccessPolicy operation. */ +export declare type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetAccessPolicyHeaders; + }; +}; +/** Defines headers for Container_setMetadata operation. */ +export declare interface ContainerSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.setMetadata} operation. + */ +export declare interface ContainerSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the setMetadata operation. */ +export declare type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetMetadataHeaders; + }; +}; +/** Defines headers for Container_restore operation. */ +export declare interface ContainerUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the restore operation. */ +export declare type ContainerUndeleteResponse = ContainerUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerUndeleteHeaders; + }; +}; +/** + * Defines the operations from a {@link BlobClient} that are needed for the poller + * returned by {@link BlobClient.beginCopyFromURL} to work. + */ +export declare type CopyPollerBlobClient = Pick & { + startCopyFromURL(copySource: string, options?: BlobStartCopyFromURLOptions): Promise; +}; +/** Defines values for CopyStatusType. */ +export declare type CopyStatusType = "pending" | "success" | "aborted" | "failed"; +/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */ +export declare interface CorsRule { + /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. Note that the origin must be an exact case-sensitive match with the origin that the user agent sends to the service. 
You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */ + allowedOrigins: string; + /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */ + allowedMethods: string; + /** The request headers that the origin domain may specify on the CORS request. */ + allowedHeaders: string; + /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */ + exposedHeaders: string; + /** The maximum amount of time that a browser should cache the preflight OPTIONS request. */ + maxAgeInSeconds: number; +} +/** Parameter group */ +export declare interface CpkInfo { + /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */ + encryptionKey?: string; + /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */ + encryptionKeySha256?: string; + /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is provided. */ + encryptionAlgorithm?: EncryptionAlgorithmType; +} +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host a credentialPolicyCreator factory which generates CredentialPolicy. + */ +declare abstract class Credential_2 implements RequestPolicyFactory { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy; +} +export { Credential_2 as Credential }; +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +export declare abstract class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; +} +/** + * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy. + */ +export declare type CredentialPolicyCreator = (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => CredentialPolicy; +/** Defines values for DeleteSnapshotsOptionType. */ +export declare type DeleteSnapshotsOptionType = "include" | "only"; +export { deserializationPolicy }; +/** + * Defines values for EncryptionAlgorithmType. \ + * {@link KnownEncryptionAlgorithmType} can be used interchangeably with EncryptionAlgorithmType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **AES256** + */ +export declare type EncryptionAlgorithmType = string; +/** + * Blob info from a {@link BlobServiceClient.findBlobsByTags} + */ +export declare interface FilterBlobItem { + /** + * Blob Name. + */ + name: string; + /** + * Container Name. + */ + containerName: string; + /** + * Blob Tags. + */ + tags?: Tags; + /** + * Tag value. + * + * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags. 
+ */ + tagValue: string; +} +/** Blob info from a Filter Blobs API call */ +export declare interface FilterBlobItemModel { + name: string; + containerName: string; + /** Blob tags */ + tags?: BlobTags; +} +/** + * Segment response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface FilterBlobSegment { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItem[]; + continuationToken?: string; +} +/** The result of a Filter Blobs API call */ +export declare interface FilterBlobSegmentModel { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItemModel[]; + continuationToken?: string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateAccountSASQueryParameters(accountSASSignatureValues: AccountSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign a value to identifier or expiresOn & permissions manually if you initialize with + * this constructor. + * + * Fill in the required details before running the following snippets. + * + * Example usage: + * + * ```js + * // Generate service level SAS for a container + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using an identifier: + * + * ```js + * // Generate service level SAS for a container with identifier + * // startsOn & permissions are optional when identifier is provided + * const identifier = "unique-id"; + * await containerClient.setAccessPolicy(undefined, [ + * { + * accessPolicy: { + * expiresOn: new Date(new Date().valueOf() + 86400), // Date type + * permissions: ContainerSASPermissions.parse("racwdl").toString(), + * startsOn: new Date() // Date type + * }, + * id: identifier + * } + * ]); + * + * const containerSAS = generateBlobSASQueryParameters( + * { + * containerName, // Required + * identifier // Required + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using a blob name: + * + * ```js + * // Generate service level SAS for a blob + * const blobSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * blobName, // Required + * permissions: BlobSASPermissions.parse("racwd"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type + * cacheControl: "cache-control-override", // Optional + * contentDisposition: "content-disposition-override", // Optional + * contentEncoding: "content-encoding-override", // Optional + * contentLanguage: "content-language-override", // Optional + * contentType: "content-type-override", // Optional + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required. + * + * Example usage: + * + * ```js + * // Generate user delegation SAS for a container + * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn); + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn, // Optional. Date type + * expiresOn, // Required. 
Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2018-11-09" // Must greater than or equal to 2018-11-09 to generate user delegation SAS + * }, + * userDelegationKey, // UserDelegationKey + * accountName + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @param accountName - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, userDelegationKey: UserDelegationKey, accountName: string): SASQueryParameters; +/** Geo-Replication information for the Secondary Storage Service */ +export declare interface GeoReplication { + /** The status of the secondary location */ + status: GeoReplicationStatusType; + /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */ + lastSyncOn: Date; +} +/** Defines values for GeoReplicationStatusType. */ +export declare type GeoReplicationStatusType = "live" | "bootstrap" | "unavailable"; +export declare function getBlobServiceAccountAudience(storageAccountName: string): string; +/** + * Represents authentication information in Authorization, ProxyAuthorization, + * WWW-Authenticate, and Proxy-Authenticate header values. + */ +export declare interface HttpAuthorization { + /** + * The scheme to use for authorization. + */ + scheme: string; + /** + * the credentials containing the authentication information of the user agent for the resource being requested. + */ + value: string; +} +export { HttpHeaders }; +export { HttpOperationResponse }; +export { HttpRequestBody }; +export { IHttpClient }; +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export declare function isPipelineLike(pipeline: unknown): pipeline is PipelineLike; +/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */ +export declare enum KnownEncryptionAlgorithmType { + AES256 = "AES256" +} +/** + * The details for a specific lease. + */ +export declare interface Lease { + /** + * The ETag contains a value that you can use to + * perform operations conditionally. If the request version is 2011-08-18 or + * newer, the ETag value will be in quotes. + */ + etag?: string; + /** + * Returns the date and time the container was + * last modified. Any operation that modifies the blob, including an update + * of the blob's metadata or properties, changes the last-modified time of + * the blob. + */ + lastModified?: Date; + /** + * Uniquely identifies a container's lease + */ + leaseId?: string; + /** + * Approximate time remaining in the lease + * period, in seconds. + */ + leaseTime?: number; + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + */ + requestId?: string; + /** + * Indicates the version of the Blob service used + * to execute the request. This header is returned for requests made against + * version 2009-09-19 and above. 
+ */ + version?: string; + /** + * UTC date/time value generated by the service that + * indicates the time at which the response was initiated + */ + date?: Date; + /** + * Error code if any associated with the response that returned + * the Lease information. + */ + errorCode?: string; +} +/** Parameter group */ +export declare interface LeaseAccessConditions { + /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */ + leaseId?: string; +} +/** Defines values for LeaseDurationType. */ +export declare type LeaseDurationType = "infinite" | "fixed"; +/** + * Configures lease operations. + */ +export declare interface LeaseOperationOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Contains the response data for operations that create, modify, or delete a lease. + * + * See {@link BlobLeaseClient}. + */ +export declare type LeaseOperationResponse = Lease & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: Lease; + }; +}; +/** Defines values for LeaseStateType. */ +export declare type LeaseStateType = "available" | "leased" | "expired" | "breaking" | "broken"; +/** Defines values for LeaseStatusType. */ +export declare type LeaseStatusType = "locked" | "unlocked"; +/** + * An enumeration of blobs + */ +export declare interface ListBlobsFlatSegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegment; + continuationToken?: string; +} +/** An enumeration of blobs */ +export declare interface ListBlobsFlatSegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegmentModel; + continuationToken?: string; +} +/** + * An enumeration of blobs + */ +export declare interface ListBlobsHierarchySegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegment; + continuationToken?: string; +} +/** An enumeration of blobs */ +export declare interface ListBlobsHierarchySegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegmentModel; + continuationToken?: string; +} +/** An enumeration of containers */ +export declare interface ListContainersSegmentResponse { + serviceEndpoint: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + containerItems: ContainerItem[]; + continuationToken?: string; +} +/** + * The `@azure/logger` configuration for this package. + */ +export declare const logger: AzureLogger; +/** Azure Analytics Logging settings. */ +export declare interface Logging { + /** The version of Storage Analytics to configure. */ + version: string; + /** Indicates whether all delete requests should be logged. */ + deleteProperty: boolean; + /** Indicates whether all read requests should be logged. */ + read: boolean; + /** Indicates whether all write requests should be logged. 
*/ + write: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy: RetentionPolicy; +} +/** + * Specifies HTTP options for conditional requests based on ETag matching. + */ +export declare interface MatchConditions { + /** + * Specify an ETag value to operate only on blobs with a matching value. + */ + ifMatch?: string; + /** + * Specify an ETag value to operate only on blobs without a matching value. + */ + ifNoneMatch?: string; +} +/** + * A map of name-value pairs to associate with the resource. + */ +export declare interface Metadata { + /** + * A name-value pair. + */ + [propertyName: string]: string; +} +/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ +export declare interface Metrics { + /** The version of Storage Analytics to configure. */ + version?: string; + /** Indicates whether metrics are enabled for the Blob service. */ + enabled: boolean; + /** Indicates whether metrics should generate summary statistics for called API operations. */ + includeAPIs?: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy?: RetentionPolicy; +} +/** + * Specifies HTTP options for conditional requests based on modification time. + */ +export declare interface ModificationConditions { + /** + * Specify this header value to operate only on a blob if it has been modified since the + * specified date/time. + */ + ifModifiedSince?: Date; + /** + * Specify this header value to operate only on a blob if it has not been modified since the + * specified date/time. + */ + ifUnmodifiedSince?: Date; +} +/** + * standard HTTP conditional headers and tags condition. + */ +export declare interface ModifiedAccessConditions extends MatchConditions, ModificationConditions, TagConditions { +} +/** Parameter group */ +export declare interface ModifiedAccessConditionsModel { + /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */ + ifModifiedSince?: Date; + /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */ + ifUnmodifiedSince?: Date; + /** Specify an ETag value to operate only on blobs with a matching value. */ + ifMatch?: string; + /** Specify an ETag value to operate only on blobs without a matching value. */ + ifNoneMatch?: string; + /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */ + ifTags?: string; +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export declare function newPipeline(credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, pipelineOptions?: StoragePipelineOptions): Pipeline; +/** + * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}. + * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the + * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses + * (e.g. 
{@link BlobProperties.ObjectReplicationDestinationPolicyId}. + */ +export declare interface ObjectReplicationPolicy { + /** + * The Object Replication Policy ID. + */ + policyId: string; + /** + * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID. + */ + rules: ObjectReplicationRule[]; +} +/** + * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob. + * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}. + */ +export declare interface ObjectReplicationRule { + /** + * The Object Replication Rule ID. + */ + ruleId: string; + /** + * The Replication Status + */ + replicationStatus: ObjectReplicationStatus; +} +/** + * Specifies the Replication Status of a blob. This is used when a storage account has + * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}. + */ +export declare type ObjectReplicationStatus = "complete" | "failed"; +/** Defines headers for PageBlob_clearPages operation. */ +export declare interface PageBlobClearPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.clearPages} operation. + */ +export declare interface PageBlobClearPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when clearing pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. 
For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the clearPages operation. */ +export declare type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobClearPagesHeaders; + }; +}; +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export declare class PageBlobClient extends BlobClient { + /** + * pageBlobsContext provided by protocol layer. + */ + private pageBlobContext; + /** + * + * Creates an instance of PageBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A Client string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * + * @param url - A URL string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". 
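+ *
+ * For illustration only (assuming a `containerClient` is already in scope and a Node.js runtime),
+ * a page blob client is more commonly obtained from a container client and used like this:
+ *
+ * ```js
+ * const pageBlobClient = containerClient.getPageBlobClient("mypageblob");
+ * await pageBlobClient.create(1024); // size must be a multiple of 512
+ * const page = Buffer.alloc(512, "a");
+ * await pageBlobClient.uploadPages(page, 0, page.length); // offset and count are 512-aligned
+ * ```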
+ * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Providing "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): PageBlobClient; + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + create(size: number, options?: PageBlobCreateOptions): Promise; + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + createIfNotExists(size: number, options?: PageBlobCreateIfNotExistsOptions): Promise; + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + uploadPages(body: HttpRequestBody, offset: number, count: number, options?: PageBlobUploadPagesOptions): Promise; + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, a Shared Access Signature (SAS) may be needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + uploadPagesFromURL(sourceURL: string, sourceOffset: number, destOffset: number, count: number, options?: PageBlobUploadPagesFromURLOptions): Promise; + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. + */ + clearPages(offset?: number, count?: number, options?: PageBlobClearPagesOptions): Promise; + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. 
+ * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + getPageRanges(offset?: number, count?: number, options?: PageBlobGetPageRangesOptions): Promise; + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + private listPageRangesSegment; + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + private listPageRangeItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + private listPageRangeItems; + /** + * Returns an async iterable iterator to list the page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list the page ranges for a page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset?: number, count?: number, options?: PageBlobListPageRangesOptions): PagedAsyncIterableIterator; + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + getPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobGetPageRangesDiffOptions): Promise; + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. 
+ * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangesDiffSegment; + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItems; + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRangesDiff();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ *   pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+ *   for (const pageRange of response) {
+ *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ *   }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints up to 10 page ranges
+ * for (const pageRange of response) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobListPageRangesDiffOptions): PagedAsyncIterableIterator<PageRangeInfo, PageBlobGetPageRangesDiffResponseModel>;
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
+ */
+ getPageRangesDiffForManagedDisks(offset: number, count: number, prevSnapshotUrl: string, options?: PageBlobGetPageRangesDiffOptions): Promise<PageBlobGetPageRangesDiffResponse>;
+ /**
+ * Resizes the page blob to the specified size (which must be a multiple of 512).
+ * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties
+ *
+ * @param size - Target size
+ * @param options - Options to the Page Blob Resize operation.
+ * @returns Response data for the Page Blob Resize operation.
+ */
+ resize(size: number, options?: PageBlobResizeOptions): Promise<PageBlobResizeResponse>;
+ /**
+ * Sets a page blob's sequence number.
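+ *
+ * A minimal usage sketch (assumes an existing `PageBlobClient` named `pageBlobClient`):
+ *
+ * ```js
+ * // Set the blob's sequence number to 10, then read it back from the blob properties.
+ * await pageBlobClient.updateSequenceNumber("update", 10);
+ * const properties = await pageBlobClient.getProperties();
+ * console.log(`Current sequence number: ${properties.blobSequenceNumber}`);
+ * ```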
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+ *
+ * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.
+ * @param sequenceNumber - Required if sequenceNumberAction is max or update
+ * @param options - Options to the Page Blob Update Sequence Number operation.
+ * @returns Response data for the Page Blob Update Sequence Number operation.
+ */
+ updateSequenceNumber(sequenceNumberAction: SequenceNumberActionType, sequenceNumber?: number, options?: PageBlobUpdateSequenceNumberOptions): Promise<PageBlobUpdateSequenceNumberResponse>;
+ /**
+ * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.
+ * The snapshot is copied such that only the differential changes between the previously
+ * copied snapshot are transferred to the destination.
+ * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.
+ * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob
+ * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots
+ *
+ * @param copySource - Specifies the name of the source page blob snapshot. For example,
+ * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+ * @param options - Options to the Page Blob Copy Incremental operation.
+ * @returns Response data for the Page Blob Copy Incremental operation.
+ */
+ startCopyIncremental(copySource: string, options?: PageBlobStartCopyIncrementalOptions): Promise<PageBlobCopyIncrementalResponse>;
+}
+/** Defines headers for PageBlob_copyIncremental operation. */
+export declare interface PageBlobCopyIncrementalHeaders {
+ /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */
+ etag?: string;
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */
+ lastModified?: Date;
+ /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
+ clientRequestId?: string;
+ /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
+ requestId?: string;
+ /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
+ version?: string;
+ /** UTC date/time value generated by the service that indicates the time at which the response was initiated */
+ date?: Date;
+ /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */
+ copyId?: string;
+ /** State of the copy operation identified by x-ms-copy-id. */
+ copyStatus?: CopyStatusType;
+ /** Error Code */
+ errorCode?: string;
+}
+/** Contains response data for the copyIncremental operation. */
+export declare type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & {
+ /** The underlying HTTP response. */
+ _response: coreHttp.HttpResponse & {
+ /** The parsed HTTP response headers. */
+ parsedHeaders: PageBlobCopyIncrementalHeaders;
+ };
+};
+/** Defines headers for PageBlob_create operation.
*/ +export declare interface PageBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: PremiumPageBlobTier | string; +} +/** + * Contains response data for the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure the {@link PageBlobClient.create} operation. + */ +export declare interface PageBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating a page blob. + */ + conditions?: BlobRequestConditions; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: PremiumPageBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the create operation. */ +export declare type PageBlobCreateResponse = PageBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobCreateHeaders; + }; +}; +/** Defines headers for PageBlob_getPageRangesDiff operation. */ +export declare interface PageBlobGetPageRangesDiffHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.getRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; + /** + * (unused) + */ + range?: string; +} +/** + * Contains response data for the {@link BlobClient.getPageRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffResponse extends PageList, PageBlobGetPageRangesDiffHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} +/** Contains response data for the getPageRangesDiff operation. */ +export declare type PageBlobGetPageRangesDiffResponseModel = PageBlobGetPageRangesDiffHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + }; +}; +/** Defines headers for PageBlob_getPageRanges operation. */ +export declare interface PageBlobGetPageRangesHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} +/** + * Contains response data for the {@link BlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} +/** Contains response data for the getPageRanges operation. */ +export declare type PageBlobGetPageRangesResponseModel = PageBlobGetPageRangesHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesHeaders; + }; +}; +/** + * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation. + */ +export declare interface PageBlobListPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; +} +/** + * Options to configure the {@link PageBlobClient.listPageRanges} operation. + */ +export declare interface PageBlobListPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} +/** + * Conditions to add to the creation of this page blob. + */ +export declare interface PageBlobRequestConditions extends BlobRequestConditions, SequenceNumberAccessConditions { +} +/** Defines headers for PageBlob_resize operation. */ +export declare interface PageBlobResizeHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. 
This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.resize} operation. + */ +export declare interface PageBlobResizeOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when resizing a page blob. + */ + conditions?: BlobRequestConditions; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the resize operation. */ +export declare type PageBlobResizeResponse = PageBlobResizeHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobResizeHeaders; + }; +}; +/** + * Options to configure {@link PageBlobClient.startCopyIncremental} operation. + */ +export declare interface PageBlobStartCopyIncrementalOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when starting a copy incremental operation. + */ + conditions?: ModifiedAccessConditions; +} +/** Defines headers for PageBlob_updateSequenceNumber operation. */ +export declare interface PageBlobUpdateSequenceNumberHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.updateSequenceNumber} operation. + */ +export declare interface PageBlobUpdateSequenceNumberOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: BlobRequestConditions; +} +/** Contains response data for the updateSequenceNumber operation. */ +export declare type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUpdateSequenceNumberHeaders; + }; +}; +/** Defines headers for PageBlob_uploadPagesFromURL operation. */ +export declare interface PageBlobUploadPagesFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation. + */ +export declare interface PageBlobUploadPagesFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: PageBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the uploadPagesFromURL operation. */ +export declare type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesFromURLHeaders; + }; +}; +/** Defines headers for PageBlob_uploadPages operation. */ +export declare interface PageBlobUploadPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.uploadPages} operation. + */ +export declare interface PageBlobUploadPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Callback to receive events on the progress of upload pages operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the uploadPages operation. */ +export declare type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesHeaders; + }; +}; +/** + * List of page ranges for a blob. + */ +export declare interface PageList { + /** + * Valid non-overlapping page ranges. + */ + pageRange?: Range_2[]; + /** + * Present if the prevSnapshot parameter was specified and there were cleared + * pages between the previous snapshot and the target snapshot. 
+ */ + clearRange?: Range_2[]; +} +/** the list of pages */ +declare interface PageList_2 { + pageRange?: PageRange[]; + clearRange?: ClearRange[]; + continuationToken?: string; +} +declare interface PageRange { + start: number; + end: number; +} +export declare interface PageRangeInfo { + start: number; + end: number; + isClear: boolean; +} +/** + * The multipart/mixed response which contains the response for each subrequest. + */ +export declare interface ParsedBatchResponse { + /** + * The parsed sub responses. + */ + subResponses: BatchSubResponse[]; + /** + * The succeeded executed sub responses' count; + */ + subResponsesSucceededCount: number; + /** + * The failed executed sub responses' count; + */ + subResponsesFailedCount: number; +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare class Pipeline implements PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories: RequestPolicyFactory[], options?: PipelineOptions); + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} +/** + * An interface for the {@link Pipeline} class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare interface PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} +/** + * Option interface for Pipeline constructor. + */ +export declare interface PipelineOptions { + /** + * Optional. Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; +} +export { PollerLike }; +/** + * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with. + */ +export declare interface PollerLikeWithCancellation, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. 
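+ *
+ * A minimal sketch (assumes a `BlobClient` named `blobClient` and a copy-source URL in `sourceUrl`):
+ *
+ * ```js
+ * // beginCopyFromURL returns a poller; pollUntilDone resolves with the final result.
+ * const poller = await blobClient.beginCopyFromURL(sourceUrl);
+ * const result = await poller.pollUntilDone();
+ * console.log(`Copy status: ${result.copyStatus}`);
+ * ```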
+ */ + pollUntilDone(): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * The TState defined in PollerLike can be a subset of the TState defined in + * the Poller implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} +export { PollOperationState }; +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export declare enum PremiumPageBlobTier { + /** + * P4 Tier. + */ + P4 = "P4", + /** + * P6 Tier. + */ + P6 = "P6", + /** + * P10 Tier. + */ + P10 = "P10", + /** + * P15 Tier. + */ + P15 = "P15", + /** + * P20 Tier. + */ + P20 = "P20", + /** + * P30 Tier. + */ + P30 = "P30", + /** + * P40 Tier. + */ + P40 = "P40", + /** + * P50 Tier. + */ + P50 = "P50", + /** + * P60 Tier. + */ + P60 = "P60", + /** + * P70 Tier. + */ + P70 = "P70", + /** + * P80 Tier. + */ + P80 = "P80" +} +/** Defines values for PublicAccessType. */ +export declare type PublicAccessType = "container" | "blob"; +/** + * Range for Blob Service Operations. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations + */ +declare interface Range_2 { + /** + * StartByte, larger than or equal 0. + */ + offset: number; + /** + * Optional. Count of bytes, larger than 0. + * If not provided, will return bytes from offset to the end. + */ + count?: number; +} +export { Range_2 as Range }; +/** Defines values for RehydratePriority. */ +export declare type RehydratePriority = "High" | "Standard"; +export { RequestPolicy }; +export { RequestPolicyFactory }; +export { RequestPolicyOptions }; +export { RestError }; +/** the retention policy which determines how long the associated data should persist */ +export declare interface RetentionPolicy { + /** Indicates whether a retention policy is enabled for the storage service */ + enabled: boolean; + /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */ + days?: number; +} +/** + * Allowed IP range for a SAS. + */ +export declare interface SasIPRange { + /** + * Starting IP address in the IP range. 
+ * If end IP doesn't provide, start IP will the only IP allowed. + */ + start: string; + /** + * Optional. IP address that ends the IP range. + * If not provided, start IP will the only IP allowed. + */ + end?: string; +} +/** + * Protocols for generated SAS. + */ +export declare enum SASProtocol { + /** + * Protocol that allows HTTPS only + */ + Https = "https", + /** + * Protocol that allows both HTTPS and HTTP + */ + HttpsAndHttp = "https,http" +} +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. + */ +export declare class SASQueryParameters { + /** + * The storage API version. + */ + readonly version: string; + /** + * Optional. The allowed HTTP protocol(s). + */ + readonly protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + readonly startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + readonly expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + readonly permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + readonly services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + readonly resourceTypes?: string; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + readonly identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + readonly encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + readonly resource?: string; + /** + * The signature for the SAS token. + */ + readonly signature: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + readonly cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + readonly contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + readonly contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + readonly contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + readonly contentType?: string; + /** + * Inner value of getter ipRange. + */ + private readonly ipRangeInner?; + /** + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. 
+ */ + private readonly signedOid?; + /** + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. + */ + private readonly signedTenantId?; + /** + * The date-time the key is active. + * Property of user delegation key. + */ + private readonly signedStartsOn?; + /** + * The date-time the key expires. + * Property of user delegation key. + */ + private readonly signedExpiresOn?; + /** + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. + */ + private readonly signedService?; + /** + * The service version that created the user delegation key. + * Property of user delegation key. + */ + private readonly signedVersion?; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. + */ + readonly preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + readonly correlationId?: string; + /* + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + readonly ipRange: SasIPRange | undefined; + /** + * Creates an instance of SASQueryParameters. + * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param permissions - Representing the storage permissions + * @param services - Representing the storage services being accessed (only for Account SAS) + * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS) + * @param protocol - Representing the allowed HTTP protocol(s) + * @param startsOn - Representing the start time for this SAS token + * @param expiresOn - Representing the expiry time for this SAS token + * @param ipRange - Representing the range of valid IP addresses for this SAS token + * @param identifier - Representing the signed identifier (only for Service SAS) + * @param resource - Representing the storage container or blob (only for Service SAS) + * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS) + * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS) + * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS) + * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS) + * @param contentType - Representing the content-type header (only for Blob/File Service SAS) + * @param userDelegationKey - Representing the user delegation key properties + * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS) + * @param correlationId - Representing the correlation ID (only for User Delegation SAS) + * @param encryptionScope - + */ + constructor(version: string, signature: string, permissions?: string, services?: string, resourceTypes?: string, protocol?: SASProtocol, startsOn?: Date, expiresOn?: Date, ipRange?: SasIPRange, identifier?: string, resource?: string, 
cacheControl?: string, contentDisposition?: string, contentEncoding?: string, contentLanguage?: string, contentType?: string, userDelegationKey?: UserDelegationKey, preauthorizedAgentObjectId?: string, correlationId?: string, encryptionScope?: string); + /** + * Creates an instance of SASQueryParameters. + * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param options - Optional. Options to construct the SASQueryParameters. + */ + constructor(version: string, signature: string, options?: SASQueryParametersOptions); + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. + * + */ + toString(): string; + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + private tryAppendQueryParameter; +} +/** + * Options to construct {@link SASQueryParameters}. + */ +export declare interface SASQueryParametersOptions { + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + resourceTypes?: string; + /** + * Optional. The allowed HTTP protocol(s). + */ + protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + resource?: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + contentType?: string; + /** + * User delegation key properties. + */ + userDelegationKey?: UserDelegationKey; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. 
The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}. + * This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + correlationId?: string; +} +/** Parameter group */ +export declare interface SequenceNumberAccessConditions { + /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */ + ifSequenceNumberLessThanOrEqualTo?: number; + /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */ + ifSequenceNumberLessThan?: number; + /** Specify this header value to operate only on a blob if it has the specified sequence number. */ + ifSequenceNumberEqualTo?: number; +} +/** Defines values for SequenceNumberActionType. */ +export declare type SequenceNumberActionType = "max" | "update" | "increment"; +/** Defines headers for Service_filterBlobs operation. */ +export declare interface ServiceFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ServiceFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment & ServiceFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; +/** + * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation. + */ +export declare interface ServiceGenerateAccountSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional. IP range allowed. 
+ */ + ipRange?: SasIPRange; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} +/** Defines headers for Service_getAccountInfo operation. */ +export declare interface ServiceGetAccountInfoHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Identifies the sku name of the account */ + skuName?: SkuName; + /** Identifies the account kind */ + accountKind?: AccountKind; + /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */ + isHierarchicalNamespaceEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getAccountInfo} operation. + */ +export declare interface ServiceGetAccountInfoOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getAccountInfo operation. */ +export declare type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetAccountInfoHeaders; + }; +}; +/** Defines headers for Service_getProperties operation. */ +export declare interface ServiceGetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getProperties} operation. + */ +export declare interface ServiceGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getProperties operation. */ +export declare type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders & BlobServiceProperties & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceProperties; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetPropertiesHeaders; + }; +}; +/** Defines headers for Service_getStatistics operation. 
*/ +export declare interface ServiceGetStatisticsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getStatistics} operation. + */ +export declare interface ServiceGetStatisticsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getStatistics operation. */ +export declare type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders & BlobServiceStatistics & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceStatistics; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetStatisticsHeaders; + }; +}; +/** Defines headers for Service_getUserDelegationKey operation. */ +export declare interface ServiceGetUserDelegationKeyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the Service - Get User Delegation Key. + */ +export declare interface ServiceGetUserDelegationKeyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** + * Contains response data for the {@link getUserDelegationKey} operation. + */ +export declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey & ServiceGetUserDelegationKeyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceGetUserDelegationKeyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: UserDelegationKeyModel; + }; +}; +/** + * Options to configure the {@link BlobServiceClient.listContainers} operation. 
+ */ +export declare interface ServiceListContainersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether the container's metadata + * should be returned as part of the response body. + */ + includeMetadata?: boolean; + /** + * Specifies whether soft deleted containers should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether system containers should be included in the response. + */ + includeSystem?: boolean; +} +/** Defines headers for Service_listContainersSegment operation. */ +export declare interface ServiceListContainersSegmentHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the listContainersSegment operation. */ +export declare type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders & ListContainersSegmentResponse & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: ListContainersSegmentResponse; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceListContainersSegmentHeaders; + }; +}; +/** + * Options to configure {@link BlobServiceClient.renameContainer} operation. + */ +export declare interface ServiceRenameContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Condition to meet for the source container. + */ + sourceCondition?: LeaseAccessConditions; +} +/** Defines headers for Service_setProperties operation. */ +export declare interface ServiceSetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.setProperties} operation. + */ +export declare interface ServiceSetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the setProperties operation. */ +export declare type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSetPropertiesHeaders; + }; +}; +/** Defines headers for Service_submitBatch operation. */ +export declare interface ServiceSubmitBatchHeaders { + /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */ + contentType?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** Error Code */ + errorCode?: string; +} +/** Optional parameters. */ +export declare interface ServiceSubmitBatchOptionalParamsModel extends coreHttp.OperationOptions { + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; +} +/** Contains response data for the submitBatch operation. */ +export declare type ServiceSubmitBatchResponseModel = ServiceSubmitBatchHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; +/** + * Options to configure {@link BlobServiceClient.undeleteContainer} operation. + */ +export declare interface ServiceUndeleteContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies the new name of the restored container. + * Will use its original name if this is not specified. + * @deprecated Restore container to a different name is not supported by service anymore. + */ + destinationContainerName?: string; +} +/** + * Signed identifier. + */ +export declare interface SignedIdentifier { + /** + * a unique id + */ + id: string; + /** + * Access Policy + */ + accessPolicy: { + /** + * Optional. The date-time the policy is active + */ + startsOn?: Date; + /** + * Optional. 
The date-time the policy expires + */ + expiresOn?: Date; + /** + * The permissions for the acl policy + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + */ + permissions?: string; + }; +} +/** signed identifier */ +export declare interface SignedIdentifierModel { + /** a unique id */ + id: string; + /** An Access policy */ + accessPolicy: AccessPolicy; +} +/** Defines values for SkuName. */ +export declare type SkuName = "Standard_LRS" | "Standard_GRS" | "Standard_RAGRS" | "Standard_ZRS" | "Premium_LRS"; +/** The properties that enable an account to host a static website */ +export declare interface StaticWebsite { + /** Indicates whether this account is hosting a static website */ + enabled: boolean; + /** The default name of the index page under each directory */ + indexDocument?: string; + /** The absolute path of the custom 404 page */ + errorDocument404Path?: string; + /** Absolute path of the default index page */ + defaultIndexDocumentPath?: string; +} +/** + * Defines the known cloud audiences for Storage. + */ +export declare enum StorageBlobAudience { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageOAuthScopes = "https://storage.azure.com/.default", + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + DiskComputeOAuthScopes = "https://disk.compute.azure.com/.default" +} +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export declare class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; +} +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export declare class StorageBrowserPolicyFactory implements RequestPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy; +} +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +declare abstract class StorageClient { + /** + * Encoded URL string value. + */ + readonly url: string; + readonly accountName: string; + /* Excluded from this release type: pipeline */ + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. 
+ */ + protected readonly storageClientContext: StorageClientContext; + /** + */ + protected readonly isHttps: boolean; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + protected constructor(url: string, pipeline: PipelineLike); +} +declare class StorageClientContext extends coreHttp.ServiceClient { + url: string; + version: string; + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url: string, options?: StorageClientOptionalParams); +} +/** Optional parameters. */ +declare interface StorageClientOptionalParams extends coreHttp.ServiceClientOptions { + /** Specifies the version of the operation to use for this request. */ + version?: string; + /** Overrides client endpoint. */ + endpoint?: string; +} +/** + * The OAuth scope to use with Azure Storage. + */ +export declare const StorageOAuthScopes: string | string[]; +/** + * Options interface for the {@link newPipeline} function. + */ +export declare interface StoragePipelineOptions { + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; + /** + * Configures the built-in retry policy behavior. + */ + retryOptions?: StorageRetryOptions; + /** + * Keep alive configurations. Default keep-alive is enabled. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; + /** + * The audience used to retrieve an AAD token. + * By default, audience 'https://storage.azure.com/.default' will be used. + */ + audience?: string | string[]; +} +/** + * Storage Blob retry options interface. + */ +export declare interface StorageRetryOptions { + /** + * Optional. StorageRetryPolicyType, default is exponential retry policy. + */ + readonly retryPolicyType?: StorageRetryPolicyType; + /** + * Optional. Max try number of attempts, default is 4. + * A value of 1 means 1 try and no retries. + * A value smaller than 1 means default retry number of attempts. + */ + readonly maxTries?: number; + /** + * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request. + * A value of zero or undefined means no default timeout on SDK client, Azure + * Storage server's default timeout policy will be used. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations + */ + readonly tryTimeoutInMs?: number; + /** + * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms). + * The delay increases (exponentially or linearly) with each retry up to a maximum specified by + * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs. + */ + readonly retryDelayInMs?: number; + /** + * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms). + * If you specify 0, then you must also specify 0 for retryDelayInMs. + */ + readonly maxRetryDelayInMs?: number; + /** + * If a secondaryHost is specified, retries will be tried against this host. If secondaryHost is undefined + * (the default) then operations are not retried against another host. 
+ * + * NOTE: Before setting this field, make sure you understand the issues around + * reading stale and potentially-inconsistent data at + * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs} + */ + readonly secondaryHost?: string; +} +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export declare class StorageRetryPolicy extends BaseRequestPolicy { + /** + * RetryOptions. + */ + private readonly retryOptions; + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryOptions?: StorageRetryOptions); + /** + * Sends request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + protected attemptSendRequest(request: WebResource, secondaryHas404: boolean, attempt: number): Promise; + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + protected shouldRetry(isPrimaryRetry: boolean, attempt: number, response?: HttpOperationResponse, err?: RestError): boolean; + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + private delay; +} +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +export declare class StorageRetryPolicyFactory implements RequestPolicyFactory { + private retryOptions?; + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions?: StorageRetryOptions); + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy; +} +/** + * RetryPolicy types. + */ +export declare enum StorageRetryPolicyType { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + EXPONENTIAL = 0, + /** + * Linear retry. Retry time delay grows linearly. + */ + FIXED = 1 +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export declare class StorageSharedKeyCredential extends Credential_2 { + /** + * Azure Storage account name; readonly. + */ + readonly accountName: string; + /** + * Azure Storage account key; readonly. + */ + private readonly accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName: string, accountKey: string); + /** + * Creates a StorageSharedKeyCredentialPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageSharedKeyCredentialPolicy; + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign: string): string; +} +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export declare class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + private readonly factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, factory: StorageSharedKeyCredential); + /** + * Signs request. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + private getHeaderValueToSign; + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + private getCanonicalizedHeadersString; + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + private getCanonicalizedResourceString; +} +/** Defines values for SyncCopyStatusType. */ +export declare type SyncCopyStatusType = "success"; +/** + * Specifies HTTP options for conditional requests based on blob tags. + */ +export declare interface TagConditions { + /** + * Optional SQL statement to apply to the tags of the blob. + */ + tagConditions?: string; +} +/** + * Blob tags. + */ +export declare type Tags = Record; +/** + * A user delegation key. + */ +export declare interface UserDelegationKey { + /** + * The Azure Active Directory object ID in GUID format. + */ + signedObjectId: string; + /** + * The Azure Active Directory tenant ID in GUID format. + */ + signedTenantId: string; + /** + * The date-time the key is active. + */ + signedStartsOn: Date; + /** + * The date-time the key expires. + */ + signedExpiresOn: Date; + /** + * Abbreviation of the Azure Storage service that accepts the key. + */ + signedService: string; + /** + * The service version that created the key. + */ + signedVersion: string; + /** + * The key as a base64 string. + */ + value: string; +} +/** A user delegation key */ +export declare interface UserDelegationKeyModel { + /** The Azure Active Directory object ID in GUID format. 
*/ + signedObjectId: string; + /** The Azure Active Directory tenant ID in GUID format */ + signedTenantId: string; + /** The date-time the key is active */ + signedStartsOn: string; + /** The date-time the key expires */ + signedExpiresOn: string; + /** Abbreviation of the Azure Storage service that accepts the key */ + signedService: string; + /** The service version that created the key */ + signedVersion: string; + /** The key as a base64 string */ + value: string; +} +export { WebResource }; +export {}; diff --git a/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts b/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts new file mode 100644 index 0000000..75cab26 --- /dev/null +++ b/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts @@ -0,0 +1,9804 @@ +/// + +import { AbortSignalLike } from '@azure/abort-controller'; +import { AzureLogger } from '@azure/logger'; +import { BaseRequestPolicy } from '@azure/core-http'; +import { CancelOnProgress } from '@azure/core-lro'; +import * as coreHttp from '@azure/core-http'; +import { deserializationPolicy } from '@azure/core-http'; +import { HttpHeaders } from '@azure/core-http'; +import { HttpOperationResponse } from '@azure/core-http'; +import { HttpRequestBody } from '@azure/core-http'; +import { HttpResponse } from '@azure/core-http'; +import { HttpClient as IHttpClient } from '@azure/core-http'; +import { KeepAliveOptions } from '@azure/core-http'; +import { OperationTracingOptions } from '@azure/core-tracing'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; +import { PollerLike } from '@azure/core-lro'; +import { PollOperationState } from '@azure/core-lro'; +import { ProxyOptions } from '@azure/core-http'; +import { Readable } from 'stream'; +import { RequestPolicy } from '@azure/core-http'; +import { RequestPolicyFactory } from '@azure/core-http'; +import { RequestPolicyOptions } from '@azure/core-http'; +import { RestError } from '@azure/core-http'; +import { ServiceClientOptions } from '@azure/core-http'; +import { TokenCredential } from '@azure/core-http'; +import { TransferProgressEvent } from '@azure/core-http'; +import { UserAgentOptions } from '@azure/core-http'; +import { WebResource } from '@azure/core-http'; + +/** An Access policy */ +export declare interface AccessPolicy { + /** the date-time the policy is active */ + startsOn?: string; + /** the date-time the policy expires */ + expiresOn?: string; + /** the permissions for the acl policy */ + permissions?: string; +} + +/** Defines values for AccessTier. */ +export declare type AccessTier = "P4" | "P6" | "P10" | "P15" | "P20" | "P30" | "P40" | "P50" | "P60" | "P70" | "P80" | "Hot" | "Cool" | "Archive" | "Cold"; + +/** Defines values for AccountKind. */ +export declare type AccountKind = "Storage" | "BlobStorage" | "StorageV2" | "FileStorage" | "BlockBlobStorage"; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. 
+ */ +export declare class AccountSASPermissions { + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions: string): AccountSASPermissions; + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions; + /** + * Permission to read resources and list queues and tables granted. + */ + read: boolean; + /** + * Permission to write resources granted. + */ + write: boolean; + /** + * Permission to create blobs and files granted. + */ + delete: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add: boolean; + /** + * Permission to create blobs and files granted. + */ + create: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update: boolean; + /** + * Permission to get and delete messages granted. + */ + process: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Permission to filter blobs. + */ + filter: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} + +/** + * A type that looks like an account SAS permission. + * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface AccountSASPermissionsLike { + /** + * Permission to read resources and list queues and tables granted. + */ + read?: boolean; + /** + * Permission to write resources granted. + */ + write?: boolean; + /** + * Permission to delete blobs and files granted. + */ + delete?: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion?: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list?: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add?: boolean; + /** + * Permission to create blobs and files granted. + */ + create?: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update?: boolean; + /** + * Permission to get and delete messages granted. + */ + process?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Permission to filter blobs. + */ + filter?: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. 
Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export declare class AccountSASResourceTypes { + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes: string): AccountSASResourceTypes; + /** + * Permission to access service level APIs granted. + */ + service: boolean; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container: boolean; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object: boolean; + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +export declare class AccountSASServices { + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services: string): AccountSASServices; + /** + * Permission to access blob resources granted. + */ + blob: boolean; + /** + * Permission to access file resources granted. + */ + file: boolean; + /** + * Permission to access queue resources granted. + */ + queue: boolean; + /** + * Permission to access table resources granted. + */ + table: boolean; + /** + * Converts the given services to a string. + * + */ + toString(): string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once + * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation + * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters} + * exist because the former is mutable and a logical representation while the latter is immutable and used to generate + * actual REST requests. + * + * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1 + * for more conceptual information on SAS + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * for descriptions of the parameters, including which are required + */ +export declare interface AccountSASSignatureValues { + /** + * If not provided, this defaults to the service version targeted by this version of the library. 
+ */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * The time after which the SAS will no longer work. + */ + expiresOn: Date; + /** + * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help + * constructing the permissions string. + */ + permissions: AccountSASPermissions; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to + * construct this value. + */ + services: string; + /** + * The values that indicate the resource types accessible with this SAS. Please refer + * to {@link AccountSASResourceTypes} to construct this value. + */ + resourceTypes: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} + +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export declare class AnonymousCredential extends Credential_2 { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): AnonymousCredentialPolicy; +} + +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +export declare class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); +} + +/** Defines headers for AppendBlob_appendBlockFromUrl operation. */ +export declare interface AppendBlobAppendBlockFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. 
It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation. + */ +export declare interface AppendBlobAppendBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the appendBlockFromUrl operation. */ +export declare type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockFromUrlHeaders; + }; +}; + +/** Defines headers for AppendBlob_appendBlock operation. 
*/ +export declare interface AppendBlobAppendBlockHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link AppendBlobClient.appendBlock} operation. + */ +export declare interface AppendBlobAppendBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Callback to receive events on the progress of append block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. 
+ */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the appendBlock operation. */ +export declare type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockHeaders; + }; +}; + +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export declare class AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. + */ + private appendBlobContext; + /** + * + * Creates an instance of AppendBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". 
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): AppendBlobClient; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + create(options?: AppendBlobCreateOptions): Promise; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + createIfNotExists(options?: AppendBlobCreateIfNotExistsOptions): Promise; + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + seal(options?: AppendBlobSealOptions): Promise; + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. 
+ * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + appendBlock(body: HttpRequestBody, contentLength: number, options?: AppendBlobAppendBlockOptions): Promise; + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + appendBlockFromURL(sourceURL: string, sourceOffset: number, count: number, options?: AppendBlobAppendBlockFromURLOptions): Promise; +} + +/** Defines headers for AppendBlob_create operation. */ +export declare interface AppendBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. 
Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link AppendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * HTTP headers to set when creating append blobs. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; +} + +/** + * Contains response data for the {@link appendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link AppendBlobClient.create} operation. + */ +export declare interface AppendBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating append blobs. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when creating append blobs. A common header + * to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the create operation. */ +export declare type AppendBlobCreateResponse = AppendBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobCreateHeaders; + }; +}; + +/** + * Conditions to add to the creation of this append blob. + */ +export declare interface AppendBlobRequestConditions extends BlobRequestConditions, AppendPositionAccessConditions { +} + +/** + * Options to configure {@link AppendBlobClient.seal} operation. + */ +export declare interface AppendBlobSealOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet. + */ + conditions?: AppendBlobRequestConditions; +} + +/** Parameter group */ +export declare interface AppendPositionAccessConditions { + /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + maxSize?: number; + /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + appendPosition?: number; +} + +/** Defines values for ArchiveStatus. */ +export declare type ArchiveStatus = "rehydrate-pending-to-hot" | "rehydrate-pending-to-cool" | "rehydrate-pending-to-cold"; + +export { BaseRequestPolicy } + +/** + * A request associated with a batch operation. + */ +export declare interface BatchSubRequest { + /** + * The URL of the resource to request operation. + */ + url: string; + /** + * The credential used for sub request. + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. + * You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; +} + +/** + * The response data associated with a single request within a batch operation. + */ +export declare interface BatchSubResponse { + /** + * The status code of the sub operation. + */ + status: number; + /** + * The status message of the sub operation. + */ + statusMessage: string; + /** + * The error code of the sub operation, if the sub operation failed. + */ + errorCode?: string; + /** + * The HTTP response headers. + */ + headers: HttpHeaders; + /** + * The body as text. 
+ */ + bodyAsText?: string; + /** + * The batch sub request corresponding to the sub response. + */ + _request: BatchSubRequest; +} + +/** Defines headers for Blob_abortCopyFromURL operation. */ +export declare interface BlobAbortCopyFromURLHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.abortCopyFromURL} operation. + */ +export declare interface BlobAbortCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** Contains response data for the abortCopyFromURL operation. */ +export declare type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobAbortCopyFromURLHeaders; + }; +}; + +/** + * Options to configure Blob - Acquire Lease operation. + */ +export declare interface BlobAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export declare class BlobBatch { + private batchRequest; + private readonly batch; + private batchType; + constructor(); + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType(): string; + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody(): string; + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests(): Map; + private addSubRequestInternal; + private setBatchType; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). 
+ * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlob(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param options - + */ + deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobAccessTier(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. 
See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param tier - + * @param options - + */ + setBlobAccessTier(blobClient: BlobClient, tier: AccessTier, options?: BlobSetTierOptions): Promise; +} + +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export declare class BlobBatchClient { + private serviceOrContainerContext; + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch(): BlobBatch; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operations will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resources to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlobs(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation(subrequest) will be authenticated and authorized with specified credential. 
+ * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs to delete. + * @param options - + */ + deleteBlobs(blobClients: BlobClient[], options?: BlobDeleteOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobsAccessTier(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs which should have a new tier set. + * @param tier - + * @param options - + */ + setBlobsAccessTier(blobClients: BlobClient[], tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. 
+ * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + submitBatch(batchRequest: BlobBatch, options?: BlobBatchSubmitBatchOptionalParams): Promise; +} + +/** + * Contains response data for the {@link deleteBlobs} operation. + */ +export declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse; + +/** + * Contains response data for the {@link setBlobsAccessTier} operation. + */ +export declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse; + +/** + * Options to configure the Service - Submit Batch Optional Params. + */ +export declare interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel { +} + +/** + * Contains response data for blob batch operations. + */ +export declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse & ServiceSubmitBatchHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions { + /** + * The amount of time in milliseconds the poller should wait between + * calls to the service to determine the status of the Blob copy. + * Defaults to 15 seconds. + */ + intervalInMs?: number; + /** + * Callback to receive the state of the copy progress. + */ + onProgress?: (state: BlobBeginCopyFromUrlPollState) => void; + /** + * Serialized poller state that can be used to resume polling from. + * This may be useful when starting a copy on one process or thread + * and you wish to continue polling on another process or thread. + * + * To get serialized poller state, call `poller.toString()` on an existing + * poller. + */ + resumeFrom?: string; +} + +/** + * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}. + * + * This state is passed into the user-specified `onProgress` callback + * whenever copy progress is detected. + */ +export declare interface BlobBeginCopyFromUrlPollState extends PollOperationState { + /** + * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}. + */ + readonly blobClient: CopyPollerBlobClient; + /** + * The copyId that identifies the in-progress blob copy. + */ + copyId?: string; + /** + * the progress of the blob copy as reported by the service. 
+ */ + copyProgress?: string; + /** + * The source URL provided in {@link BlobClient.beginCopyFromURL}. + */ + copySource: string; + /** + * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call. + * This is exposed for the poller and should not be modified directly. + */ + readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions; +} + +/** + * Contains response data for the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse { +} + +/** + * Options to configure Blob - Break Lease operation. + */ +export declare interface BlobBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Blob - Change Lease operation. + */ +export declare interface BlobChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export declare class BlobClient extends StorageClient { + /** + * blobContext provided by protocol layer. + */ + private blobContext; + private _name; + private _containerName; + private _versionId?; + private _snapshot?; + /** + * The name of the blob. + */ + get name(): string; + /** + * The name of the storage container the blob is associated with. + */ + get containerName(): string; + /** + * + * Creates an instance of BlobClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". 
You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot: string): BlobClient; + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId: string): BlobClient; + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient(): AppendBlobClient; + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient(): BlockBlobClient; + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient(): PageBlobClient; + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. 
+ * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + download(offset?: number, count?: number, options?: BlobDownloadOptions): Promise; + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + exists(options?: BlobExistsOptions): Promise; + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + getProperties(options?: BlobGetPropertiesOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + delete(options?: BlobDeleteOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. 
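+     *
+     * Example usage (a minimal sketch; `blobClient` is assumed to be an existing `BlobClient` instance):
+     *
+     * ```js
+     * // Returns succeeded = false instead of throwing when the blob does not exist.
+     * const response = await blobClient.deleteIfExists();
+     * if (response.succeeded) {
+     *   console.log("Blob deleted.");
+     * } else {
+     *   console.log("Blob did not exist.");
+     * }
+     * ```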
+ */ + deleteIfExists(options?: BlobDeleteOptions): Promise; + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. + */ + undelete(options?: BlobUndeleteOptions): Promise; + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + setHTTPHeaders(blobHTTPHeaders?: BlobHTTPHeaders, options?: BlobSetHTTPHeadersOptions): Promise; + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + setMetadata(metadata?: Metadata, options?: BlobSetMetadataOptions): Promise; + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + setTags(tags: Tags, options?: BlobSetTagsOptions): Promise; + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + getTags(options?: BlobGetTagsOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + createSnapshot(options?: BlobCreateSnapshotOptions): Promise; + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. 
+ * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + beginCopyFromURL(copySource: string, options?: BlobBeginCopyFromURLOptions): Promise, BlobBeginCopyFromURLResponse>>; + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. + */ + abortCopyFromURL(copyId: string, options?: BlobAbortCopyFromURLOptions): Promise; + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + syncCopyFromURL(copySource: string, options?: BlobSyncCopyFromURLOptions): Promise; + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. 
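+     *
+     * Example usage (a minimal sketch; `blobClient` is assumed to be an existing `BlobClient` pointing at a block blob):
+     *
+     * ```js
+     * // Move a block blob to the Cool tier; valid values include "Hot", "Cool" and "Archive".
+     * await blobClient.setAccessTier("Cool");
+     * ```
+     *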
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. + */ + setAccessTier(tier: BlockBlobTier | PremiumPageBlobTier | string, options?: BlobSetTierOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob in parallel to a buffer. + * Offset and count are optional, downloads the entire blob if they are not provided. + * + * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two + * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size, + * consider {@link downloadToFile}. + * + * @param offset - From which position of the block blob to download(in bytes) + * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined + * @param options - BlobDownloadToBufferOptions + */ + downloadToBuffer(offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob in parallel to a buffer. + * Offset and count are optional, downloads the entire blob if they are not provided. + * + * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two + * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size, + * consider {@link downloadToFile}. + * + * @param buffer - Buffer to be fill, must have length larger than count + * @param offset - From which position of the block blob to download(in bytes) + * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined + * @param options - BlobDownloadToBufferOptions + */ + downloadToBuffer(buffer: Buffer, offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + downloadToFile(filePath: string, offset?: number, count?: number, options?: BlobDownloadOptions): Promise; + private getBlobAndContainerNamesFromUrl; + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. 
+ * @param options - Optional options to the Blob Start Copy From URL operation. + */ + private startCopyFromURL; + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options: BlobGenerateSasUrlOptions): Promise; + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + deleteImmutabilityPolicy(options?: BlobDeleteImmutabilityPolicyOptions): Promise; + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. + */ + setImmutabilityPolicy(immutabilityPolicy: BlobImmutabilityPolicy, options?: BlobSetImmutabilityPolicyOptions): Promise; + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + setLegalHold(legalHoldEnabled: boolean, options?: BlobSetLegalHoldOptions): Promise; +} + +/** Defines headers for Blob_copyFromURL operation. */ +export declare interface BlobCopyFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: SyncCopyStatusType; + /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */ + contentMD5?: Uint8Array; + /** This response header is returned so that the client can check for the integrity of the copied content. */ + xMsContentCrc64?: Uint8Array; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the copyFromURL operation. */ +export declare type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCopyFromURLHeaders; + }; +}; + +/** Defines values for BlobCopySourceTags. */ +export declare type BlobCopySourceTags = "REPLACE" | "COPY"; + +/** Defines headers for Blob_createSnapshot operation. */ +export declare interface BlobCreateSnapshotHeaders { + /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */ + snapshot?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.createSnapshot} operation. + */ +export declare interface BlobCreateSnapshotOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet when creating blob snapshots. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the createSnapshot operation. 
*/ +export declare type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCreateSnapshotHeaders; + }; +}; + +/** Defines headers for Blob_delete operation. */ +export declare interface BlobDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link BlobClient.deleteIfExists} operation. + */ +export declare interface BlobDeleteIfExistsResponse extends BlobDeleteResponse { + /** + * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place. + */ + succeeded: boolean; +} + +/** Defines headers for Blob_deleteImmutabilityPolicy operation. */ +export declare interface BlobDeleteImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} + +/** + * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation. + */ +export declare interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the deleteImmutabilityPolicy operation. */ +export declare type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteImmutabilityPolicyHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.delete} operation. + */ +export declare interface BlobDeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting blobs. + */ + conditions?: BlobRequestConditions; + /** + * Specifies options to delete blobs that have associated snapshots. + * - `include`: Delete the base blob and all of its snapshots. + * - `only`: Delete only the blob's snapshots and not the blob itself. 
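+     *
+     * For example (a minimal sketch; `blobClient` is assumed to be an existing `BlobClient` instance), deleting a blob together with its snapshots:
+     *
+     * ```js
+     * await blobClient.delete({ deleteSnapshots: "include" });
+     * ```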
+ */ + deleteSnapshots?: DeleteSnapshotsOptionType; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the delete operation. */ +export declare type BlobDeleteResponse = BlobDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteHeaders; + }; +}; + +/** Defines headers for Blob_download operation. */ +export declare interface BlobDownloadHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** Returns the date and time the blob was created. */ + createdOn?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. 
Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** The number of tags associated with the blob */ + tagCount?: number; + /** If this blob has been sealed */ + isSealed?: boolean; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} + +/** Optional parameters. */ +export declare interface BlobDownloadOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + leaseAccessConditions?: LeaseAccessConditions; + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** Parameter group */ + cpkInfo?: CpkInfo; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */ + snapshot?: string; + /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */ + versionId?: string; + /** Return only the bytes of the blob in the specified range. */ + range?: string; + /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */ + rangeGetContentMD5?: boolean; + /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. 
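+ *
+ * A minimal sketch (not part of the generated contract) of how this surfaces through the convenience-layer
+ * {@link BlobClient.download} and its `rangeGetContentCrc64` option, assuming an existing `blobClient`
+ * and a requested range of at most 4 MB:
+ *
+ * ```js
+ * // Download bytes 0-1023 and ask the service to return a CRC64 for that range.
+ * const response = await blobClient.download(0, 1024, { rangeGetContentCrc64: true });
+ * console.log(response.contentCrc64);
+ * ```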
*/ + rangeGetContentCRC64?: boolean; +} + +/** + * Options to configure the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve. + */ + snapshot?: string; + /** + * When this is set to true and download range of blob, the service returns the MD5 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentMD5?: boolean; + /** + * When this is set to true and download range of blob, the service returns the CRC64 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentCrc64?: boolean; + /** + * Conditions to meet when downloading blobs. + */ + conditions?: BlobRequestConditions; + /** + * Call back to receive events on the progress of download operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original body download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional `FileClient.download()` request will be made + * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached. + * + * Default value is 5, please set a larger value when loading large files in poor network. + */ + maxRetryRequests?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the download operation. */ +export declare type BlobDownloadResponseModel = BlobDownloadHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDownloadHeaders; + }; +}; + +/** + * Contains response data for the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadResponseParsed extends BlobDownloadResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} + +/** + * Option interface for the {@link BlobClient.downloadToBuffer} operation. + */ +export declare interface BlobDownloadToBufferOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; + /** + * blockSize is the data every request trying to download. + * Must be greater than or equal to 0. + * If set to 0 or undefined, blockSize will automatically calculated according to the blob size. + */ + blockSize?: number; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original block download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional FileClient.download() request will be made + * from the broken point, until the requested block has been successfully downloaded or + * maxRetryRequestsPerBlock is reached. + * + * Default value is 5, please set a larger value when in poor network. + */ + maxRetryRequestsPerBlock?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel download. + */ + concurrency?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** + * Options to configure the {@link BlobClient.exists} operation. + */ +export declare interface BlobExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Conditions to meet. + */ + conditions?: BlobRequestConditions; +} + +/** + * An interface representing BlobFlatListSegment. + */ +export declare interface BlobFlatListSegment { + blobItems: BlobItem[]; +} + +export declare interface BlobFlatListSegmentModel { + blobItems: BlobItemInternal[]; +} + +/** + * Options to configure {@link BlobClient.generateSasUrl} operation. + */ +export declare interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: BlobSASPermissions; +} + +/** Defines headers for Blob_getProperties operation. */ +export declare interface BlobGetPropertiesHeaders { + /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** Returns the date and time the blob was created. */ + createdOn?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. 
This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Included if the blob is incremental copy blob. */ + isIncrementalCopy?: boolean; + /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */ + destinationSnapshot?: string; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */ + contentLength?: number; + /** The content type specified for the blob. The default content type is 'application/octet-stream' */ + contentType?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. 
The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */ + accessTier?: string; + /** For page blobs on a premium storage account only. If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */ + accessTierInferred?: boolean; + /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool/rehydrate-pending-to-cold. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */ + archiveStatus?: string; + /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */ + accessTierChangedOn?: Date; + /** A DateTime value returned by the service that uniquely identifies the blob. 
The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** The number of tags associated with the blob */ + tagCount?: number; + /** The time this blob will expire. */ + expiresOn?: Date; + /** If this blob has been sealed */ + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */ + rehydratePriority?: RehydratePriority; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting blob properties. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** + * Contains response data for the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} + +/** Contains response data for the getProperties operation. */ +export declare type BlobGetPropertiesResponseModel = BlobGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobGetPropertiesHeaders; + }; +}; + +/** Defines headers for Blob_getTags operation. */ +export declare interface BlobGetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.getTags} operation. 
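+ *
+ * A minimal usage sketch, assuming an existing `blobClient` ({@link BlobClient}) whose blob has tags set:
+ *
+ * ```js
+ * const result = await blobClient.getTags();
+ * for (const [key, value] of Object.entries(result.tags)) {
+ *   console.log(`${key}: ${value}`);
+ * }
+ * ```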
+ */ +export declare interface BlobGetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} + +/** + * Contains response data for the {@link BlobClient.getTags} operation. + */ +export declare type BlobGetTagsResponse = { + tags: Tags; +} & BlobGetTagsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: BlobGetTagsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: BlobTags; + }; +}; + +/** + * An interface representing BlobHierarchyListSegment. + */ +export declare interface BlobHierarchyListSegment { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItem[]; +} + +export declare interface BlobHierarchyListSegmentModel { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItemInternal[]; +} + +/** Parameter group */ +export declare interface BlobHTTPHeaders { + /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */ + blobCacheControl?: string; + /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */ + blobContentType?: string; + /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */ + blobContentMD5?: Uint8Array; + /** Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. */ + blobContentEncoding?: string; + /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */ + blobContentLanguage?: string; + /** Optional. Sets the blob's Content-Disposition header. */ + blobContentDisposition?: string; +} + +/** + * Describe immutable policy for blob. + */ +export declare interface BlobImmutabilityPolicy { + /** + * Specifies the date time when the blobs immutability policy is set to expire. + */ + expiriesOn?: Date; + /** + * Specifies the immutability policy mode to set on the blob. + */ + policyMode?: BlobImmutabilityPolicyMode; +} + +/** Defines values for BlobImmutabilityPolicyMode. 
*/ +export declare type BlobImmutabilityPolicyMode = "Mutable" | "Unlocked" | "Locked"; + +/** + * An Azure Storage blob + */ +export declare interface BlobItem { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + properties: BlobProperties; + metadata?: { + [propertyName: string]: string; + }; + tags?: Tags; + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + hasVersionsOnly?: boolean; +} + +/** An Azure Storage blob */ +export declare interface BlobItemInternal { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + /** Properties of a blob */ + properties: BlobProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; + /** Blob tags */ + blobTags?: BlobTags; + /** Dictionary of */ + objectReplicationMetadata?: { + [propertyName: string]: string; + }; + /** Inactive root blobs which have any versions would have such tag with value true. */ + hasVersionsOnly?: boolean; +} + +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export declare class BlobLeaseClient { + private _leaseId; + private _url; + private _containerOrBlobOperation; + private _isContainer; + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId(): string; + /** + * Gets the url. + * + * @readonly + */ + get url(): string; + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client: ContainerClient | BlobClient, leaseId?: string); + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + acquireLease(duration: number, options?: LeaseOperationOptions): Promise; + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + changeLease(proposedLeaseId: string, options?: LeaseOperationOptions): Promise; + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + releaseLease(options?: LeaseOperationOptions): Promise; + /** + * To renew the lease. 
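+ *
+ * A minimal sketch of the lease round trip, assuming an existing `blobClient` ({@link BlobClient}):
+ *
+ * ```js
+ * const leaseClient = blobClient.getBlobLeaseClient();
+ * await leaseClient.acquireLease(30); // 30-second lease
+ * await leaseClient.renewLease();     // extend it before it expires
+ * await leaseClient.releaseLease();   // free it when done
+ * ```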
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + renewLease(options?: LeaseOperationOptions): Promise; + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + breakLease(breakPeriod: number, options?: LeaseOperationOptions): Promise; +} + +export declare interface BlobPrefix { + name: string; +} + +/** Properties of a blob */ +export declare interface BlobProperties { + createdOn?: Date; + lastModified: Date; + etag: string; + /** Size in bytes */ + contentLength?: number; + contentType?: string; + contentEncoding?: string; + contentLanguage?: string; + contentMD5?: Uint8Array; + contentDisposition?: string; + cacheControl?: string; + blobSequenceNumber?: number; + blobType?: BlobType; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + copyId?: string; + copyStatus?: CopyStatusType; + copySource?: string; + copyProgress?: string; + copyCompletedOn?: Date; + copyStatusDescription?: string; + serverEncrypted?: boolean; + incrementalCopy?: boolean; + destinationSnapshot?: string; + deletedOn?: Date; + remainingRetentionDays?: number; + accessTier?: AccessTier; + accessTierInferred?: boolean; + archiveStatus?: ArchiveStatus; + customerProvidedKeySha256?: string; + /** The name of the encryption scope under which the blob is encrypted. */ + encryptionScope?: string; + accessTierChangedOn?: Date; + tagCount?: number; + expiresOn?: Date; + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */ + rehydratePriority?: RehydratePriority; + lastAccessedOn?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; +} + +/** + * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}. + */ +export declare interface BlobQueryArrowConfiguration { + /** + * Kind. + */ + kind: "arrow"; + /** + * List of {@link BlobQueryArrowField} describing the schema of the data. + */ + schema: BlobQueryArrowField[]; +} + +/** + * Describe a field in {@link BlobQueryArrowConfiguration}. + */ +export declare interface BlobQueryArrowField { + /** + * The type of the field. + */ + type: BlobQueryArrowFieldType; + /** + * The name of the field. + */ + name?: string; + /** + * The precision of the field. Required if type is "decimal". + */ + precision?: number; + /** + * The scale of the field. Required if type is is "decimal". + */ + scale?: number; +} + +/** + * The type of a {@link BlobQueryArrowField}. 
+ */ +export declare type BlobQueryArrowFieldType = "int64" | "bool" | "timestamp[ms]" | "string" | "double" | "decimal"; + +/** + * Options to query blob with CSV format. + */ +export declare interface BlobQueryCsvTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a CSV format blob. + */ + kind: "csv"; + /** + * Column separator. Default is ",". + */ + columnSeparator?: string; + /** + * Field quote. + */ + fieldQuote?: string; + /** + * Escape character. + */ + escapeCharacter?: string; + /** + * Has headers. Default is false. + */ + hasHeaders?: boolean; +} + +/** + * Blob query error type. + */ +export declare interface BlobQueryError { + /** + * Whether error is fatal. Fatal error will stop query. + */ + isFatal: boolean; + /** + * Error name. + */ + name: string; + /** + * Position in bytes of the query. + */ + position: number; + /** + * Error description. + */ + description: string; +} + +/** Defines headers for Blob_query operation. */ +export declare interface BlobQueryHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletionTime?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. 
*/ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} + +/** + * Options to query blob with JSON format. + */ +export declare interface BlobQueryJsonTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a JSON format blob. + */ + kind: "json"; +} + +/** + * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}. + */ +export declare interface BlobQueryParquetConfiguration { + /** + * Kind. + */ + kind: "parquet"; +} + +/** Contains response data for the query operation. */ +export declare type BlobQueryResponseModel = BlobQueryHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobQueryHeaders; + }; +}; + +/** + * Options to configure Blob - Release Lease operation. + */ +export declare interface BlobReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Blob - Renew Lease operation. + */ +export declare interface BlobRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * standard HTTP conditional headers, tags condition and lease condition + */ +export declare interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions { +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): BlobSASPermissions; + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString(): string; +} + +/** + * A type that looks like a Blob SAS permission. + * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface BlobSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs. + */ +export declare interface BlobSASSignatureValues { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. 
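+ *
+ * A hedged sketch of supplying these values to `generateBlobSASQueryParameters` (the container name, blob name
+ * and `sharedKeyCredential` below are placeholders, not values defined by this interface):
+ *
+ * ```js
+ * const sas = generateBlobSASQueryParameters(
+ *   {
+ *     containerName: "my-container",
+ *     blobName: "my-blob.txt",
+ *     permissions: BlobSASPermissions.parse("r"),
+ *     startsOn: new Date(),
+ *     expiresOn: new Date(Date.now() + 60 * 60 * 1000) // one hour from now
+ *   },
+ *   sharedKeyCredential // a StorageSharedKeyCredential
+ * ).toString();
+ * ```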
+ */ + expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource + * being accessed for help constructing the permissions string. + */ + permissions?: BlobSASPermissions | ContainerSASPermissions; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * The name of the container the SAS user may access. + */ + containerName: string; + /** + * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided. + */ + blobName?: string; + /** + * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09. + */ + snapshotTime?: string; + /** + * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10. + */ + versionId?: string; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; + /** + * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user + * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will + * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission + * check for the user specified in this value will be performed. This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to + * correlate SAS generation with storage resource access. This is only used for User Delegation SAS. + */ + correlationId?: string; +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export declare class BlobServiceClient extends StorageClient { + /** + * serviceContext provided by protocol layer. + */ + private serviceContext; + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. 
] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString: string, options?: StoragePipelineOptions): BlobServiceClient; + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + * + * Example using DefaultAzureCredential from `@azure/identity`: + * + * ```js + * const account = ""; + * + * const defaultAzureCredential = new DefaultAzureCredential(); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * defaultAzureCredential + * ); + * ``` + * + * Example using an account name/key: + * + * ```js + * const account = "" + * const sharedKeyCredential = new StorageSharedKeyCredential(account, ""); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * sharedKeyCredential + * ); + * ``` + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName: string): ContainerClient; + /** + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + createContainer(containerName: string, options?: ContainerCreateOptions): Promise<{ + containerClient: ContainerClient; + containerCreateResponse: ContainerCreateResponse; + }>; + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. 
+ * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + deleteContainer(containerName: string, options?: ContainerDeleteMethodOptions): Promise; + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + undeleteContainer(deletedContainerName: string, deletedContainerVersion: string, options?: ServiceUndeleteContainerOptions): Promise<{ + containerClient: ContainerClient; + containerUndeleteResponse: ContainerUndeleteResponse; + }>; + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + private renameContainer; + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + getProperties(options?: ServiceGetPropertiesOptions): Promise; + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + setProperties(properties: BlobServiceProperties, options?: ServiceSetPropertiesOptions): Promise; + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + getStatistics(options?: ServiceGetStatisticsOptions): Promise; + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + getAccountInfo(options?: ServiceGetAccountInfoOptions): Promise; + /** + * Returns a list of the containers under the specified account. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + private listContainersSegment; + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
+ * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param options - Options to findBlobsByTagsItems.
+ */
+ private findBlobsByTagsItems;
+ /**
+ * Returns an async iterable iterator to find all blobs with the specified tag
+ * under the specified account.
+ *
+ * .byPage() returns an async iterable iterator to list the blobs in pages.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * let i = 1;
+ * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) {
+ *   console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'");
+ * let blobItem = await iter.next();
+ * while (!blobItem.done) {
+ *   console.log(`Blob ${i++}: ${blobItem.value.name}`);
+ *   blobItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) {
+ *   if (response.blobs) {
+ *     for (const blob of response.blobs) {
+ *       console.log(`Blob ${i++}: ${blob.name}`);
+ *     }
+ *   }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 blob names
+ * if (response.blobs) {
+ *   for (const blob of response.blobs) {
+ *     console.log(`Blob ${i++}: ${blob.name}`);
+ *   }
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ * // Passing next marker as continuationToken
+ * iterator = blobServiceClient
+ *   .findBlobsByTags("tagkey='tagvalue'")
+ *   .byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints blob names
+ * if (response.blobs) {
+ *   for (const blob of response.blobs) {
+ *     console.log(`Blob ${i++}: ${blob.name}`);
+ *   }
+ * }
+ * ```
+ *
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param options - Options to find blobs by tags.
+ */
+ findBlobsByTags(tagFilterSqlExpression: string, options?: ServiceFindBlobByTagsOptions): PagedAsyncIterableIterator;
+ /**
+ * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses
+ *
+ * @param marker - A string value that identifies the portion of
+ * the list of containers to be returned with the next listing operation. The
+ * operation returns the continuationToken value within the response body if the
+ * listing operation did not return all containers remaining to be listed
+ * with the current page. The continuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param options - Options to list containers operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + private listItems; + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options?: ServiceListContainersOptions): PagedAsyncIterableIterator; + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + getUserDelegationKey(startsOn: Date, expiresOn: Date, options?: ServiceGetUserDelegationKeyOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient(): BlobBatchClient; + /** + * Only available for BlobServiceClient constructed with a shared key credential. 
+ * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string; +} + +/** Storage Service Properties. */ +export declare interface BlobServiceProperties { + /** Azure Analytics Logging settings. */ + blobAnalyticsLogging?: Logging; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + hourMetrics?: Metrics; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + minuteMetrics?: Metrics; + /** The set of CORS rules. */ + cors?: CorsRule[]; + /** The default version to use for requests to the Blob service if an incoming request's version is not specified. Possible values include version 2008-10-27 and all more recent versions */ + defaultServiceVersion?: string; + /** the retention policy which determines how long the associated data should persist */ + deleteRetentionPolicy?: RetentionPolicy; + /** The properties that enable an account to host a static website */ + staticWebsite?: StaticWebsite; +} + +/** Stats for the storage service. */ +export declare interface BlobServiceStatistics { + /** Geo-Replication information for the Secondary Storage Service */ + geoReplication?: GeoReplication; +} + +/** Defines headers for Blob_setHttpHeaders operation. */ +export declare interface BlobSetHTTPHeadersHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setHTTPHeaders} operation. 
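+ *
+ * Example (editor's sketch, not taken from the SDK source): setting headers on an
+ * existing blob, assuming `blobClient` is an already-constructed {@link BlobClient}.
+ *
+ * ```js
+ * // Replace the blob's HTTP headers; headers omitted here are cleared by the service.
+ * await blobClient.setHTTPHeaders({
+ *   blobContentType: "application/json",
+ *   blobCacheControl: "max-age=3600",
+ * });
+ * ```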
+ */ +export declare interface BlobSetHTTPHeadersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob HTTP headers. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the setHttpHeaders operation. */ +export declare type BlobSetHTTPHeadersResponse = BlobSetHTTPHeadersHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetHTTPHeadersHeaders; + }; +}; + +/** Defines headers for Blob_setImmutabilityPolicy operation. */ +export declare interface BlobSetImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates the time the immutability policy will expire. */ + immutabilityPolicyExpiry?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; +} + +/** + * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation. + */ +export declare interface BlobSetImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + modifiedAccessCondition?: ModificationConditions; +} + +/** Contains response data for the setImmutabilityPolicy operation. */ +export declare type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetImmutabilityPolicyHeaders; + }; +}; + +/** Defines headers for Blob_setLegalHold operation. */ +export declare interface BlobSetLegalHoldHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates if the blob has a legal hold. */ + legalHold?: boolean; +} + +/** + * Options for setting legal hold {@link BlobClient.setLegalHold} operation. 
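+ *
+ * Example (editor's sketch, not taken from the SDK source): toggling a legal hold,
+ * assuming `blobClient` targets a blob in a container with version-level immutability
+ * support enabled.
+ *
+ * ```js
+ * // Place a legal hold on the blob, then release it later.
+ * await blobClient.setLegalHold(true);
+ * // ... once the hold is no longer needed:
+ * await blobClient.setLegalHold(false);
+ * ```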
+ */ +export declare interface BlobSetLegalHoldOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the setLegalHold operation. */ +export declare type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetLegalHoldHeaders; + }; +}; + +/** Defines headers for Blob_setMetadata operation. */ +export declare interface BlobSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setMetadata} operation. + */ +export declare interface BlobSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob metadata. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. 
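+ *
+ * Example (editor's sketch, not taken from the SDK source): passing an encryption scope
+ * when setting metadata, assuming `blobClient` exists and "myScope" is an encryption
+ * scope already configured on the storage account.
+ *
+ * ```js
+ * await blobClient.setMetadata({ project: "demo" }, { encryptionScope: "myScope" });
+ * ```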
+ */ + encryptionScope?: string; +} + +/** Contains response data for the setMetadata operation. */ +export declare type BlobSetMetadataResponse = BlobSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetMetadataHeaders; + }; +}; + +/** Defines headers for Blob_setTags operation. */ +export declare interface BlobSetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setTags} operation. + */ +export declare interface BlobSetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} + +/** Contains response data for the setTags operation. */ +export declare type BlobSetTagsResponse = BlobSetTagsHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTagsHeaders; + }; +}; + +/** Defines headers for Blob_setTier operation. */ +export declare interface BlobSetTierHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setAccessTier} operation. + */ +export declare interface BlobSetTierOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; +} + +/** Contains response data for the setTier operation. */ +export declare type BlobSetTierResponse = BlobSetTierHeaders & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTierHeaders; + }; +}; + +/** Defines headers for Blob_startCopyFromURL operation. */ +export declare interface BlobStartCopyFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobStartCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the blob that are being copied. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | PremiumPageBlobTier | string; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. 
+ */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Overrides the sealed state of the destination blob. Default true. + */ + sealBlob?: boolean; +} + +/** Contains response data for the startCopyFromURL operation. */ +export declare type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobStartCopyFromURLHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.syncCopyFromURL} operation. + */ +export declare interface BlobSyncCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | PremiumPageBlobTier | string; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Default 'REPLACE'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} + +export declare interface BlobTag { + key: string; + value: string; +} + +/** Blob tags */ +export declare interface BlobTags { + blobTagSet: BlobTag[]; +} + +/** Defines values for BlobType. */ +export declare type BlobType = "BlockBlob" | "PageBlob" | "AppendBlob"; + +/** Defines headers for Blob_undelete operation. */ +export declare interface BlobUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.undelete} operation. + */ +export declare interface BlobUndeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the undelete operation. */ +export declare type BlobUndeleteResponse = BlobUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobUndeleteHeaders; + }; +}; + +/** + * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and + * {@link BlockBlobClient.uploadBrowserDate}. + */ +export declare type BlobUploadCommonResponse = BlockBlobUploadHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse; +}; + +/** Represents a single block in a block blob. It describes the block's ID and size. */ +export declare interface Block { + /** The base64 encoded block ID. */ + name: string; + /** The block size in bytes. */ + size: number; +} + +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export declare class BlockBlobClient extends BlobClient { + /** + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. + */ + private _blobContext; + /** + * blockBlobContext provided by protocol layer. + */ + private blockBlobContext; + /** + * + * Creates an instance of BlockBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". 
You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): BlockBlobClient; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + query(query: string, options?: BlockBlobQueryOptions): Promise; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + upload(body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise; + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + syncUploadFromURL(sourceURL: string, options?: BlockBlobSyncUploadFromURLOptions): Promise; + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. 
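+ *
+ * Example usage (editor's sketch, not taken from the SDK source): staging two blocks and
+ * committing them, assuming `blockBlobClient` is an existing {@link BlockBlobClient}
+ * (Node.js, so `Buffer` is available). The block IDs are hypothetical; they must be
+ * base64-encoded strings of equal length within one blob.
+ *
+ * ```js
+ * const chunks = [Buffer.from("hello "), Buffer.from("world")];
+ * const blockIds = [];
+ * for (let i = 0; i < chunks.length; i++) {
+ *   // Fixed-width ids keep every base64-encoded block id the same length.
+ *   const blockId = Buffer.from(String(i).padStart(6, "0")).toString("base64");
+ *   await blockBlobClient.stageBlock(blockId, chunks[i], chunks[i].length);
+ *   blockIds.push(blockId);
+ * }
+ * // Commit the staged blocks in order to form the final blob.
+ * await blockBlobClient.commitBlockList(blockIds);
+ * ```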
+ */
+ stageBlock(blockId: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobStageBlockOptions): Promise;
+ /**
+ * The Stage Block From URL operation creates a new block to be committed as part
+ * of a blob where the contents are read from a URL.
+ * This API is available starting in version 2018-03-28.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url
+ *
+ * @param blockId - A 64-byte value that is base64-encoded
+ * @param sourceURL - Specifies the URL of the blob. The value
+ * may be a URL of up to 2 KB in length that specifies a blob.
+ * The value should be URL-encoded as it would appear
+ * in a request URI. The source blob must either be public
+ * or must be authenticated via a shared access signature.
+ * If the source blob is public, no authentication is required
+ * to perform the operation. Here are some examples of source object URLs:
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+ * @param offset - From which position of the blob to download, greater than or equal to 0
+ * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined
+ * @param options - Options to the Block Blob Stage Block From URL operation.
+ * @returns Response data for the Block Blob Stage Block From URL operation.
+ */
+ stageBlockFromURL(blockId: string, sourceURL: string, offset?: number, count?: number, options?: BlockBlobStageBlockFromURLOptions): Promise;
+ /**
+ * Writes a blob by specifying the list of block IDs that make up the blob.
+ * In order to be written as part of a blob, a block must have been successfully written
+ * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
+ * update a blob by uploading only those blocks that have changed, then committing the new and existing
+ * blocks together. Any blocks not specified in the block list are permanently deleted.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list
+ *
+ * @param blocks - Array of 64-byte values that are base64-encoded
+ * @param options - Options to the Block Blob Commit Block List operation.
+ * @returns Response data for the Block Blob Commit Block List operation.
+ */
+ commitBlockList(blocks: string[], options?: BlockBlobCommitBlockListOptions): Promise;
+ /**
+ * Returns the list of blocks that have been uploaded as part of a block blob
+ * using the specified block list filter.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list
+ *
+ * @param listType - Specifies whether to return the list of committed blocks,
+ * the list of uncommitted blocks, or both lists together.
+ * @param options - Options to the Block Blob Get Block List operation.
+ * @returns Response data for the Block Blob Get Block List operation.
+ */
+ getBlockList(listType: BlockListType, options?: BlockBlobGetBlockListOptions): Promise;
+ /**
+ * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.
+ *
+ * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
+ * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+ * to commit the block list.
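+ *
+ * Example (editor's sketch, not taken from the SDK source): uploading a small in-memory
+ * payload, assuming `blockBlobClient` is an existing {@link BlockBlobClient} (Node.js).
+ *
+ * ```js
+ * const data = Buffer.from("hello world");
+ * // Small payloads go up in a single call; larger ones are split into staged blocks.
+ * await blockBlobClient.uploadData(data, {
+ *   blobHTTPHeaders: { blobContentType: "text/plain" },
+ * });
+ * ```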
+ *
+ * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
+ * `blobContentType`, enabling the browser to provide
+ * functionality based on file type.
+ *
+ * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView
+ * @param options -
+ */
+ uploadData(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise;
+ /**
+ * ONLY AVAILABLE IN BROWSERS.
+ *
+ * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to a block blob.
+ *
+ * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call
+ * {@link commitBlockList} to commit the block list.
+ *
+ * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
+ * `blobContentType`, enabling the browser to provide
+ * functionality based on file type.
+ *
+ * @deprecated Use {@link uploadData} instead.
+ *
+ * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView
+ * @param options - Options to upload browser data.
+ * @returns Response data for the Blob Upload operation.
+ */
+ uploadBrowserData(browserData: Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise;
+ /**
+ *
+ * Uploads data to a block blob. Requires a bodyFactory as the data source,
+ * which needs to return a {@link HttpRequestBody} object with the offset and size provided.
+ *
+ * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
+ * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+ * to commit the block list.
+ *
+ * @param bodyFactory -
+ * @param size - size of the data to upload.
+ * @param options - Options to Upload to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
+ */
+ private uploadSeekableInternal;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Uploads a local file in blocks to a block blob.
+ *
+ * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
+ * to commit the block list.
+ *
+ * @param filePath - Full path of local file
+ * @param options - Options to Upload to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
+ */
+ uploadFile(filePath: string, options?: BlockBlobParallelUploadOptions): Promise;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Uploads a Node.js Readable stream into a block blob.
+ *
+ * PERFORMANCE IMPROVEMENT TIPS:
+ * * Set the input stream's highWaterMark to the same value as the bufferSize
+ * parameter to avoid Buffer.concat() operations.
+ *
+ * @param stream - Node.js Readable stream
+ * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB
+ * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,
+ * positive correlation with max uploading concurrency. Default value is 5
+ * @param options - Options to Upload Stream to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
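+ *
+ * Example usage (editor's sketch, not taken from the SDK source), assuming
+ * `blockBlobClient` is an existing {@link BlockBlobClient} and "./data.bin" is a
+ * hypothetical local file:
+ *
+ * ```js
+ * const fs = require("fs");
+ * // Match the stream's highWaterMark to bufferSize to avoid extra Buffer.concat() work.
+ * const stream = fs.createReadStream("./data.bin", { highWaterMark: 4 * 1024 * 1024 });
+ * await blockBlobClient.uploadStream(stream, 4 * 1024 * 1024, 5);
+ * ```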
+ */ + uploadStream(stream: Readable, bufferSize?: number, maxConcurrency?: number, options?: BlockBlobUploadStreamOptions): Promise; +} + +/** Defines headers for BlockBlob_commitBlockList operation. */ +export declare interface BlockBlobCommitBlockListHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.commitBlockList} operation. + */ +export declare interface BlockBlobCommitBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when committing the block list. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when committing block list. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when committing block list. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. 
Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the commitBlockList operation. */ +export declare type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobCommitBlockListHeaders; + }; +}; + +/** Defines headers for BlockBlob_getBlockList operation. */ +export declare interface BlockBlobGetBlockListHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The media type of the body of the response. For Get Block List this is 'application/xml' */ + contentType?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.getBlockList} operation. + */ +export declare interface BlockBlobGetBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; +} + +/** Contains response data for the getBlockList operation. */ +export declare type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders & BlockList & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlockList; + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobGetBlockListHeaders; + }; +}; + +/** + * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}. + */ +export declare interface BlockBlobParallelUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Destination block blob size in bytes. + */ + blockSize?: number; + /** + * Blob size threshold in bytes to start concurrency uploading. + * Default value is 256MB, blob size less than this option will + * be uploaded via one I/O operation without concurrency. + * You can customize a value less equal than the default value. + */ + maxSingleShotSize?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Blob HTTP Headers. A common header to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel uploading. Must be greater than or equal to 0. + */ + concurrency?: number; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} + +/** Defines headers for BlockBlob_putBlobFromUrl operation. */ +export declare interface BlockBlobPutBlobFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. 
The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the putBlobFromUrl operation. */ +export declare type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobPutBlobFromUrlHeaders; + }; +}; + +/** + * Options to configure {@link BlockBlobClient.query} operation. + */ +export declare interface BlockBlobQueryOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Configurations for the query input. + */ + inputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryParquetConfiguration; + /** + * Configurations for the query output. + */ + outputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryArrowConfiguration; + /** + * Callback to receive events on the progress of query operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback to receive error events during the query operaiton. + */ + onError?: (error: BlobQueryError) => void; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Defines headers for BlockBlob_stageBlockFromURL operation. */ +export declare interface BlockBlobStageBlockFromURLHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation. + */ +export declare interface BlockBlobStageBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Specifies the bytes of the source Blob/File to upload. + * If not specified, the entire content is uploaded as a single block. + */ + range?: Range_2; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the stageBlockFromURL operation. */ +export declare type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockFromURLHeaders; + }; +}; + +/** Defines headers for BlockBlob_stageBlock operation. */ +export declare interface BlockBlobStageBlockHeaders { + /** This header is returned so that the client can check for message content integrity. 
The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.stageBlock} operation. + */ +export declare interface BlockBlobStageBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * Callback to receive events on the progress of stage block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. 
+ */ + encryptionScope?: string; +} + +/** Contains response data for the stageBlock operation. */ +export declare type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockHeaders; + }; +}; + +/** + * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation. + */ +export declare interface BlockBlobSyncUploadFromURLOptions extends CommonOptions { + /** + * Server timeout in seconds. + * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations + */ + timeoutInSeconds?: number; + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value + * pairs are specified, the operation will copy the metadata from the source blob or file to the + * destination blob. If one or more name-value pairs are specified, the destination blob is + * created with the specified metadata, and metadata is not copied from the source blob or file. + * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules + * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + * information. + */ + metadata?: Metadata; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Optional, default is true. Indicates if properties from the source blob should be copied. + */ + copySourceBlobProperties?: boolean; + /** + * HTTP headers to set when uploading to a block blob. + * + * A common header to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Conditions to meet for the destination Azure Blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Conditions to meet for the source Azure Blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} + +/** + * Represents the access tier on a blob. 
+ * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export declare enum BlockBlobTier { + /** + * Optimized for storing data that is accessed frequently. + */ + Hot = "Hot", + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + Cool = "Cool", + /** + * Optimized for storing data that is rarely accessed. + */ + Cold = "Cold", + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + Archive = "Archive" +} + +/** Defines headers for BlockBlob_upload operation. */ +export declare interface BlockBlobUploadHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.upload} operation. + */ +export declare interface BlockBlobUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when uploading to a block blob. 
A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pairs to associate with the blob when uploading to a block blob. + */ + metadata?: Metadata; + /** + * Callback to receive events on the progress of the upload operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that this parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that this parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the upload operation. */ +export declare type BlockBlobUploadResponse = BlockBlobUploadHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobUploadHeaders; + }; +}; + +/** + * Option interface for the {@link BlockBlobClient.uploadStream} operation. + */ +export declare interface BlockBlobUploadStreamOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Blob HTTP Headers. + * + * A common header to set is `blobContentType`, enabling the + * browser to provide functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} + +export declare interface BlockList { + committedBlocks?: Block[]; + uncommittedBlocks?: Block[]; +} + +/** Defines values for BlockListType.
*/ +export declare type BlockListType = "committed" | "uncommitted" | "all"; + +declare interface ClearRange { + start: number; + end: number; +} + +/** + * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}. + */ +export declare interface CommonGenerateSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; +} + +/** + * An interface for options common to every remote operation. + */ +export declare interface CommonOptions { + /** + * Options to configure spans created when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} + +/** + * Options to configure Container - Acquire Lease operation. + */ +export declare interface ContainerAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** Optional parameters. */ +export declare interface ContainerBreakLeaseOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. 
*/ + breakPeriod?: number; +} + +/** + * Options to configure Container - Break Lease operation. + */ +export declare interface ContainerBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Container - Change Lease operation. + */ +export declare interface ContainerChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export declare class ContainerClient extends StorageClient { + /** + * containerContext provided by protocol layer. + */ + private containerContext; + private _containerName; + /** + * The name of the container. + */ + get containerName(): string; + /** + * + * Creates an instance of ContainerClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. 
+ * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - Options to Container Create operation. + * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + create(options?: ContainerCreateOptions): Promise; + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - + */ + createIfNotExists(options?: ContainerCreateOptions): Promise; + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + exists(options?: ContainerExistsOptions): Promise; + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName: string): BlobClient; + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName: string): AppendBlobClient; + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName: string): BlockBlobClient; + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName: string): PageBlobClient; + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. 
This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + getProperties(options?: ContainerGetPropertiesOptions): Promise; + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + delete(options?: ContainerDeleteMethodOptions): Promise; + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + deleteIfExists(options?: ContainerDeleteMethodOptions): Promise; + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. + */ + setMetadata(metadata?: Metadata, options?: ContainerSetMetadataOptions): Promise; + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + getAccessPolicy(options?: ContainerGetAccessPolicyOptions): Promise; + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + setAccessPolicy(access?: PublicAccessType, containerAcl?: SignedIdentifier[], options?: ContainerSetAccessPolicyOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. 
+ * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's content, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method; please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrent uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + uploadBlockBlob(blobName: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise<{ + blockBlobClient: BlockBlobClient; + response: BlockBlobUploadResponse; + }>; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + deleteBlob(blobName: string, options?: ContainerDeleteBlobOptions): Promise; + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + private listBlobFlatSegment; + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation.
+ */ + private listBlobHierarchySegment; + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. + */ + private listItems; + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options?: ContainerListBlobsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listHierarchySegments; + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + private listItemsByHierarchy; + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. + * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter: string, options?: ContainerListBlobsOptions): PagedAsyncIterableIterator<({ + kind: "prefix"; + } & BlobPrefix) | ({ + kind: "blob"; + } & BlobItem), ContainerListBlobHierarchySegmentResponse>; + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ContainerFindBlobByTagsOptions): PagedAsyncIterableIterator; + private getContainerNameFromUrl; + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient(): BlobBatchClient; +} + +/** Defines headers for Container_create operation. */ +export declare interface ContainerCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link ContainerClient.createIfNotExists} operation. + */ +export declare interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse { + /** + * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link ContainerClient.create} operation. + */ +export declare interface ContainerCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the container. + */ + metadata?: Metadata; + /** + * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include: + * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account. + * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request. + */ + access?: PublicAccessType; + /** + * Container encryption scope info. + */ + containerEncryptionScope?: ContainerEncryptionScope; +} + +/** Contains response data for the create operation. */ +export declare type ContainerCreateResponse = ContainerCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerCreateHeaders; + }; +}; + +/** + * Options to configure the {@link ContainerClient.deleteBlob} operation. + */ +export declare interface ContainerDeleteBlobOptions extends BlobDeleteOptions { + /** + * An opaque DateTime value that, when present, specifies the version + * of the blob to delete. It's for service version 2019-10-10 and newer. + */ + versionId?: string; +} + +/** Defines headers for Container_delete operation. */ +export declare interface ContainerDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link ContainerClient.deleteIfExists} operation. + */ +export declare interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse { + /** + * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link ContainerClient.delete} operation. + */ +export declare interface ContainerDeleteMethodOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting the container. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the delete operation. */ +export declare type ContainerDeleteResponse = ContainerDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerDeleteHeaders; + }; +}; + +/** Parameter group */ +export declare interface ContainerEncryptionScope { + /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */ + defaultEncryptionScope?: string; + /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */ + preventEncryptionScopeOverride?: boolean; +} + +/** + * Options to configure {@link ContainerClient.exists} operation. + */ +export declare interface ContainerExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Defines headers for Container_filterBlobs operation. */ +export declare interface ContainerFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} + +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ContainerFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; +} + +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment & ContainerFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; + +/** + * Options to configure {@link ContainerClient.generateSasUrl} operation. + */ +export declare interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: ContainerSASPermissions; +} + +/** Defines headers for Container_getAccessPolicy operation. */ +export declare interface ContainerGetAccessPolicyHeaders { + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.getAccessPolicy} operation. + */ +export declare interface ContainerGetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** + * Contains response data for the {@link ContainerClient.getAccessPolicy} operation. + */ +export declare type ContainerGetAccessPolicyResponse = { + signedIdentifiers: SignedIdentifier[]; +} & ContainerGetAccessPolicyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerGetAccessPolicyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: SignedIdentifierModel[]; + }; +}; + +/** Defines headers for Container_getProperties operation. 
*/ +export declare interface ContainerGetPropertiesHeaders { + metadata?: { + [propertyName: string]: string; + }; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** Indicates whether the container has an immutability policy set on it. */ + hasImmutabilityPolicy?: boolean; + /** Indicates whether the container has a legal hold. */ + hasLegalHold?: boolean; + /** The default encryption scope for the container. */ + defaultEncryptionScope?: string; + /** Indicates whether the container's default encryption scope can be overridden. */ + denyEncryptionScopeOverride?: boolean; + /** Indicates whether version level worm is enabled on a container. */ + isImmutableStorageWithVersioningEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.getProperties} operation. + */ +export declare interface ContainerGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** Contains response data for the getProperties operation. */ +export declare type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerGetPropertiesHeaders; + }; +}; + +/** An Azure Storage container */ +export declare interface ContainerItem { + name: string; + deleted?: boolean; + version?: string; + /** Properties of a container */ + properties: ContainerProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; +} + +/** Defines headers for Container_listBlobFlatSegment operation. */ +export declare interface ContainerListBlobFlatSegmentHeaders { + /** The media type of the body of the response.
For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the listBlobFlatSegment operation. + */ +export declare type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse & ContainerListBlobFlatSegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobFlatSegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsFlatSegmentResponseModel; + }; +}; + +/** Defines headers for Container_listBlobHierarchySegment operation. */ +export declare interface ContainerListBlobHierarchySegmentHeaders { + /** The media type of the body of the response. For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the listBlobHierarchySegment operation. + */ +export declare type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse & ContainerListBlobHierarchySegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobHierarchySegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsHierarchySegmentResponseModel; + }; +}; + +/** + * Options to configure Container - List Blobs operations. + * + * See: + * - {@link ContainerClient.listBlobsFlat} + * - {@link ContainerClient.listBlobsByHierarchy} + */ +export declare interface ContainerListBlobsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response. 
+ */ + includeCopy?: boolean; + /** + * Specifies whether soft deleted blobs should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether blob metadata be returned in the response. + */ + includeMetadata?: boolean; + /** + * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response. + */ + includeSnapshots?: boolean; + /** + * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response. + */ + includeVersions?: boolean; + /** + * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response. + */ + includeUncommitedBlobs?: boolean; + /** + * Specifies whether blob tags be returned in the response. + */ + includeTags?: boolean; + /** + * Specifies whether deleted blob with versions be returned in the response. + */ + includeDeletedWithVersions?: boolean; + /** + * Specifies whether blob immutability policy be returned in the response. + */ + includeImmutabilityPolicy?: boolean; + /** + * Specifies whether blob legal hold be returned in the response. + */ + includeLegalHold?: boolean; +} + +/** Properties of a container */ +export declare interface ContainerProperties { + lastModified: Date; + etag: string; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + publicAccess?: PublicAccessType; + hasImmutabilityPolicy?: boolean; + hasLegalHold?: boolean; + defaultEncryptionScope?: string; + preventEncryptionScopeOverride?: boolean; + deletedOn?: Date; + remainingRetentionDays?: number; + /** Indicates if version level worm is enabled on this container. */ + isImmutableStorageWithVersioningEnabled?: boolean; +} + +/** + * Options to configure Container - Release Lease operation. + */ +export declare interface ContainerReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** Defines headers for Container_rename operation. */ +export declare interface ContainerRenameHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the rename operation. */ +export declare type ContainerRenameResponse = ContainerRenameHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerRenameHeaders; + }; +}; + +/** + * Options to configure Container - Renew Lease operation. 
+ */ +export declare interface ContainerRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Conditions to meet for the container. + */ +export declare interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions { +} + +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class ContainerSASPermissions { + /** + * Creates a {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): ContainerSASPermissions; + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains the same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specifies List access granted. + */ + list: boolean; + /** + * Specifies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString(): string; +} + +/** + * A type that looks like a Container SAS permission. + * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface ContainerSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted.
+ */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specifies List access granted. + */ + list?: boolean; + /** + * Specifies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags?: boolean; +} + +/** Defines headers for Container_setAccessPolicy operation. */ +export declare interface ContainerSetAccessPolicyHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.setAccessPolicy} operation. + */ +export declare interface ContainerSetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting the access policy. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the setAccessPolicy operation. */ +export declare type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetAccessPolicyHeaders; + }; +}; + +/** Defines headers for Container_setMetadata operation. */ +export declare interface ContainerSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value.
*/ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.setMetadata} operation. + */ +export declare interface ContainerSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the setMetadata operation. */ +export declare type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetMetadataHeaders; + }; +}; + +/** Defines headers for Container_restore operation. */ +export declare interface ContainerUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the restore operation. */ +export declare type ContainerUndeleteResponse = ContainerUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerUndeleteHeaders; + }; +}; + +/** + * Defines the operations from a {@link BlobClient} that are needed for the poller + * returned by {@link BlobClient.beginCopyFromURL} to work. + */ +export declare type CopyPollerBlobClient = Pick & { + startCopyFromURL(copySource: string, options?: BlobStartCopyFromURLOptions): Promise; +}; + +/** Defines values for CopyStatusType. */ +export declare type CopyStatusType = "pending" | "success" | "aborted" | "failed"; + +/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */ +export declare interface CorsRule { + /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. 
Note that the origin must be an exact case-sensitive match with the origin that the user agent sends to the service. You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */ + allowedOrigins: string; + /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */ + allowedMethods: string; + /** the request headers that the origin domain may specify on the CORS request. */ + allowedHeaders: string; + /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */ + exposedHeaders: string; + /** The maximum amount of time that a browser should cache the preflight OPTIONS request. */ + maxAgeInSeconds: number; +} + +/** Parameter group */ +export declare interface CpkInfo { + /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */ + encryptionKey?: string; + /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */ + encryptionKeySha256?: string; + /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is provided. */ + encryptionAlgorithm?: EncryptionAlgorithmType; +} + +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host a credentialPolicyCreator factory which generates CredentialPolicy. + */ +declare abstract class Credential_2 implements RequestPolicyFactory { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy; +} +export { Credential_2 as Credential } + +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +export declare abstract class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; +} + +/** + * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy. + */ +export declare type CredentialPolicyCreator = (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => CredentialPolicy; + +/** Defines values for DeleteSnapshotsOptionType. */ +export declare type DeleteSnapshotsOptionType = "include" | "only"; + +export { deserializationPolicy } + +/** + * Defines values for EncryptionAlgorithmType. \ + * {@link KnownEncryptionAlgorithmType} can be used interchangeably with EncryptionAlgorithmType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **AES256** + */ +export declare type EncryptionAlgorithmType = string; + +/** + * Blob info from a {@link BlobServiceClient.findBlobsByTags} + */ +export declare interface FilterBlobItem { + /** + * Blob Name. + */ + name: string; + /** + * Container Name. + */ + containerName: string; + /** + * Blob Tags. + */ + tags?: Tags; + /** + * Tag value.
+ * + * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags. + */ + tagValue: string; +} + +/** Blob info from a Filter Blobs API call */ +export declare interface FilterBlobItemModel { + name: string; + containerName: string; + /** Blob tags */ + tags?: BlobTags; +} + +/** + * Segment response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface FilterBlobSegment { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItem[]; + continuationToken?: string; +} + +/** The result of a Filter Blobs API call */ +export declare interface FilterBlobSegmentModel { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItemModel[]; + continuationToken?: string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateAccountSASQueryParameters(accountSASSignatureValues: AccountSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * Fill in the required details before running the following snippets. + * + * Example usage: + * + * ```js + * // Generate service level SAS for a container + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using an identifier: + * + * ```js + * // Generate service level SAS for a container with identifier + * // startsOn & permissions are optional when identifier is provided + * const identifier = "unique-id"; + * await containerClient.setAccessPolicy(undefined, [ + * { + * accessPolicy: { + * expiresOn: new Date(new Date().valueOf() + 86400), // Date type + * permissions: ContainerSASPermissions.parse("racwdl").toString(), + * startsOn: new Date() // Date type + * }, + * id: identifier + * } + * ]); + * + * const containerSAS = generateBlobSASQueryParameters( + * { + * containerName, // Required + * identifier // Required + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using a blob name: + * + * ```js + * // Generate service level SAS for a blob + * const blobSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * blobName, // Required + * permissions: BlobSASPermissions.parse("racwd"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type + * cacheControl: "cache-control-override", // Optional + * contentDisposition: "content-disposition-override", // Optional + * contentEncoding: "content-encoding-override", // Optional + * contentLanguage: "content-language-override", // Optional + * contentType: "content-type-override", // Optional + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required. + * + * Example usage: + * + * ```js + * // Generate user delegation SAS for a container + * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn); + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn, // Optional. Date type + * expiresOn, // Required. 
Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2018-11-09" // Must greater than or equal to 2018-11-09 to generate user delegation SAS + * }, + * userDelegationKey, // UserDelegationKey + * accountName + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @param accountName - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, userDelegationKey: UserDelegationKey, accountName: string): SASQueryParameters; + +/** Geo-Replication information for the Secondary Storage Service */ +export declare interface GeoReplication { + /** The status of the secondary location */ + status: GeoReplicationStatusType; + /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */ + lastSyncOn: Date; +} + +/** Defines values for GeoReplicationStatusType. */ +export declare type GeoReplicationStatusType = "live" | "bootstrap" | "unavailable"; + +export declare function getBlobServiceAccountAudience(storageAccountName: string): string; + +/** + * Represents authentication information in Authorization, ProxyAuthorization, + * WWW-Authenticate, and Proxy-Authenticate header values. + */ +export declare interface HttpAuthorization { + /** + * The scheme to use for authorization. + */ + scheme: string; + /** + * the credentials containing the authentication information of the user agent for the resource being requested. + */ + value: string; +} + +export { HttpHeaders } + +export { HttpOperationResponse } + +export { HttpRequestBody } + +export { IHttpClient } + +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export declare function isPipelineLike(pipeline: unknown): pipeline is PipelineLike; + +/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */ +export declare enum KnownEncryptionAlgorithmType { + AES256 = "AES256" +} + +/** + * The details for a specific lease. + */ +export declare interface Lease { + /** + * The ETag contains a value that you can use to + * perform operations conditionally. If the request version is 2011-08-18 or + * newer, the ETag value will be in quotes. + */ + etag?: string; + /** + * Returns the date and time the container was + * last modified. Any operation that modifies the blob, including an update + * of the blob's metadata or properties, changes the last-modified time of + * the blob. + */ + lastModified?: Date; + /** + * Uniquely identifies a container's lease + */ + leaseId?: string; + /** + * Approximate time remaining in the lease + * period, in seconds. + */ + leaseTime?: number; + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + */ + requestId?: string; + /** + * Indicates the version of the Blob service used + * to execute the request. This header is returned for requests made against + * version 2009-09-19 and above. 
+ */ + version?: string; + /** + * UTC date/time value generated by the service that + * indicates the time at which the response was initiated + */ + date?: Date; + /** + * Error code if any associated with the response that returned + * the Lease information. + */ + errorCode?: string; +} + +/** Parameter group */ +export declare interface LeaseAccessConditions { + /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */ + leaseId?: string; +} + +/** Defines values for LeaseDurationType. */ +export declare type LeaseDurationType = "infinite" | "fixed"; + +/** + * Configures lease operations. + */ +export declare interface LeaseOperationOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Contains the response data for operations that create, modify, or delete a lease. + * + * See {@link BlobLeaseClient}. + */ +export declare type LeaseOperationResponse = Lease & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: Lease; + }; +}; + +/** Defines values for LeaseStateType. */ +export declare type LeaseStateType = "available" | "leased" | "expired" | "breaking" | "broken"; + +/** Defines values for LeaseStatusType. */ +export declare type LeaseStatusType = "locked" | "unlocked"; + +/** + * An enumeration of blobs + */ +export declare interface ListBlobsFlatSegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegment; + continuationToken?: string; +} + +/** An enumeration of blobs */ +export declare interface ListBlobsFlatSegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegmentModel; + continuationToken?: string; +} + +/** + * An enumeration of blobs + */ +export declare interface ListBlobsHierarchySegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegment; + continuationToken?: string; +} + +/** An enumeration of blobs */ +export declare interface ListBlobsHierarchySegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegmentModel; + continuationToken?: string; +} + +/** An enumeration of containers */ +export declare interface ListContainersSegmentResponse { + serviceEndpoint: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + containerItems: ContainerItem[]; + continuationToken?: string; +} + +/** + * The `@azure/logger` configuration for this package. + */ +export declare const logger: AzureLogger; + +/** Azure Analytics Logging settings. */ +export declare interface Logging { + /** The version of Storage Analytics to configure. */ + version: string; + /** Indicates whether all delete requests should be logged. */ + deleteProperty: boolean; + /** Indicates whether all read requests should be logged. 
*/ + read: boolean; + /** Indicates whether all write requests should be logged. */ + write: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy: RetentionPolicy; +} + +/** + * Specifies HTTP options for conditional requests based on ETag matching. + */ +export declare interface MatchConditions { + /** + * Specify an ETag value to operate only on blobs with a matching value. + */ + ifMatch?: string; + /** + * Specify an ETag value to operate only on blobs without a matching value. + */ + ifNoneMatch?: string; +} + +/** + * A map of name-value pairs to associate with the resource. + */ +export declare interface Metadata { + /** + * A name-value pair. + */ + [propertyName: string]: string; +} + +/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ +export declare interface Metrics { + /** The version of Storage Analytics to configure. */ + version?: string; + /** Indicates whether metrics are enabled for the Blob service. */ + enabled: boolean; + /** Indicates whether metrics should generate summary statistics for called API operations. */ + includeAPIs?: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy?: RetentionPolicy; +} + +/** + * Specifies HTTP options for conditional requests based on modification time. + */ +export declare interface ModificationConditions { + /** + * Specify this header value to operate only on a blob if it has been modified since the + * specified date/time. + */ + ifModifiedSince?: Date; + /** + * Specify this header value to operate only on a blob if it has not been modified since the + * specified date/time. + */ + ifUnmodifiedSince?: Date; +} + +/** + * standard HTTP conditional headers and tags condition. + */ +export declare interface ModifiedAccessConditions extends MatchConditions, ModificationConditions, TagConditions { +} + +/** Parameter group */ +export declare interface ModifiedAccessConditionsModel { + /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */ + ifModifiedSince?: Date; + /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */ + ifUnmodifiedSince?: Date; + /** Specify an ETag value to operate only on blobs with a matching value. */ + ifMatch?: string; + /** Specify an ETag value to operate only on blobs without a matching value. */ + ifNoneMatch?: string; + /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */ + ifTags?: string; +} + +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export declare function newPipeline(credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, pipelineOptions?: StoragePipelineOptions): Pipeline; + +/** + * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}. + * This is used when retrieving the Object Replication Properties on the source blob. 
The policy id for the + * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses + * (e.g. {@link BlobProperties.ObjectReplicationDestinationPolicyId}. + */ +export declare interface ObjectReplicationPolicy { + /** + * The Object Replication Policy ID. + */ + policyId: string; + /** + * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID. + */ + rules: ObjectReplicationRule[]; +} + +/** + * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob. + * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}. + */ +export declare interface ObjectReplicationRule { + /** + * The Object Replication Rule ID. + */ + ruleId: string; + /** + * The Replication Status + */ + replicationStatus: ObjectReplicationStatus; +} + +/** + * Specifies the Replication Status of a blob. This is used when a storage account has + * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}. + */ +export declare type ObjectReplicationStatus = "complete" | "failed"; + +/** Defines headers for PageBlob_clearPages operation. */ +export declare interface PageBlobClearPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.clearPages} operation. + */ +export declare interface PageBlobClearPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when clearing pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. 
Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the clearPages operation. */ +export declare type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobClearPagesHeaders; + }; +}; + +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export declare class PageBlobClient extends BlobClient { + /** + * pageBlobsContext provided by protocol layer. + */ + private pageBlobContext; + /** + * + * Creates an instance of PageBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A Client string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * + * @param url - A URL string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. 
+ * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Providing "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): PageBlobClient; + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + create(size: number, options?: PageBlobCreateOptions): Promise; + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + createIfNotExists(size: number, options?: PageBlobCreateIfNotExistsOptions): Promise; + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + uploadPages(body: HttpRequestBody, offset: number, count: number, options?: PageBlobUploadPagesOptions): Promise; + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) may be needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + uploadPagesFromURL(sourceURL: string, sourceOffset: number, destOffset: number, count: number, options?: PageBlobUploadPagesFromURLOptions): Promise; + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. + */ + clearPages(offset?: number, count?: number, options?: PageBlobClearPagesOptions): Promise; + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + getPageRanges(offset?: number, count?: number, options?: PageBlobGetPageRangesOptions): Promise; + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + private listPageRangesSegment; + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + private listPageRangeItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + private listPageRangeItems; + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob.
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset?: number, count?: number, options?: PageBlobListPageRangesOptions): PagedAsyncIterableIterator; + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + getPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobGetPageRangesDiffOptions): Promise; + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get.
+ * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangesDiffSegment; + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItems; + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRangesDiff();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ *   pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+ *   for (const pageRange of response) {
+ *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ *   }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobListPageRangesDiffOptions): PagedAsyncIterableIterator<PageRangeInfo, PageBlobGetPageRangesDiffResponseModel>;
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
+ */
+ getPageRangesDiffForManagedDisks(offset: number, count: number, prevSnapshotUrl: string, options?: PageBlobGetPageRangesDiffOptions): Promise<PageBlobGetPageRangesDiffResponse>;
+ /**
+ * Resizes the page blob to the specified size (which must be a multiple of 512).
+ * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties
+ *
+ * @param size - Target size
+ * @param options - Options to the Page Blob Resize operation.
+ * @returns Response data for the Page Blob Resize operation.
+ */
+ resize(size: number, options?: PageBlobResizeOptions): Promise<PageBlobResizeResponse>;
+ /**
+ * Sets a page blob's sequence number.
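+ *
+ * Example (a minimal sketch; it assumes an existing `pageBlobClient` for a page blob that has already been created):
+ *
+ * ```js
+ * // "max" keeps the larger of the current sequence number and the supplied value.
+ * await pageBlobClient.updateSequenceNumber("max", 10);
+ *
+ * // "increment" bumps the current value by one; no explicit number is needed.
+ * await pageBlobClient.updateSequenceNumber("increment");
+ * ```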
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + updateSequenceNumber(sequenceNumberAction: SequenceNumberActionType, sequenceNumber?: number, options?: PageBlobUpdateSequenceNumberOptions): Promise; + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + startCopyIncremental(copySource: string, options?: PageBlobStartCopyIncrementalOptions): Promise; +} + +/** Defines headers for PageBlob_copyIncremental operation. */ +export declare interface PageBlobCopyIncrementalHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the copyIncremental operation. */ +export declare type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobCopyIncrementalHeaders; + }; +}; + +/** Defines headers for PageBlob_create operation. 
*/ +export declare interface PageBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: PremiumPageBlobTier | string; +} + +/** + * Contains response data for the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} + +/** + * Options to configure the {@link PageBlobClient.create} operation. + */ +export declare interface PageBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating a page blob. + */ + conditions?: BlobRequestConditions; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: PremiumPageBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the create operation. */ +export declare type PageBlobCreateResponse = PageBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobCreateHeaders; + }; +}; + +/** Defines headers for PageBlob_getPageRangesDiff operation. */ +export declare interface PageBlobGetPageRangesDiffHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.getRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; + /** + * (unused) + */ + range?: string; +} + +/** + * Contains response data for the {@link BlobClient.getPageRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffResponse extends PageList, PageBlobGetPageRangesDiffHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} + +/** Contains response data for the getPageRangesDiff operation. */ +export declare type PageBlobGetPageRangesDiffResponseModel = PageBlobGetPageRangesDiffHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + }; +}; + +/** Defines headers for PageBlob_getPageRanges operation. */ +export declare interface PageBlobGetPageRangesHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} + +/** + * Contains response data for the {@link BlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} + +/** Contains response data for the getPageRanges operation. */ +export declare type PageBlobGetPageRangesResponseModel = PageBlobGetPageRangesHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesHeaders; + }; +}; + +/** + * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation. + */ +export declare interface PageBlobListPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; +} + +/** + * Options to configure the {@link PageBlobClient.listPageRanges} operation. + */ +export declare interface PageBlobListPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} + +/** + * Conditions to add to the creation of this page blob. + */ +export declare interface PageBlobRequestConditions extends BlobRequestConditions, SequenceNumberAccessConditions { +} + +/** Defines headers for PageBlob_resize operation. */ +export declare interface PageBlobResizeHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.resize} operation. + */ +export declare interface PageBlobResizeOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when resizing a page blob. + */ + conditions?: BlobRequestConditions; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the resize operation. */ +export declare type PageBlobResizeResponse = PageBlobResizeHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobResizeHeaders; + }; +}; + +/** + * Options to configure {@link PageBlobClient.startCopyIncremental} operation. + */ +export declare interface PageBlobStartCopyIncrementalOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when starting a copy incremental operation. + */ + conditions?: ModifiedAccessConditions; +} + +/** Defines headers for PageBlob_updateSequenceNumber operation. */ +export declare interface PageBlobUpdateSequenceNumberHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.updateSequenceNumber} operation. + */ +export declare interface PageBlobUpdateSequenceNumberOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: BlobRequestConditions; +} + +/** Contains response data for the updateSequenceNumber operation. */ +export declare type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUpdateSequenceNumberHeaders; + }; +}; + +/** Defines headers for PageBlob_uploadPagesFromURL operation. */ +export declare interface PageBlobUploadPagesFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation. 
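+ *
+ * Example (a hedged sketch; `pageBlobClient` and `sourceUrl` are assumed to exist, and the
+ * source must be readable by the service, e.g. a public blob or a URL carrying a SAS):
+ *
+ * ```js
+ * // Copy the first 512 bytes of the source into this page blob at destination offset 0.
+ * // Offsets and counts must be aligned to 512-byte page boundaries.
+ * await pageBlobClient.uploadPagesFromURL(sourceUrl, 0, 0, 512);
+ * ```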
+ */ +export declare interface PageBlobUploadPagesFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: PageBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the uploadPagesFromURL operation. */ +export declare type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesFromURLHeaders; + }; +}; + +/** Defines headers for PageBlob_uploadPages operation. */ +export declare interface PageBlobUploadPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.uploadPages} operation. + */ +export declare interface PageBlobUploadPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Callback to receive events on the progress of upload pages operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the uploadPages operation. */ +export declare type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesHeaders; + }; +}; + +/** + * List of page ranges for a blob. + */ +export declare interface PageList { + /** + * Valid non-overlapping page ranges. + */ + pageRange?: Range_2[]; + /** + * Present if the prevSnapshot parameter was specified and there were cleared + * pages between the previous snapshot and the target snapshot. 
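+ *
+ * Example (a minimal sketch; `pageBlobClient` and `prevSnapshot` are assumed to exist):
+ *
+ * ```js
+ * // Diff the first 4 KiB against an earlier snapshot and report both kinds of ranges.
+ * const diff = await pageBlobClient.getPageRangesDiff(0, 4096, prevSnapshot);
+ * for (const range of diff.pageRange ?? []) {
+ *   console.log(`changed: offset ${range.offset}, count ${range.count}`);
+ * }
+ * for (const range of diff.clearRange ?? []) {
+ *   console.log(`cleared: offset ${range.offset}, count ${range.count}`);
+ * }
+ * ```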
+ */ + clearRange?: Range_2[]; +} + +/** the list of pages */ +declare interface PageList_2 { + pageRange?: PageRange[]; + clearRange?: ClearRange[]; + continuationToken?: string; +} + +declare interface PageRange { + start: number; + end: number; +} + +export declare interface PageRangeInfo { + start: number; + end: number; + isClear: boolean; +} + +/** + * The multipart/mixed response which contains the response for each subrequest. + */ +export declare interface ParsedBatchResponse { + /** + * The parsed sub responses. + */ + subResponses: BatchSubResponse[]; + /** + * The succeeded executed sub responses' count; + */ + subResponsesSucceededCount: number; + /** + * The failed executed sub responses' count; + */ + subResponsesFailedCount: number; +} + +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare class Pipeline implements PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories: RequestPolicyFactory[], options?: PipelineOptions); + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} + +/** + * An interface for the {@link Pipeline} class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare interface PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} + +/** + * Option interface for Pipeline constructor. + */ +export declare interface PipelineOptions { + /** + * Optional. Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; +} + +export { PollerLike } + +/** + * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with. + */ +export declare interface PollerLikeWithCancellation, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. 
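+ *
+ * Example (a hedged sketch; `blobClient` and `sourceUrl` are assumed, and `beginCopyFromURL`
+ * is just one of the operations that returns such a poller):
+ *
+ * ```js
+ * const poller = await blobClient.beginCopyFromURL(sourceUrl);
+ * // Resolves only once the copy has reached a terminal state.
+ * const result = await poller.pollUntilDone();
+ * console.log(result.copyStatus);
+ * ```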
+ */ + pollUntilDone(): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * The TState defined in PollerLike can be a subset of the TState defined in + * the Poller implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} + +export { PollOperationState } + +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export declare enum PremiumPageBlobTier { + /** + * P4 Tier. + */ + P4 = "P4", + /** + * P6 Tier. + */ + P6 = "P6", + /** + * P10 Tier. + */ + P10 = "P10", + /** + * P15 Tier. + */ + P15 = "P15", + /** + * P20 Tier. + */ + P20 = "P20", + /** + * P30 Tier. + */ + P30 = "P30", + /** + * P40 Tier. + */ + P40 = "P40", + /** + * P50 Tier. + */ + P50 = "P50", + /** + * P60 Tier. + */ + P60 = "P60", + /** + * P70 Tier. + */ + P70 = "P70", + /** + * P80 Tier. + */ + P80 = "P80" +} + +/** Defines values for PublicAccessType. */ +export declare type PublicAccessType = "container" | "blob"; + +/** + * Range for Blob Service Operations. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations + */ +declare interface Range_2 { + /** + * StartByte, larger than or equal 0. + */ + offset: number; + /** + * Optional. Count of bytes, larger than 0. + * If not provided, will return bytes from offset to the end. + */ + count?: number; +} +export { Range_2 as Range } + +/** Defines values for RehydratePriority. */ +export declare type RehydratePriority = "High" | "Standard"; + +export { RequestPolicy } + +export { RequestPolicyFactory } + +export { RequestPolicyOptions } + +export { RestError } + +/** the retention policy which determines how long the associated data should persist */ +export declare interface RetentionPolicy { + /** Indicates whether a retention policy is enabled for the storage service */ + enabled: boolean; + /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */ + days?: number; +} + +/** + * Allowed IP range for a SAS. + */ +export declare interface SasIPRange { + /** + * Starting IP address in the IP range. 
+ * If end IP doesn't provide, start IP will the only IP allowed. + */ + start: string; + /** + * Optional. IP address that ends the IP range. + * If not provided, start IP will the only IP allowed. + */ + end?: string; +} + +/** + * Protocols for generated SAS. + */ +export declare enum SASProtocol { + /** + * Protocol that allows HTTPS only + */ + Https = "https", + /** + * Protocol that allows both HTTPS and HTTP + */ + HttpsAndHttp = "https,http" +} + +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. + */ +export declare class SASQueryParameters { + /** + * The storage API version. + */ + readonly version: string; + /** + * Optional. The allowed HTTP protocol(s). + */ + readonly protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + readonly startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + readonly expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + readonly permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + readonly services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + readonly resourceTypes?: string; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + readonly identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + readonly encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + readonly resource?: string; + /** + * The signature for the SAS token. + */ + readonly signature: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + readonly cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + readonly contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + readonly contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + readonly contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + readonly contentType?: string; + /** + * Inner value of getter ipRange. + */ + private readonly ipRangeInner?; + /** + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. 
+ */ + private readonly signedOid?; + /** + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. + */ + private readonly signedTenantId?; + /** + * The date-time the key is active. + * Property of user delegation key. + */ + private readonly signedStartsOn?; + /** + * The date-time the key expires. + * Property of user delegation key. + */ + private readonly signedExpiresOn?; + /** + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. + */ + private readonly signedService?; + /** + * The service version that created the user delegation key. + * Property of user delegation key. + */ + private readonly signedVersion?; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. + */ + readonly preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + readonly correlationId?: string; + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange(): SasIPRange | undefined; + /** + * Creates an instance of SASQueryParameters. + * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param permissions - Representing the storage permissions + * @param services - Representing the storage services being accessed (only for Account SAS) + * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS) + * @param protocol - Representing the allowed HTTP protocol(s) + * @param startsOn - Representing the start time for this SAS token + * @param expiresOn - Representing the expiry time for this SAS token + * @param ipRange - Representing the range of valid IP addresses for this SAS token + * @param identifier - Representing the signed identifier (only for Service SAS) + * @param resource - Representing the storage container or blob (only for Service SAS) + * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS) + * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS) + * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS) + * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS) + * @param contentType - Representing the content-type header (only for Blob/File Service SAS) + * @param userDelegationKey - Representing the user delegation key properties + * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS) + * @param correlationId - Representing the correlation ID (only for User Delegation SAS) + * @param encryptionScope - + */ + constructor(version: string, signature: string, permissions?: string, services?: string, resourceTypes?: string, protocol?: SASProtocol, startsOn?: Date, expiresOn?: Date, ipRange?: SasIPRange, identifier?: string, resource?: string, 
cacheControl?: string, contentDisposition?: string, contentEncoding?: string, contentLanguage?: string, contentType?: string, userDelegationKey?: UserDelegationKey, preauthorizedAgentObjectId?: string, correlationId?: string, encryptionScope?: string); + /** + * Creates an instance of SASQueryParameters. + * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param options - Optional. Options to construct the SASQueryParameters. + */ + constructor(version: string, signature: string, options?: SASQueryParametersOptions); + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. + * + */ + toString(): string; + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + private tryAppendQueryParameter; +} + +/** + * Options to construct {@link SASQueryParameters}. + */ +export declare interface SASQueryParametersOptions { + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + resourceTypes?: string; + /** + * Optional. The allowed HTTP protocol(s). + */ + protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + resource?: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + contentType?: string; + /** + * User delegation key properties. + */ + userDelegationKey?: UserDelegationKey; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. 
The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}. + * This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + correlationId?: string; +} + +/** Parameter group */ +export declare interface SequenceNumberAccessConditions { + /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */ + ifSequenceNumberLessThanOrEqualTo?: number; + /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */ + ifSequenceNumberLessThan?: number; + /** Specify this header value to operate only on a blob if it has the specified sequence number. */ + ifSequenceNumberEqualTo?: number; +} + +/** Defines values for SequenceNumberActionType. */ +export declare type SequenceNumberActionType = "max" | "update" | "increment"; + +/** Defines headers for Service_filterBlobs operation. */ +export declare interface ServiceFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ServiceFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment & ServiceFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation. + */ +export declare interface ServiceGenerateAccountSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional. IP range allowed. 
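+ *
+ * Example (a hedged sketch; it assumes `blobServiceClient` was constructed with a
+ * `StorageSharedKeyCredential`, which account SAS generation requires):
+ *
+ * ```js
+ * const expiresOn = new Date(Date.now() + 60 * 60 * 1000); // one hour from now
+ * const sasUrl = blobServiceClient.generateAccountSasUrl(
+ *   expiresOn,
+ *   AccountSASPermissions.parse("r"),  // read-only
+ *   "sco",                             // service, container and object resource types
+ *   { ipRange: { start: "203.0.113.0", end: "203.0.113.255" } }
+ * );
+ * ```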
+ */ + ipRange?: SasIPRange; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} + +/** Defines headers for Service_getAccountInfo operation. */ +export declare interface ServiceGetAccountInfoHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Identifies the sku name of the account */ + skuName?: SkuName; + /** Identifies the account kind */ + accountKind?: AccountKind; + /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */ + isHierarchicalNamespaceEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getAccountInfo} operation. + */ +export declare interface ServiceGetAccountInfoOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getAccountInfo operation. */ +export declare type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetAccountInfoHeaders; + }; +}; + +/** Defines headers for Service_getProperties operation. */ +export declare interface ServiceGetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getProperties} operation. + */ +export declare interface ServiceGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getProperties operation. */ +export declare type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders & BlobServiceProperties & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceProperties; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetPropertiesHeaders; + }; +}; + +/** Defines headers for Service_getStatistics operation. 
*/ +export declare interface ServiceGetStatisticsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getStatistics} operation. + */ +export declare interface ServiceGetStatisticsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getStatistics operation. */ +export declare type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders & BlobServiceStatistics & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceStatistics; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetStatisticsHeaders; + }; +}; + +/** Defines headers for Service_getUserDelegationKey operation. */ +export declare interface ServiceGetUserDelegationKeyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the Service - Get User Delegation Key. + */ +export declare interface ServiceGetUserDelegationKeyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** + * Contains response data for the {@link getUserDelegationKey} operation. + */ +export declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey & ServiceGetUserDelegationKeyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceGetUserDelegationKeyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: UserDelegationKeyModel; + }; +}; + +/** + * Options to configure the {@link BlobServiceClient.listContainers} operation. 
+ */ +export declare interface ServiceListContainersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether the container's metadata + * should be returned as part of the response body. + */ + includeMetadata?: boolean; + /** + * Specifies whether soft deleted containers should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether system containers should be included in the response. + */ + includeSystem?: boolean; +} + +/** Defines headers for Service_listContainersSegment operation. */ +export declare interface ServiceListContainersSegmentHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the listContainersSegment operation. */ +export declare type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders & ListContainersSegmentResponse & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: ListContainersSegmentResponse; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceListContainersSegmentHeaders; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.renameContainer} operation. + */ +export declare interface ServiceRenameContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Condition to meet for the source container. + */ + sourceCondition?: LeaseAccessConditions; +} + +/** Defines headers for Service_setProperties operation. */ +export declare interface ServiceSetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.setProperties} operation. + */ +export declare interface ServiceSetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the setProperties operation. */ +export declare type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSetPropertiesHeaders; + }; +}; + +/** Defines headers for Service_submitBatch operation. */ +export declare interface ServiceSubmitBatchHeaders { + /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */ + contentType?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** Error Code */ + errorCode?: string; +} + +/** Optional parameters. */ +export declare interface ServiceSubmitBatchOptionalParamsModel extends coreHttp.OperationOptions { + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; +} + +/** Contains response data for the submitBatch operation. */ +export declare type ServiceSubmitBatchResponseModel = ServiceSubmitBatchHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.undeleteContainer} operation. + */ +export declare interface ServiceUndeleteContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies the new name of the restored container. + * Will use its original name if this is not specified. + * @deprecated Restore container to a different name is not supported by service anymore. + */ + destinationContainerName?: string; +} + +/** + * Signed identifier. + */ +export declare interface SignedIdentifier { + /** + * a unique id + */ + id: string; + /** + * Access Policy + */ + accessPolicy: { + /** + * Optional. The date-time the policy is active + */ + startsOn?: Date; + /** + * Optional. 
The date-time the policy expires + */ + expiresOn?: Date; + /** + * The permissions for the acl policy + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + */ + permissions?: string; + }; +} + +/** signed identifier */ +export declare interface SignedIdentifierModel { + /** a unique id */ + id: string; + /** An Access policy */ + accessPolicy: AccessPolicy; +} + +/** Defines values for SkuName. */ +export declare type SkuName = "Standard_LRS" | "Standard_GRS" | "Standard_RAGRS" | "Standard_ZRS" | "Premium_LRS"; + +/** The properties that enable an account to host a static website */ +export declare interface StaticWebsite { + /** Indicates whether this account is hosting a static website */ + enabled: boolean; + /** The default name of the index page under each directory */ + indexDocument?: string; + /** The absolute path of the custom 404 page */ + errorDocument404Path?: string; + /** Absolute path of the default index page */ + defaultIndexDocumentPath?: string; +} + +/** + * Defines the known cloud audiences for Storage. + */ +export declare enum StorageBlobAudience { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageOAuthScopes = "https://storage.azure.com/.default", + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + DiskComputeOAuthScopes = "https://disk.compute.azure.com/.default" +} + +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export declare class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; +} + +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export declare class StorageBrowserPolicyFactory implements RequestPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy; +} + +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +declare abstract class StorageClient { + /** + * Encoded URL string value. + */ + readonly url: string; + readonly accountName: string; + /* Excluded from this release type: pipeline */ + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. 
+ */ + protected readonly storageClientContext: StorageClientContext; + /** + */ + protected readonly isHttps: boolean; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + protected constructor(url: string, pipeline: PipelineLike); +} + +declare class StorageClientContext extends coreHttp.ServiceClient { + url: string; + version: string; + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url: string, options?: StorageClientOptionalParams); +} + +/** Optional parameters. */ +declare interface StorageClientOptionalParams extends coreHttp.ServiceClientOptions { + /** Specifies the version of the operation to use for this request. */ + version?: string; + /** Overrides client endpoint. */ + endpoint?: string; +} + +/** + * The OAuth scope to use with Azure Storage. + */ +export declare const StorageOAuthScopes: string | string[]; + +/** + * Options interface for the {@link newPipeline} function. + */ +export declare interface StoragePipelineOptions { + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; + /** + * Configures the built-in retry policy behavior. + */ + retryOptions?: StorageRetryOptions; + /** + * Keep alive configurations. Default keep-alive is enabled. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; + /** + * The audience used to retrieve an AAD token. + * By default, audience 'https://storage.azure.com/.default' will be used. + */ + audience?: string | string[]; +} + +/** + * Storage Blob retry options interface. + */ +export declare interface StorageRetryOptions { + /** + * Optional. StorageRetryPolicyType, default is exponential retry policy. + */ + readonly retryPolicyType?: StorageRetryPolicyType; + /** + * Optional. Max try number of attempts, default is 4. + * A value of 1 means 1 try and no retries. + * A value smaller than 1 means default retry number of attempts. + */ + readonly maxTries?: number; + /** + * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request. + * A value of zero or undefined means no default timeout on SDK client, Azure + * Storage server's default timeout policy will be used. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations + */ + readonly tryTimeoutInMs?: number; + /** + * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms). + * The delay increases (exponentially or linearly) with each retry up to a maximum specified by + * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs. + */ + readonly retryDelayInMs?: number; + /** + * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms). + * If you specify 0, then you must also specify 0 for retryDelayInMs. + */ + readonly maxRetryDelayInMs?: number; + /** + * If a secondaryHost is specified, retries will be tried against this host. 
If secondaryHost is undefined + * (the default) then operations are not retried against another host. + * + * NOTE: Before setting this field, make sure you understand the issues around + * reading stale and potentially-inconsistent data at + * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs} + */ + readonly secondaryHost?: string; +} + +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export declare class StorageRetryPolicy extends BaseRequestPolicy { + /** + * RetryOptions. + */ + private readonly retryOptions; + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryOptions?: StorageRetryOptions); + /** + * Sends request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + protected attemptSendRequest(request: WebResource, secondaryHas404: boolean, attempt: number): Promise; + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + protected shouldRetry(isPrimaryRetry: boolean, attempt: number, response?: HttpOperationResponse, err?: RestError): boolean; + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + private delay; +} + +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +export declare class StorageRetryPolicyFactory implements RequestPolicyFactory { + private retryOptions?; + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions?: StorageRetryOptions); + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy; +} + +/** + * RetryPolicy types. + */ +export declare enum StorageRetryPolicyType { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + EXPONENTIAL = 0, + /** + * Linear retry. Retry time delay grows linearly. + */ + FIXED = 1 +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export declare class StorageSharedKeyCredential extends Credential_2 { + /** + * Azure Storage account name; readonly. + */ + readonly accountName: string; + /** + * Azure Storage account key; readonly. + */ + private readonly accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName: string, accountKey: string); + /** + * Creates a StorageSharedKeyCredentialPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageSharedKeyCredentialPolicy; + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign: string): string; +} + +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export declare class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + private readonly factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, factory: StorageSharedKeyCredential); + /** + * Signs request. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + private getHeaderValueToSign; + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + private getCanonicalizedHeadersString; + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + private getCanonicalizedResourceString; +} + +/** Defines values for SyncCopyStatusType. */ +export declare type SyncCopyStatusType = "success"; + +/** + * Specifies HTTP options for conditional requests based on blob tags. + */ +export declare interface TagConditions { + /** + * Optional SQL statement to apply to the tags of the blob. + */ + tagConditions?: string; +} + +/** + * Blob tags. + */ +export declare type Tags = Record; + +/** + * A user delegation key. + */ +export declare interface UserDelegationKey { + /** + * The Azure Active Directory object ID in GUID format. + */ + signedObjectId: string; + /** + * The Azure Active Directory tenant ID in GUID format. + */ + signedTenantId: string; + /** + * The date-time the key is active. + */ + signedStartsOn: Date; + /** + * The date-time the key expires. + */ + signedExpiresOn: Date; + /** + * Abbreviation of the Azure Storage service that accepts the key. + */ + signedService: string; + /** + * The service version that created the key. + */ + signedVersion: string; + /** + * The key as a base64 string. + */ + value: string; +} + +/** A user delegation key */ +export declare interface UserDelegationKeyModel { + /** The Azure Active Directory object ID in GUID format. 
*/ + signedObjectId: string; + /** The Azure Active Directory tenant ID in GUID format */ + signedTenantId: string; + /** The date-time the key is active */ + signedStartsOn: string; + /** The date-time the key expires */ + signedExpiresOn: string; + /** Abbreviation of the Azure Storage service that accepts the key */ + signedService: string; + /** The service version that created the key */ + signedVersion: string; + /** The key as a base64 string */ + value: string; +} + +export { WebResource } + +export { } diff --git a/node_modules/@fastify/busboy/LICENSE b/node_modules/@fastify/busboy/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/node_modules/@fastify/busboy/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@fastify/busboy/README.md b/node_modules/@fastify/busboy/README.md new file mode 100644 index 0000000..ece3cc8 --- /dev/null +++ b/node_modules/@fastify/busboy/README.md @@ -0,0 +1,271 @@ +# busboy + +
+ +[![Build Status](https://github.com/fastify/busboy/actions/workflows/ci.yml/badge.svg)](https://github.com/fastify/busboy/actions) +[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) +[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/fastify/.github/blob/main/SECURITY.md) + +
+ +
+ +[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) +[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) + +
+ +Description +=========== + +A Node.js module for parsing incoming HTML form data. + +This is an officially supported fork by [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White, +aimed at addressing long-standing issues with it. + +Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup): + +| Library | Version | Mean time in nanoseconds (less is better) | +|-----------------------|---------|-------------------------------------------| +| busboy | 0.3.1 | `340114` | +| @fastify/busboy | 1.0.0 | `270984` | + +[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.31. + +Requirements +============ + +* [Node.js](http://nodejs.org/) 10+ + + +Install +======= + + npm i @fastify/busboy + + +Examples +======== + +* Parsing (multipart) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); +const Busboy = require('busboy'); + +http.createServer((req, res) => { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', (fieldname, file, filename, encoding, mimetype) => { + console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`); + file.on('data', data => { + console.log(`File [${fieldname}] got ${data.length} bytes`); + }); + file.on('end', () => { + console.log(`File [${fieldname}] Finished`); + }); + }); + busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + console.log(`Field [${fieldname}]: value: ${inspect(val)}`); + }); + busboy.on('finish', () => { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end(` +
+      <html>
+        <head></head>
+        <body>
+          <form method="POST" enctype="multipart/form-data">
+            <input type="file" name="filefield"><br />
+            <input type="text" name="textfield"><br />
+            <input type="submit">
+          </form>
+        </body>
+      </html>
+ `); + } +}).listen(8000, () => { + console.log('Listening for requests'); +}); + +// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file: +// +// Listening for requests +// File [filefield]: filename: ryan-speaker.jpg, encoding: binary +// File [filefield] got 11971 bytes +// Field [textfield]: value: 'testing! :-)' +// File [filefield] Finished +// Done parsing form! +``` + +* Save all incoming files to disk: + +```javascript +const http = require('node:http'); +const path = require('node:path'); +const os = require('node:os'); +const fs = require('node:fs'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + var saveTo = path.join(os.tmpdir(), path.basename(fieldname)); + file.pipe(fs.createWriteStream(saveTo)); + }); + busboy.on('finish', function() { + res.writeHead(200, { 'Connection': 'close' }); + res.end("That's all folks!"); + }); + return req.pipe(busboy); + } + res.writeHead(404); + res.end(); +}).listen(8000, function() { + console.log('Listening for requests'); +}); +``` + +* Parsing (urlencoded) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + console.log('File [' + fieldname + ']: filename: ' + filename); + file.on('data', function(data) { + console.log('File [' + fieldname + '] got ' + data.length + ' bytes'); + }); + file.on('end', function() { + console.log('File [' + fieldname + '] Finished'); + }); + }); + busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) { + console.log('Field [' + fieldname + ']: value: ' + inspect(val)); + }); + busboy.on('finish', function() { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end('\ +
+      <html>\
+        <head></head>\
+        <body>\
+          <form method="POST">\
+            <input type="text" name="textfield"><br />\
+            <select name="selectfield">\
+              <option value="1">1</option>\
+              <option value="9001">9001</option>\
+            </select><br />\
+            <input type="checkbox" name="checkfield">Node.js rules!<br />\
+            <input type="submit">\
+          </form>\
+        </body>\
+      </html>\
\ + '); + } +}).listen(8000, function() { + console.log('Listening for requests'); +}); + +// Example output: +// +// Listening for requests +// Field [textfield]: value: 'testing! :-)' +// Field [selectfield]: value: '9001' +// Field [checkfield]: value: 'on' +// Done parsing form! +``` + + +API +=== + +_Busboy_ is a _Writable_ stream + +Busboy (special) events +----------------------- + +* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream. + * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * The property `bytesRead` informs about the number of bytes that have been read so far. + +* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found. + +* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + +* **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. + +* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + + +Busboy methods +-------------- + +* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance. + + * The constructor takes the following valid `config` settings: + + * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers. + + * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false). + + * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default). + + * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default). + + * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8'). + + * **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false). + + * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has following parameters: + + * fieldName - __string__ The name of the field. + + * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream` + + * fileName - __string__ The name of a file supplied by the part. 
+ + (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`) + + * **limits** - _object_ - Various limits on incoming data. Valid properties are: + + * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes). + + * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes). + + * **fields** - _integer_ - Max number of non-file fields (Default: Infinity). + + * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity). + + * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity). + + * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity). + + * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000 + + * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header **Default:** 81920. + + * The constructor can throw errors: + + * **Busboy expected an options-Object.** - Busboy expected an Object as first parameters. + + * **Busboy expected an options-Object with headers-attribute.** - The first parameter is lacking of a headers-attribute. + + * **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this Error to prevent a potential security issue by falling silently back to the Busboy-defaults. Potential source for this Error can be the direct use of environment variables without transforming them to the type number. + + * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse. + + * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all. diff --git a/node_modules/@fastify/busboy/deps/dicer/LICENSE b/node_modules/@fastify/busboy/deps/dicer/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js b/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js new file mode 100644 index 0000000..3c8c081 --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js @@ -0,0 +1,213 @@ +'use strict' + +const WritableStream = require('node:stream').Writable +const inherits = require('node:util').inherits + +const StreamSearch = require('../../streamsearch/sbmh') + +const PartStream = require('./PartStream') +const HeaderParser = require('./HeaderParser') + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } + } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && this.listenerCount('preamble') !== 0) { + this.emit('preamble', this._part) + } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { + this.emit('part', this._part) + } else { + this._ignore() + } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) + } + } + this._part.push(null) + this._part = undefined + 
this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} + +Dicer.prototype._unpause = function () { + if (!this._pause) { return } + + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} + +module.exports = Dicer diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js b/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js new file mode 100644 index 0000000..65f667b --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js @@ -0,0 +1,100 @@ +'use strict' + +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits +const getLimit = require('../../../lib/utils/getLimit') + +const StreamSearch = require('../../streamsearch/sbmh') + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) + } + if (isMatch) { self._finish() } + }) +} +inherits(HeaderParser, EventEmitter) + +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} + +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} + +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} + +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... 
+ if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js b/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js new file mode 100644 index 0000000..c91da1c --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js @@ -0,0 +1,13 @@ +'use strict' + +const inherits = require('node:util').inherits +const ReadableStream = require('node:stream').Readable + +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) + +PartStream.prototype._read = function (n) {} + +module.exports = PartStream diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts b/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts new file mode 100644 index 0000000..3c5b896 --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts @@ -0,0 +1,164 @@ +// Type definitions for dicer 0.2 +// Project: https://github.com/mscdex/dicer +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// TypeScript Version: 2.2 +/// + +import stream = require("stream"); + +// tslint:disable:unified-signatures + +/** + * A very fast streaming multipart parser for node.js. + * Dicer is a WritableStream + * + * Dicer (special) events: + * - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended. + * - on('part', (stream: PartStream)) - Emitted when a new part has been found. + * - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored). + * - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too). + */ +export class Dicer extends stream.Writable { + /** + * Creates and returns a new Dicer instance with the following valid config settings: + * + * @param config The configuration to use + */ + constructor(config: Dicer.Config); + /** + * Sets the boundary to use for parsing and performs some initialization needed for parsing. + * You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header. 
+ * + * @param boundary The boundary to use + */ + setBoundary(boundary: string): void; + addListener(event: "finish", listener: () => void): this; + addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + addListener(event: "trailer", listener: (data: Buffer) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "part", listener: (stream: Dicer.PartStream) => void): this; + on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + on(event: "trailer", listener: (data: Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "part", listener: (stream: Dicer.PartStream) => void): this; + once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + once(event: "trailer", listener: (data: Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + prependListener(event: "trailer", listener: (data: Buffer) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): 
this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + removeListener(event: "trailer", listener: (data: Buffer) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "pipe", listener: (src: stream.Readable) => void): this; + removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; +} + +declare namespace Dicer { + interface Config { + /** + * This is the boundary used to detect the beginning of a new part. + */ + boundary?: string | undefined; + /** + * If true, preamble header parsing will be performed first. + */ + headerFirst?: boolean | undefined; + /** + * The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http). + */ + maxHeaderPairs?: number | undefined; + } + + /** + * PartStream is a _ReadableStream_ + * + * PartStream (special) events: + * - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values. + */ + interface PartStream extends stream.Readable { + addListener(event: "header", listener: (header: object) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: "header", listener: (header: object) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "header", listener: (header: object) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "header", listener: (header: object) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "header", listener: (header: object) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", 
listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + removeListener(event: "header", listener: (header: object) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: Buffer | string) => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; + } +} \ No newline at end of file diff --git a/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js new file mode 100644 index 0000000..b90c0e8 --- /dev/null +++ b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js @@ -0,0 +1,228 @@ +'use strict' + +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH diff --git a/node_modules/@fastify/busboy/lib/main.d.ts b/node_modules/@fastify/busboy/lib/main.d.ts new file mode 100644 index 0000000..91b6448 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/main.d.ts @@ -0,0 +1,196 @@ +// Definitions by: Jacob Baskin +// BendingBender +// Igor Savin + +/// + +import * as http from 'http'; +import { Readable, Writable } from 'stream'; +export { Dicer } from "../deps/dicer/lib/dicer"; + +export const Busboy: BusboyConstructor; +export default Busboy; + +export interface BusboyConfig { + /** + * These are the HTTP headers of the incoming request, which are used by individual parsers. + */ + headers: BusboyHeaders; + /** + * `highWaterMark` to use for this Busboy instance. + * @default WritableStream default. + */ + highWaterMark?: number | undefined; + /** + * highWaterMark to use for file streams. + * @default ReadableStream default. + */ + fileHwm?: number | undefined; + /** + * Default character set to use when one isn't defined. + * @default 'utf8' + */ + defCharset?: string | undefined; + /** + * Detect if a Part is a file. + * + * By default a file is detected if contentType + * is application/octet-stream or fileName is not + * undefined. + * + * Modify this to handle e.g. Blobs. + */ + isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean; + /** + * If paths in the multipart 'filename' field shall be preserved. + * @default false + */ + preservePath?: boolean | undefined; + /** + * Various limits on incoming data. 
+ */ + limits?: + | { + /** + * Max field name size (in bytes) + * @default 100 bytes + */ + fieldNameSize?: number | undefined; + /** + * Max field value size (in bytes) + * @default 1MB + */ + fieldSize?: number | undefined; + /** + * Max number of non-file fields + * @default Infinity + */ + fields?: number | undefined; + /** + * For multipart forms, the max file size (in bytes) + * @default Infinity + */ + fileSize?: number | undefined; + /** + * For multipart forms, the max number of file fields + * @default Infinity + */ + files?: number | undefined; + /** + * For multipart forms, the max number of parts (fields + files) + * @default Infinity + */ + parts?: number | undefined; + /** + * For multipart forms, the max number of header key=>value pairs to parse + * @default 2000 + */ + headerPairs?: number | undefined; + + /** + * For multipart forms, the max size of a header part + * @default 81920 + */ + headerSize?: number | undefined; + } + | undefined; +} + +export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders; + +export interface BusboyFileStream extends + Readable { + + truncated: boolean; + + /** + * The number of bytes that have been read so far. + */ + bytesRead: number; +} + +export interface Busboy extends Writable { + addListener(event: Event, listener: BusboyEvents[Event]): this; + + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + + on(event: Event, listener: BusboyEvents[Event]): this; + + on(event: string | symbol, listener: (...args: any[]) => void): this; + + once(event: Event, listener: BusboyEvents[Event]): this; + + once(event: string | symbol, listener: (...args: any[]) => void): this; + + removeListener(event: Event, listener: BusboyEvents[Event]): this; + + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + + off(event: Event, listener: BusboyEvents[Event]): this; + + off(event: string | symbol, listener: (...args: any[]) => void): this; + + prependListener(event: Event, listener: BusboyEvents[Event]): this; + + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + + prependOnceListener(event: Event, listener: BusboyEvents[Event]): this; + + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; +} + +export interface BusboyEvents { + /** + * Emitted for each new file form field found. + * + * * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the + * file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), + * otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** + * incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically + * and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` + * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * + * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream. + * @param listener.mimeType Contains the 'Content-Type' value for the file stream. 
+ */ + file: ( + fieldname: string, + stream: BusboyFileStream, + filename: string, + transferEncoding: string, + mimeType: string, + ) => void; + /** + * Emitted for each new non-file field found. + */ + field: ( + fieldname: string, + value: string, + fieldnameTruncated: boolean, + valueTruncated: boolean, + transferEncoding: string, + mimeType: string, + ) => void; + finish: () => void; + /** + * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + */ + partsLimit: () => void; + /** + * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. + */ + filesLimit: () => void; + /** + * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + */ + fieldsLimit: () => void; + error: (error: unknown) => void; +} + +export interface BusboyConstructor { + (options: BusboyConfig): Busboy; + + new(options: BusboyConfig): Busboy; +} + diff --git a/node_modules/@fastify/busboy/lib/main.js b/node_modules/@fastify/busboy/lib/main.js new file mode 100644 index 0000000..8794beb --- /dev/null +++ b/node_modules/@fastify/busboy/lib/main.js @@ -0,0 +1,85 @@ +'use strict' + +const WritableStream = require('node:stream').Writable +const { inherits } = require('node:util') +const Dicer = require('../deps/dicer/lib/Dicer') + +const MultipartParser = require('./types/multipart') +const UrlencodedParser = require('./types/urlencoded') +const parseParams = require('./utils/parseParams') + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} + +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} + +module.exports = Busboy +module.exports.default = Busboy +module.exports.Busboy = Busboy + +module.exports.Dicer = Dicer diff --git a/node_modules/@fastify/busboy/lib/types/multipart.js b/node_modules/@fastify/busboy/lib/types/multipart.js new file mode 100644 index 0000000..d691eca --- /dev/null +++ 
b/node_modules/@fastify/busboy/lib/types/multipart.js @@ -0,0 +1,306 @@ +'use strict' + +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams + +const { Readable } = require('node:stream') +const { inherits } = require('node:util') + +const Dicer = require('../../deps/dicer/lib/Dicer') + +const parseParams = require('../utils/parseParams') +const decodeText = require('../utils/decodeText') +const basename = require('../utils/basename') +const getLimit = require('../utils/getLimit') + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) + } + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if 
(parsed[0]) { + contype = parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } + + ++nfiles + + if (boy.listenerCount('file') === 0) { + self.parser._ignore() + return + } + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize + } + + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart diff --git a/node_modules/@fastify/busboy/lib/types/urlencoded.js b/node_modules/@fastify/busboy/lib/types/urlencoded.js new file mode 100644 index 0000000..6f5f784 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/types/urlencoded.js @@ -0,0 +1,190 @@ +'use strict' + +const Decoder = require('../utils/Decoder') +const decodeText = require('../utils/decodeText') +const getLimit = require('../utils/getLimit') + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break + } + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no 
assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } + } + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded diff --git a/node_modules/@fastify/busboy/lib/utils/Decoder.js b/node_modules/@fastify/busboy/lib/utils/Decoder.js new file mode 100644 index 0000000..7917678 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/Decoder.js @@ -0,0 +1,54 @@ +'use strict' + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder diff --git a/node_modules/@fastify/busboy/lib/utils/basename.js b/node_modules/@fastify/busboy/lib/utils/basename.js new file mode 100644 index 0000000..db58819 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/basename.js @@ -0,0 +1,14 @@ +'use strict' + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) + } + } + return (path === '..' || path === '.' ? 
'' : path) +} diff --git a/node_modules/@fastify/busboy/lib/utils/decodeText.js b/node_modules/@fastify/busboy/lib/utils/decodeText.js new file mode 100644 index 0000000..eac7d35 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/decodeText.js @@ -0,0 +1,114 @@ +'use strict' + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function getDecoder (charset) { + let lc + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8 + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? + case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1 + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le + case 'base64': + return decoders.base64 + default: + if (lc === undefined) { + lc = true + charset = charset.toLowerCase() + continue + } + return decoders.other.bind(charset) + } + } +} + +const decoders = { + utf8: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.utf8Slice(0, data.length) + }, + + latin1: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + return data + } + return data.latin1Slice(0, data.length) + }, + + utf16le: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.ucs2Slice(0, data.length) + }, + + base64: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.base64Slice(0, data.length) + }, + + other: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + + if (textDecoders.has(this.toString())) { + try { + return textDecoders.get(this).decode(data) + } catch {} + } + return typeof data === 'string' + ? 
data + : data.toString() + } +} + +function decodeText (text, sourceEncoding, destEncoding) { + if (text) { + return getDecoder(destEncoding)(text, sourceEncoding) + } + return text +} + +module.exports = decodeText diff --git a/node_modules/@fastify/busboy/lib/utils/getLimit.js b/node_modules/@fastify/busboy/lib/utils/getLimit.js new file mode 100644 index 0000000..cb64fd6 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/getLimit.js @@ -0,0 +1,16 @@ +'use strict' + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} diff --git a/node_modules/@fastify/busboy/lib/utils/parseParams.js b/node_modules/@fastify/busboy/lib/utils/parseParams.js new file mode 100644 index 0000000..1698e62 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/parseParams.js @@ -0,0 +1,196 @@ +/* eslint-disable object-property-newline */ +'use strict' + +const decodeText = require('./decodeText') + +const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g + +const EncodedLookup = { + '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', + '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', + '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', + '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', + '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', + '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', + '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', + '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', + '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', + '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', + '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', + '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', + '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f', + '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', + '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', + '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', + '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', + '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', + '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', + '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', + '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', + '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', + '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', + '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', + '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', + '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', + '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', + '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', + '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', + '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': 
'\x6d', '%6D': '\x6d', + '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', + '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', + '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', + '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', + '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', + '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', + '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', + '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', + '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', + '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', + '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', + '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', + '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', + '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', + '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', + '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', + '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', + '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', + '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', + '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', + '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', + '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', + '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', + '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', + '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', + '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', + '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', + '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', + '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', + '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', + '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', + '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', + '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', + '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', + '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', + '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', + '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', + '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', + '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', + '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', + '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', + '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', + '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', + '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', + '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', + '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', 
'%DB': '\xdb', + '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', + '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', + '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', + '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', + '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', + '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', + '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', + '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', + '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', + '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', + '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', + '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', + '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', + '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', + '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', + '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', + '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', + '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', + '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', + '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', + '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' +} + +function encodedReplacer (match) { + return EncodedLookup[match] +} + +const STATE_KEY = 0 +const STATE_VALUE = 1 +const STATE_CHARSET = 2 +const STATE_LANG = 3 + +function parseParams (str) { + const res = [] + let state = STATE_KEY + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + const len = str.length + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state = STATE_KEY + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { + if (state === STATE_CHARSET) { + state = STATE_LANG + charset = tmp.substring(1) + } else { state = STATE_VALUE } + tmp = '' + continue + } else if (state === STATE_KEY && + (char === '*' || char === '=') && + res.length) { + state = char === '*' + ? 
STATE_CHARSET + : STATE_VALUE + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = STATE_KEY + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams diff --git a/node_modules/@fastify/busboy/package.json b/node_modules/@fastify/busboy/package.json new file mode 100644 index 0000000..83693ac --- /dev/null +++ b/node_modules/@fastify/busboy/package.json @@ -0,0 +1,86 @@ +{ + "name": "@fastify/busboy", + "version": "2.1.1", + "private": false, + "author": "Brian White ", + "contributors": [ + { + "name": "Igor Savin", + "email": "kibertoad@gmail.com", + "url": "https://github.com/kibertoad" + }, + { + "name": "Aras Abbasi", + "email": "aras.abbasi@gmail.com", + "url": "https://github.com/uzlopak" + } + ], + "description": "A streaming parser for HTML form data for node.js", + "main": "lib/main", + "type": "commonjs", + "types": "lib/main.d.ts", + "scripts": { + "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify", + "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js", + "coveralls": "nyc report --reporter=lcov", + "lint": "npm run lint:standard", + "lint:everything": "npm run lint && npm run test:types", + "lint:fix": "standard --fix", + "lint:standard": "standard --verbose | snazzy", + "test:mocha": "tap", + "test:types": "tsd", + "test:coverage": "nyc npm run test", + "test": "npm run test:mocha" + }, + "engines": { + "node": ">=14" + }, + "devDependencies": { + "@types/node": "^20.1.0", + "busboy": "^1.0.0", + "photofinish": "^1.8.0", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "tap": "^16.3.8", + "tinybench": "^2.5.1", + "tsd": "^0.30.0", + "typescript": "^5.0.2" + }, + "keywords": [ + "uploads", + "forms", + "multipart", + "form-data" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/fastify/busboy.git" + }, + "tsd": { + "directory": "test/types", + "compilerOptions": { + "esModuleInterop": false, + "module": "commonjs", + "target": "ES2017" + } + }, + "standard": { + "globals": [ + "describe", + "it" + ], + "ignore": [ + "bench" + ] + }, + "files": [ + "README.md", + "LICENSE", + "lib/*", + "deps/encoding/*", + "deps/dicer/lib", + "deps/streamsearch/", + "deps/dicer/LICENSE" + ] +} diff --git a/node_modules/@opentelemetry/api/LICENSE b/node_modules/@opentelemetry/api/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/node_modules/@opentelemetry/api/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@opentelemetry/api/README.md b/node_modules/@opentelemetry/api/README.md new file mode 100644 index 0000000..56dd23d --- /dev/null +++ b/node_modules/@opentelemetry/api/README.md @@ -0,0 +1,117 @@ +# OpenTelemetry API for JavaScript + +

+API Reference • Documentation
+
+NPM Release
+
+ +This package provides everything needed to interact with the OpenTelemetry API, including all TypeScript interfaces, enums, and no-op implementations. It is intended for use both on the server and in the browser. + +The methods in this package perform no operations by default. This means they can be safely called by a library or end-user application whether there is an SDK registered or not. In order to generate and export telemetry data, you will also need an SDK such as the [OpenTelemetry JS SDK][opentelemetry-js]. + +## Tracing Quick Start + +### You Will Need + +- An application you wish to instrument +- [OpenTelemetry JS SDK][opentelemetry-js] +- Node.js >=8.5.0 (14+ is preferred) or an ECMAScript 5+ compatible browser + +**Note:** ECMAScript 5+ compatibility is for this package only. Please refer to the documentation for the SDK you are using to determine its minimum ECMAScript version. + +**Note for library authors:** Only your end users will need an OpenTelemetry SDK. If you wish to support OpenTelemetry in your library, you only need to use the OpenTelemetry API. For more information, please read the [tracing documentation][docs-tracing]. + +### Install Dependencies + +```sh +npm install @opentelemetry/api @opentelemetry/sdk-trace-base +``` + +### Trace Your Application + +In order to get started with tracing, you will need to first register an SDK. The SDK you are using may provide a convenience method which calls the registration methods for you, but if you would like to call them directly they are documented here: [sdk registration methods][docs-sdk-registration]. + +Once you have registered an SDK, you can start and end spans. A simple example of basic SDK registration and tracing a simple operation is below. The example should export spans to the console once per second. For more information, see the [tracing documentation][docs-tracing]. + +```javascript +const { trace } = require("@opentelemetry/api"); +const { BasicTracerProvider, ConsoleSpanExporter, SimpleSpanProcessor } = require("@opentelemetry/sdk-trace-base"); + +// Create and register an SDK +const provider = new BasicTracerProvider(); +provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter())); +trace.setGlobalTracerProvider(provider); + +// Acquire a tracer from the global tracer provider which will be used to trace the application +const name = 'my-application-name'; +const version = '0.1.0'; +const tracer = trace.getTracer(name, version); + +// Trace your application by creating spans +async function operation() { + const span = tracer.startSpan("do operation"); + + // mock some work by sleeping 1 second + await new Promise((resolve, reject) => { + setTimeout(resolve, 1000); + }) + + span.end(); +} + +async function main() { + while (true) { + await operation(); + } +} + +main(); +``` + +## Version Compatibility + +Because the npm installer and node module resolution algorithm could potentially allow two or more copies of any given package to exist within the same `node_modules` structure, the OpenTelemetry API takes advantage of a variable on the `global` object to store the global API. When an API method in the API package is called, it checks if this `global` API exists and proxies calls to it if and only if it is a compatible API version. This means if a package has a dependency on an OpenTelemetry API version which is not compatible with the API used by the end user, the package will receive a no-op implementation of the API. 
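+For example (a minimal sketch; the tracer and span names here are purely illustrative), a library can call the API before any SDK is registered and every call simply resolves to a no-op:
+
+```javascript
+const { trace } = require("@opentelemetry/api");
+
+// No SDK (and therefore no global tracer provider) has been registered yet,
+// so the API hands back a no-op tracer.
+const tracer = trace.getTracer("my-library", "0.1.0");
+
+// The span object is valid and can be used normally, but nothing is recorded
+// or exported until a compatible SDK registers a real tracer provider.
+const span = tracer.startSpan("noop operation");
+span.setAttribute("example.attribute", true);
+span.end();
+```
+
+Once a compatible SDK registers a global tracer provider (as in the quick-start example above), the same calls start producing real telemetry.
+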
+ +## Upgrade Guidelines + +### 0.21.0 to 1.0.0 + +No breaking changes + +### 0.20.0 to 0.21.0 + +- [#78](https://github.com/open-telemetry/opentelemetry-js-api/issues/78) `api.context.bind` arguments reversed and `context` is now a required argument. +- [#46](https://github.com/open-telemetry/opentelemetry-js-api/issues/46) Noop classes and singletons are no longer exported. To create a noop span it is recommended to use `api.trace.wrapSpanContext` with `INVALID_SPAN_CONTEXT` instead of using the `NOOP_TRACER`. + +### 1.0.0-rc.3 to 0.20.0 + +- Removing `TimedEvent` which was not part of spec +- `HttpBaggage` renamed to `HttpBaggagePropagator` +- [#45](https://github.com/open-telemetry/opentelemetry-js-api/pull/45) `Span#context` renamed to `Span#spanContext` +- [#47](https://github.com/open-telemetry/opentelemetry-js-api/pull/47) `getSpan`/`setSpan`/`getSpanContext`/`setSpanContext` moved to `trace` namespace +- [#55](https://github.com/open-telemetry/opentelemetry-js-api/pull/55) `getBaggage`/`setBaggage`/`createBaggage` moved to `propagation` namespace + +## Useful links + +- For more information on OpenTelemetry, visit: +- For more about OpenTelemetry JavaScript: +- For help or feedback on this project, join us in [GitHub Discussions][discussions-url] + +## License + +Apache 2.0 - See [LICENSE][license-url] for more information. + +[opentelemetry-js]: https://github.com/open-telemetry/opentelemetry-js + +[discussions-url]: https://github.com/open-telemetry/opentelemetry-js/discussions +[license-url]: https://github.com/open-telemetry/opentelemetry-js/blob/main/api/LICENSE +[license-image]: https://img.shields.io/badge/license-Apache_2.0-green.svg?style=flat +[docs-tracing]: https://github.com/open-telemetry/opentelemetry-js/blob/main/doc/tracing.md +[docs-sdk-registration]: https://github.com/open-telemetry/opentelemetry-js/blob/main/doc/sdk-registration.md diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.d.ts b/node_modules/@opentelemetry/api/build/esm/api/context.d.ts new file mode 100644 index 0000000..61caee8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.d.ts @@ -0,0 +1,41 @@ +import { Context, ContextManager } from '../context/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare class ContextAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Context API */ + static getInstance(): ContextAPI; + /** + * Set the current context manager. + * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager: ContextManager): boolean; + /** + * Get the currently active context + */ + active(): Context; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. 
Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind(context: Context, target: T): T; + private _getContextManager; + /** Disable and remove the global context manager */ + disable(): void; +} +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.js b/node_modules/@opentelemetry/api/build/esm/api/context.js new file mode 100644 index 0000000..0d02f97 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.js @@ -0,0 +1,110 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __read = (this && this.__read) || function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +}; +var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +}; +import { NoopContextManager } from '../context/NoopContextManager'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'context'; +var NOOP_CONTEXT_MANAGER = new NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +var ContextAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function ContextAPI() { + } + /** Get the singleton instance of the Context API */ + ContextAPI.getInstance = function () { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + }; + /** + * Set the current context manager. 
+ * + * @returns true if the context manager was successfully registered, else false + */ + ContextAPI.prototype.setGlobalContextManager = function (contextManager) { + return registerGlobal(API_NAME, contextManager, DiagAPI.instance()); + }; + /** + * Get the currently active context + */ + ContextAPI.prototype.active = function () { + return this._getContextManager().active(); + }; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + ContextAPI.prototype.with = function (context, fn, thisArg) { + var _a; + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], __read(args), false)); + }; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + ContextAPI.prototype.bind = function (context, target) { + return this._getContextManager().bind(context, target); + }; + ContextAPI.prototype._getContextManager = function () { + return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; + }; + /** Disable and remove the global context manager */ + ContextAPI.prototype.disable = function () { + this._getContextManager().disable(); + unregisterGlobal(API_NAME, DiagAPI.instance()); + }; + return ContextAPI; +}()); +export { ContextAPI }; +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.js.map b/node_modules/@opentelemetry/api/build/esm/api/context.js.map new file mode 100644 index 0000000..6938b71 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/api/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,OAAO,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAC;AAEnE,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,SAAS,CAAC;AAC3B,IAAM,oBAAoB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEtD;;GAEG;AACH;IAGE,+FAA+F;IAC/F;IAAuB,CAAC;IAExB,oDAAoD;IACtC,sBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAuB,GAA9B,UAA+B,cAA8B;QAC3D,OAAO,cAAc,CAAC,QAAQ,EAAE,cAAc,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACI,2BAAM,GAAb;QACE,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC5C,CAAC;IAED;;;;;;;OAOG;IACI,yBAAI,GAAX,UACE,OAAgB,EAChB,EAAK,EACL,OAA8B;;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,CAAA,KAAA,IAAI,CAAC,kBAAkB,EAAE,CAAA,CAAC,IAAI,0BAAC,OAAO,EAAE,EAAE,EAAE,OAAO,UAAK,IAAI,WAAE;IACvE,CAAC;IAED;;;;;OAKG;IACI,yBAAI,GAAX,UAAe,OAAgB,EAAE,MAAS;QACxC,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,CAAC;IAEO,uCAAkB,GAA1B;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,oBAAoB,CAAC;IACrD,CAAC;IAED,oDAAoD;IAC7C,4BAAO,GAAd;QACE,IAAI,CAAC,kBAAkB,EAAE,CAAC,OAAO,EAAE,CAAC;QACpC,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IACH,iBAAC;AAAD,CAAC,AAnED,IAmEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed 
under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopContextManager } from '../context/NoopContextManager';\nimport { Context, ContextManager } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'context';\nconst NOOP_CONTEXT_MANAGER = new NoopContextManager();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport class ContextAPI {\n private static _instance?: ContextAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Context API */\n public static getInstance(): ContextAPI {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current context manager.\n *\n * @returns true if the context manager was successfully registered, else false\n */\n public setGlobalContextManager(contextManager: ContextManager): boolean {\n return registerGlobal(API_NAME, contextManager, DiagAPI.instance());\n }\n\n /**\n * Get the currently active context\n */\n public active(): Context {\n return this._getContextManager().active();\n }\n\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n public with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return this._getContextManager().with(context, fn, thisArg, ...args);\n }\n\n /**\n * Bind a context to a target function or event emitter\n *\n * @param context context to bind to the event emitter or function. 
Defaults to the currently active context\n * @param target function or event emitter to bind\n */\n public bind(context: Context, target: T): T {\n return this._getContextManager().bind(context, target);\n }\n\n private _getContextManager(): ContextManager {\n return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER;\n }\n\n /** Disable and remove the global context manager */\n public disable() {\n this._getContextManager().disable();\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts b/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts new file mode 100644 index 0000000..131db17 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts @@ -0,0 +1,30 @@ +import { ComponentLoggerOptions, DiagLogFunction, DiagLogger, DiagLoggerApi } from '../diag/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +export declare class DiagAPI implements DiagLogger, DiagLoggerApi { + private static _instance?; + /** Get the singleton instance of the DiagAPI API */ + static instance(): DiagAPI; + /** + * Private internal constructor + * @private + */ + private constructor(); + setLogger: DiagLoggerApi['setLogger']; + /** + * + */ + createComponentLogger: (options: ComponentLoggerOptions) => DiagLogger; + verbose: DiagLogFunction; + debug: DiagLogFunction; + info: DiagLogFunction; + warn: DiagLogFunction; + error: DiagLogFunction; + /** + * Unregister the global logger and return to Noop + */ + disable: () => void; +} +//# sourceMappingURL=diag.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.js b/node_modules/@opentelemetry/api/build/esm/api/diag.js new file mode 100644 index 0000000..25eb9af --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.js @@ -0,0 +1,121 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +var __read = (this && this.__read) || function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +}; +var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +}; +import { DiagComponentLogger } from '../diag/ComponentLogger'; +import { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger'; +import { DiagLogLevel, } from '../diag/types'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +var API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +var DiagAPI = /** @class */ (function () { + /** + * Private internal constructor + * @private + */ + function DiagAPI() { + function _logProxy(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + var logger = getGlobal('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName].apply(logger, __spreadArray([], __read(args), false)); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + var self = this; + // DiagAPI specific functions + var setLogger = function (logger, optionsOrLogLevel) { + var _a, _b, _c; + if (optionsOrLogLevel === void 0) { optionsOrLogLevel = { logLevel: DiagLogLevel.INFO }; } + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + if (typeof optionsOrLogLevel === 'number') { + optionsOrLogLevel = { + logLevel: optionsOrLogLevel, + }; + } + var oldLogger = getGlobal('diag'); + var newLogger = createLogLevelDiagLogger((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : DiagLogLevel.INFO, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { + var stack = (_c = new Error().stack) !== null && _c !== void 0 ? 
_c : ''; + oldLogger.warn("Current logger will be overwritten from " + stack); + newLogger.warn("Current logger will overwrite one already registered from " + stack); + } + return registerGlobal('diag', newLogger, self, true); + }; + self.setLogger = setLogger; + self.disable = function () { + unregisterGlobal(API_NAME, self); + }; + self.createComponentLogger = function (options) { + return new DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + DiagAPI.instance = function () { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + }; + return DiagAPI; +}()); +export { DiagAPI }; +//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.js.map b/node_modules/@opentelemetry/api/build/esm/api/diag.js.map new file mode 100644 index 0000000..380ed2d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag.js","sourceRoot":"","sources":["../../../src/api/diag.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,OAAO,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AAC9D,OAAO,EAAE,wBAAwB,EAAE,MAAM,iCAAiC,CAAC;AAC3E,OAAO,EAKL,YAAY,GACb,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAElC,IAAM,QAAQ,GAAG,MAAM,CAAC;AAExB;;;GAGG;AACH;IAYE;;;OAGG;IACH;QACE,SAAS,SAAS,CAAC,QAA0B;YAC3C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;gBACjC,6BAA6B;gBAC7B,IAAI,CAAC,MAAM;oBAAE,OAAO;gBACpB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,2BAAc,IAAI,WAAE;YACnC,CAAC,CAAC;QACJ,CAAC;QAED,mFAAmF;QACnF,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,6BAA6B;QAE7B,IAAM,SAAS,GAA+B,UAC5C,MAAM,EACN,iBAAmD;;YAAnD,kCAAA,EAAA,sBAAsB,QAAQ,EAAE,YAAY,CAAC,IAAI,EAAE;YAEnD,IAAI,MAAM,KAAK,IAAI,EAAE;gBACnB,mCAAmC;gBACnC,2DAA2D;gBAC3D,qFAAqF;gBACrF,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,oIAAoI,CACrI,CAAC;gBACF,IAAI,CAAC,KAAK,CAAC,MAAA,GAAG,CAAC,KAAK,mCAAI,GAAG,CAAC,OAAO,CAAC,CAAC;gBACrC,OAAO,KAAK,CAAC;aACd;YAED,IAAI,OAAO,iBAAiB,KAAK,QAAQ,EAAE;gBACzC,iBAAiB,GAAG;oBAClB,QAAQ,EAAE,iBAAiB;iBAC5B,CAAC;aACH;YAED,IAAM,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;YACpC,IAAM,SAAS,GAAG,wBAAwB,CACxC,MAAA,iBAAiB,CAAC,QAAQ,mCAAI,YAAY,CAAC,IAAI,EAC/C,MAAM,CACP,CAAC;YACF,kFAAkF;YAClF,IAAI,SAAS,IAAI,CAAC,iBAAiB,CAAC,uBAAuB,EAAE;gBAC3D,IAAM,KAAK,GAAG,MAAA,IAAI,KAAK,EAAE,CAAC,KAAK,mCAAI,iCAAiC,CAAC;gBACrE,SAAS,CAAC,IAAI,CAAC,6CAA2C,KAAO,CAAC,CAAC;gBACnE,SAAS,CAAC,IAAI,CACZ,+DAA6D,KAAO,CACrE,CAAC;aACH;YAED,OAAO,cAAc,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QACvD,CAAC,CAAC;QAEF,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,CAAC,OAAO,GAAG;YACb,gBAAgB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QACnC,CAAC,CAAC;QAEF,IAAI,CAAC,qBAAqB,GAAG,UAAC,OAA+B;YAC3D,OAAO,IAAI,mBAAmB,CAAC,OAAO,CAAC,CAAC;QAC1C,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;QACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;QAChC,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAjFD,oDAAoD;IACtC,gBAAQ,GAAtB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,OAAO,EAAE,CAAC;SAChC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IA+FH,cAAC;AAAD,CAAC,AAzGD,IAyGC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * 
Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagComponentLogger } from '../diag/ComponentLogger';\nimport { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger';\nimport {\n ComponentLoggerOptions,\n DiagLogFunction,\n DiagLogger,\n DiagLoggerApi,\n DiagLogLevel,\n} from '../diag/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\n\nconst API_NAME = 'diag';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry internal\n * diagnostic API\n */\nexport class DiagAPI implements DiagLogger, DiagLoggerApi {\n private static _instance?: DiagAPI;\n\n /** Get the singleton instance of the DiagAPI API */\n public static instance(): DiagAPI {\n if (!this._instance) {\n this._instance = new DiagAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Private internal constructor\n * @private\n */\n private constructor() {\n function _logProxy(funcName: keyof DiagLogger): DiagLogFunction {\n return function (...args) {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) return;\n return logger[funcName](...args);\n };\n }\n\n // Using self local variable for minification purposes as 'this' cannot be minified\n const self = this;\n\n // DiagAPI specific functions\n\n const setLogger: DiagLoggerApi['setLogger'] = (\n logger,\n optionsOrLogLevel = { logLevel: DiagLogLevel.INFO }\n ) => {\n if (logger === self) {\n // There isn't much we can do here.\n // Logging to the console might break the user application.\n // Try to log to self. If a logger was previously registered it will receive the log.\n const err = new Error(\n 'Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'\n );\n self.error(err.stack ?? err.message);\n return false;\n }\n\n if (typeof optionsOrLogLevel === 'number') {\n optionsOrLogLevel = {\n logLevel: optionsOrLogLevel,\n };\n }\n\n const oldLogger = getGlobal('diag');\n const newLogger = createLogLevelDiagLogger(\n optionsOrLogLevel.logLevel ?? DiagLogLevel.INFO,\n logger\n );\n // There already is an logger registered. We'll let it know before overwriting it.\n if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) {\n const stack = new Error().stack ?? 
'';\n oldLogger.warn(`Current logger will be overwritten from ${stack}`);\n newLogger.warn(\n `Current logger will overwrite one already registered from ${stack}`\n );\n }\n\n return registerGlobal('diag', newLogger, self, true);\n };\n\n self.setLogger = setLogger;\n\n self.disable = () => {\n unregisterGlobal(API_NAME, self);\n };\n\n self.createComponentLogger = (options: ComponentLoggerOptions) => {\n return new DiagComponentLogger(options);\n };\n\n self.verbose = _logProxy('verbose');\n self.debug = _logProxy('debug');\n self.info = _logProxy('info');\n self.warn = _logProxy('warn');\n self.error = _logProxy('error');\n }\n\n public setLogger!: DiagLoggerApi['setLogger'];\n /**\n *\n */\n public createComponentLogger!: (\n options: ComponentLoggerOptions\n ) => DiagLogger;\n\n // DiagLogger implementation\n public verbose!: DiagLogFunction;\n public debug!: DiagLogFunction;\n public info!: DiagLogFunction;\n public warn!: DiagLogFunction;\n public error!: DiagLogFunction;\n\n /**\n * Unregister the global logger and return to Noop\n */\n public disable!: () => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/metrics.d.ts b/node_modules/@opentelemetry/api/build/esm/api/metrics.d.ts new file mode 100644 index 0000000..5adc145 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/metrics.d.ts @@ -0,0 +1,28 @@ +import { Meter, MeterOptions } from '../metrics/Meter'; +import { MeterProvider } from '../metrics/MeterProvider'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Metrics API + */ +export declare class MetricsAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Metrics API */ + static getInstance(): MetricsAPI; + /** + * Set the current global meter provider. + * Returns true if the meter provider was successfully registered, else false. + */ + setGlobalMeterProvider(provider: MeterProvider): boolean; + /** + * Returns the global meter provider. + */ + getMeterProvider(): MeterProvider; + /** + * Returns a meter from the global meter provider. + */ + getMeter(name: string, version?: string, options?: MeterOptions): Meter; + /** Remove the global meter provider */ + disable(): void; +} +//# sourceMappingURL=metrics.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/metrics.js b/node_modules/@opentelemetry/api/build/esm/api/metrics.js new file mode 100644 index 0000000..92c575a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/metrics.js @@ -0,0 +1,60 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { NOOP_METER_PROVIDER } from '../metrics/NoopMeterProvider'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'metrics'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Metrics API + */ +var MetricsAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function MetricsAPI() { + } + /** Get the singleton instance of the Metrics API */ + MetricsAPI.getInstance = function () { + if (!this._instance) { + this._instance = new MetricsAPI(); + } + return this._instance; + }; + /** + * Set the current global meter provider. + * Returns true if the meter provider was successfully registered, else false. + */ + MetricsAPI.prototype.setGlobalMeterProvider = function (provider) { + return registerGlobal(API_NAME, provider, DiagAPI.instance()); + }; + /** + * Returns the global meter provider. + */ + MetricsAPI.prototype.getMeterProvider = function () { + return getGlobal(API_NAME) || NOOP_METER_PROVIDER; + }; + /** + * Returns a meter from the global meter provider. + */ + MetricsAPI.prototype.getMeter = function (name, version, options) { + return this.getMeterProvider().getMeter(name, version, options); + }; + /** Remove the global meter provider */ + MetricsAPI.prototype.disable = function () { + unregisterGlobal(API_NAME, DiagAPI.instance()); + }; + return MetricsAPI; +}()); +export { MetricsAPI }; +//# sourceMappingURL=metrics.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/metrics.js.map b/node_modules/@opentelemetry/api/build/esm/api/metrics.js.map new file mode 100644 index 0000000..39c6955 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/metrics.js.map @@ -0,0 +1 @@ +{"version":3,"file":"metrics.js","sourceRoot":"","sources":["../../../src/api/metrics.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACnE,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,SAAS,CAAC;AAE3B;;GAEG;AACH;IAGE,+FAA+F;IAC/F;IAAuB,CAAC;IAExB,oDAAoD;IACtC,sBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;OAGG;IACI,2CAAsB,GAA7B,UAA8B,QAAuB;QACnD,OAAO,cAAc,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAChE,CAAC;IAED;;OAEG;IACI,qCAAgB,GAAvB;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,mBAAmB,CAAC;IACpD,CAAC;IAED;;OAEG;IACI,6BAAQ,GAAf,UACE,IAAY,EACZ,OAAgB,EAChB,OAAsB;QAEtB,OAAO,IAAI,CAAC,gBAAgB,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAClE,CAAC;IAED,uCAAuC;IAChC,4BAAO,GAAd;QACE,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IACH,iBAAC;AAAD,CAAC,AA7CD,IA6CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, 
MeterOptions } from '../metrics/Meter';\nimport { MeterProvider } from '../metrics/MeterProvider';\nimport { NOOP_METER_PROVIDER } from '../metrics/NoopMeterProvider';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'metrics';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Metrics API\n */\nexport class MetricsAPI {\n private static _instance?: MetricsAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Metrics API */\n public static getInstance(): MetricsAPI {\n if (!this._instance) {\n this._instance = new MetricsAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global meter provider.\n * Returns true if the meter provider was successfully registered, else false.\n */\n public setGlobalMeterProvider(provider: MeterProvider): boolean {\n return registerGlobal(API_NAME, provider, DiagAPI.instance());\n }\n\n /**\n * Returns the global meter provider.\n */\n public getMeterProvider(): MeterProvider {\n return getGlobal(API_NAME) || NOOP_METER_PROVIDER;\n }\n\n /**\n * Returns a meter from the global meter provider.\n */\n public getMeter(\n name: string,\n version?: string,\n options?: MeterOptions\n ): Meter {\n return this.getMeterProvider().getMeter(name, version, options);\n }\n\n /** Remove the global meter provider */\n public disable(): void {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts b/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts new file mode 100644 index 0000000..a22d24d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts @@ -0,0 +1,49 @@ +import { Context } from '../context/types'; +import { TextMapGetter, TextMapPropagator, TextMapSetter } from '../propagation/TextMapPropagator'; +import { getBaggage, getActiveBaggage, setBaggage, deleteBaggage } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +export declare class PropagationAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Propagator API */ + static getInstance(): PropagationAPI; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator: TextMapPropagator): boolean; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context: Context, carrier: Carrier, setter?: TextMapSetter): void; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context: Context, carrier: Carrier, getter?: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. 
+ */ + fields(): string[]; + /** Remove the global propagator */ + disable(): void; + createBaggage: typeof createBaggage; + getBaggage: typeof getBaggage; + getActiveBaggage: typeof getActiveBaggage; + setBaggage: typeof setBaggage; + deleteBaggage: typeof deleteBaggage; + private _getGlobalPropagator; +} +//# sourceMappingURL=propagation.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.js b/node_modules/@opentelemetry/api/build/esm/api/propagation.js new file mode 100644 index 0000000..d3f6f83 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.js @@ -0,0 +1,89 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator'; +import { defaultTextMapGetter, defaultTextMapSetter, } from '../propagation/TextMapPropagator'; +import { getBaggage, getActiveBaggage, setBaggage, deleteBaggage, } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'propagation'; +var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +var PropagationAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function PropagationAPI() { + this.createBaggage = createBaggage; + this.getBaggage = getBaggage; + this.getActiveBaggage = getActiveBaggage; + this.setBaggage = setBaggage; + this.deleteBaggage = deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + PropagationAPI.getInstance = function () { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + }; + /** + * Set the current propagator. 
+ * + * @returns true if the propagator was successfully registered, else false + */ + PropagationAPI.prototype.setGlobalPropagator = function (propagator) { + return registerGlobal(API_NAME, propagator, DiagAPI.instance()); + }; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + PropagationAPI.prototype.inject = function (context, carrier, setter) { + if (setter === void 0) { setter = defaultTextMapSetter; } + return this._getGlobalPropagator().inject(context, carrier, setter); + }; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + PropagationAPI.prototype.extract = function (context, carrier, getter) { + if (getter === void 0) { getter = defaultTextMapGetter; } + return this._getGlobalPropagator().extract(context, carrier, getter); + }; + /** + * Return a list of all fields which may be used by the propagator. + */ + PropagationAPI.prototype.fields = function () { + return this._getGlobalPropagator().fields(); + }; + /** Remove the global propagator */ + PropagationAPI.prototype.disable = function () { + unregisterGlobal(API_NAME, DiagAPI.instance()); + }; + PropagationAPI.prototype._getGlobalPropagator = function () { + return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + }; + return PropagationAPI; +}()); +export { PropagationAPI }; +//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map b/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map new file mode 100644 index 0000000..1cad8f8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"propagation.js","sourceRoot":"","sources":["../../../src/api/propagation.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,qBAAqB,EAAE,MAAM,sCAAsC,CAAC;AAC7E,OAAO,EACL,oBAAoB,EACpB,oBAAoB,GAIrB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,UAAU,EACV,aAAa,GACd,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,aAAa,CAAC;AAC/B,IAAM,wBAAwB,GAAG,IAAI,qBAAqB,EAAE,CAAC;AAE7D;;GAEG;AACH;IAGE,+FAA+F;IAC/F;QA8DO,kBAAa,GAAG,aAAa,CAAC;QAE9B,eAAU,GAAG,UAAU,CAAC;QAExB,qBAAgB,GAAG,gBAAgB,CAAC;QAEpC,eAAU,GAAG,UAAU,CAAC;QAExB,kBAAa,GAAG,aAAa,CAAC;IAtEd,CAAC;IAExB,uDAAuD;IACzC,0BAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,EAAE,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAmB,GAA1B,UAA2B,UAA6B;QACtD,OAAO,cAAc,CAAC,QAAQ,EAAE,UAAU,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;OAMG;IACI,+BAAM,GAAb,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,6BAAqD;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;OAMG;IACI,gCAAO,GAAd,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,6BAAqD;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,+BAAM,GAAb;QACE,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC9C,CAAC;IAED,mCAAmC;IAC5B,gCAAO,GAAd;QACE,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IAYO,6CAAoB,GAA5B;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,wBAAwB,CAAC;IACzD,CAAC;IACH,qBAAC;AAAD,CAAC,AA/ED,IA+EC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator';\nimport {\n defaultTextMapGetter,\n defaultTextMapSetter,\n TextMapGetter,\n TextMapPropagator,\n TextMapSetter,\n} from '../propagation/TextMapPropagator';\nimport {\n getBaggage,\n getActiveBaggage,\n setBaggage,\n deleteBaggage,\n} from '../baggage/context-helpers';\nimport { createBaggage } from '../baggage/utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'propagation';\nconst NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nexport class PropagationAPI {\n private static _instance?: PropagationAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Propagator API */\n public static getInstance(): PropagationAPI {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current propagator.\n 
*\n * @returns true if the propagator was successfully registered, else false\n */\n public setGlobalPropagator(propagator: TextMapPropagator): boolean {\n return registerGlobal(API_NAME, propagator, DiagAPI.instance());\n }\n\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param context Context carrying tracing data to inject\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n */\n public inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter = defaultTextMapSetter\n ): void {\n return this._getGlobalPropagator().inject(context, carrier, setter);\n }\n\n /**\n * Extract context from a carrier\n *\n * @param context Context which the newly created context will inherit from\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n */\n public extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter = defaultTextMapGetter\n ): Context {\n return this._getGlobalPropagator().extract(context, carrier, getter);\n }\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n public fields(): string[] {\n return this._getGlobalPropagator().fields();\n }\n\n /** Remove the global propagator */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n\n public createBaggage = createBaggage;\n\n public getBaggage = getBaggage;\n\n public getActiveBaggage = getActiveBaggage;\n\n public setBaggage = setBaggage;\n\n public deleteBaggage = deleteBaggage;\n\n private _getGlobalPropagator(): TextMapPropagator {\n return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts b/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts new file mode 100644 index 0000000..df59fd2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts @@ -0,0 +1,40 @@ +import { isSpanContextValid, wrapSpanContext } from '../trace/spancontext-utils'; +import { Tracer } from '../trace/tracer'; +import { TracerProvider } from '../trace/tracer_provider'; +import { deleteSpan, getActiveSpan, getSpan, getSpanContext, setSpan, setSpanContext } from '../trace/context-utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +export declare class TraceAPI { + private static _instance?; + private _proxyTracerProvider; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Trace API */ + static getInstance(): TraceAPI; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider: TracerProvider): boolean; + /** + * Returns the global tracer provider. + */ + getTracerProvider(): TracerProvider; + /** + * Returns a tracer from the global tracer provider. 
+ */ + getTracer(name: string, version?: string): Tracer; + /** Remove the global tracer provider */ + disable(): void; + wrapSpanContext: typeof wrapSpanContext; + isSpanContextValid: typeof isSpanContextValid; + deleteSpan: typeof deleteSpan; + getSpan: typeof getSpan; + getActiveSpan: typeof getActiveSpan; + getSpanContext: typeof getSpanContext; + setSpan: typeof setSpan; + setSpanContext: typeof setSpanContext; +} +//# sourceMappingURL=trace.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.js b/node_modules/@opentelemetry/api/build/esm/api/trace.js new file mode 100644 index 0000000..a4aa6e6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.js @@ -0,0 +1,77 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { ProxyTracerProvider } from '../trace/ProxyTracerProvider'; +import { isSpanContextValid, wrapSpanContext, } from '../trace/spancontext-utils'; +import { deleteSpan, getActiveSpan, getSpan, getSpanContext, setSpan, setSpanContext, } from '../trace/context-utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +var TraceAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function TraceAPI() { + this._proxyTracerProvider = new ProxyTracerProvider(); + this.wrapSpanContext = wrapSpanContext; + this.isSpanContextValid = isSpanContextValid; + this.deleteSpan = deleteSpan; + this.getSpan = getSpan; + this.getActiveSpan = getActiveSpan; + this.getSpanContext = getSpanContext; + this.setSpan = setSpan; + this.setSpanContext = setSpanContext; + } + /** Get the singleton instance of the Trace API */ + TraceAPI.getInstance = function () { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + }; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + TraceAPI.prototype.setGlobalTracerProvider = function (provider) { + var success = registerGlobal(API_NAME, this._proxyTracerProvider, DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + }; + /** + * Returns the global tracer provider. + */ + TraceAPI.prototype.getTracerProvider = function () { + return getGlobal(API_NAME) || this._proxyTracerProvider; + }; + /** + * Returns a tracer from the global tracer provider. 
+ */ + TraceAPI.prototype.getTracer = function (name, version) { + return this.getTracerProvider().getTracer(name, version); + }; + /** Remove the global tracer provider */ + TraceAPI.prototype.disable = function () { + unregisterGlobal(API_NAME, DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider(); + }; + return TraceAPI; +}()); +export { TraceAPI }; +//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.js.map b/node_modules/@opentelemetry/api/build/esm/api/trace.js.map new file mode 100644 index 0000000..122b7e2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace.js","sourceRoot":"","sources":["../../../src/api/trace.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACnE,OAAO,EACL,kBAAkB,EAClB,eAAe,GAChB,MAAM,4BAA4B,CAAC;AAGpC,OAAO,EACL,UAAU,EACV,aAAa,EACb,OAAO,EACP,cAAc,EACd,OAAO,EACP,cAAc,GACf,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,OAAO,CAAC;AAEzB;;GAEG;AACH;IAKE,+FAA+F;IAC/F;QAHQ,yBAAoB,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAmDlD,oBAAe,GAAG,eAAe,CAAC;QAElC,uBAAkB,GAAG,kBAAkB,CAAC;QAExC,eAAU,GAAG,UAAU,CAAC;QAExB,YAAO,GAAG,OAAO,CAAC;QAElB,kBAAa,GAAG,aAAa,CAAC;QAE9B,mBAAc,GAAG,cAAc,CAAC;QAEhC,YAAO,GAAG,OAAO,CAAC;QAElB,mBAAc,GAAG,cAAc,CAAC;IA9DhB,CAAC;IAExB,kDAAkD;IACpC,oBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,EAAE,CAAC;SACjC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,0CAAuB,GAA9B,UAA+B,QAAwB;QACrD,IAAM,OAAO,GAAG,cAAc,CAC5B,QAAQ,EACR,IAAI,CAAC,oBAAoB,EACzB,OAAO,CAAC,QAAQ,EAAE,CACnB,CAAC;QACF,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;SACjD;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,oCAAiB,GAAxB;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,CAAC;IAC1D,CAAC;IAED;;OAEG;IACI,4BAAS,GAAhB,UAAiB,IAAY,EAAE,OAAgB;QAC7C,OAAO,IAAI,CAAC,iBAAiB,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAED,wCAAwC;IACjC,0BAAO,GAAd;QACE,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;QAC/C,IAAI,CAAC,oBAAoB,GAAG,IAAI,mBAAmB,EAAE,CAAC;IACxD,CAAC;IAiBH,eAAC;AAAD,CAAC,AArED,IAqEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { ProxyTracerProvider } from '../trace/ProxyTracerProvider';\nimport {\n isSpanContextValid,\n wrapSpanContext,\n} from '../trace/spancontext-utils';\nimport { Tracer } from '../trace/tracer';\nimport { TracerProvider } from '../trace/tracer_provider';\nimport {\n deleteSpan,\n getActiveSpan,\n getSpan,\n getSpanContext,\n setSpan,\n setSpanContext,\n} from '../trace/context-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'trace';\n\n/**\n * Singleton object which represents the entry point 
to the OpenTelemetry Tracing API\n */\nexport class TraceAPI {\n private static _instance?: TraceAPI;\n\n private _proxyTracerProvider = new ProxyTracerProvider();\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Trace API */\n public static getInstance(): TraceAPI {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global tracer.\n *\n * @returns true if the tracer provider was successfully registered, else false\n */\n public setGlobalTracerProvider(provider: TracerProvider): boolean {\n const success = registerGlobal(\n API_NAME,\n this._proxyTracerProvider,\n DiagAPI.instance()\n );\n if (success) {\n this._proxyTracerProvider.setDelegate(provider);\n }\n return success;\n }\n\n /**\n * Returns the global tracer provider.\n */\n public getTracerProvider(): TracerProvider {\n return getGlobal(API_NAME) || this._proxyTracerProvider;\n }\n\n /**\n * Returns a tracer from the global tracer provider.\n */\n public getTracer(name: string, version?: string): Tracer {\n return this.getTracerProvider().getTracer(name, version);\n }\n\n /** Remove the global tracer provider */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n this._proxyTracerProvider = new ProxyTracerProvider();\n }\n\n public wrapSpanContext = wrapSpanContext;\n\n public isSpanContextValid = isSpanContextValid;\n\n public deleteSpan = deleteSpan;\n\n public getSpan = getSpan;\n\n public getActiveSpan = getActiveSpan;\n\n public getSpanContext = getSpanContext;\n\n public setSpan = setSpan;\n\n public setSpanContext = setSpanContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts new file mode 100644 index 0000000..23750eb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts @@ -0,0 +1,29 @@ +import { Context } from '../context/types'; +import { Baggage } from './types'; +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getBaggage(context: Context): Baggage | undefined; +/** + * Retrieve the current baggage from the active/current context + * + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getActiveBaggage(): Baggage | undefined; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export declare function setBaggage(context: Context, baggage: Baggage): Context; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export declare function deleteBaggage(context: Context): Context; +//# sourceMappingURL=context-helpers.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js new file mode 100644 index 0000000..6911334 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js @@ -0,0 +1,56 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ContextAPI } from '../api/context'; +import { createContextKey } from '../context/context'; +/** + * Baggage key + */ +var BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +/** + * Retrieve the current baggage from the active/current context + * + * @returns {Baggage} Extracted baggage from the context + */ +export function getActiveBaggage() { + return getBaggage(ContextAPI.getInstance().active()); +} +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map new file mode 100644 index 0000000..8670ebd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-helpers.js","sourceRoot":"","sources":["../../../src/baggage/context-helpers.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD;;GAEG;AACH,IAAM,WAAW,GAAG,gBAAgB,CAAC,2BAA2B,CAAC,CAAC;AAElE;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB;IACzC,OAAQ,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa,IAAI,SAAS,CAAC;AACjE,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,gBAAgB;IAC9B,OAAO,UAAU,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC;AACvD,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB,EAAE,OAAgB;IAC3D,OAAO,OAAO,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;AAChD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,OAAgB;IAC5C,OAAO,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAC1C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from 
'../api/context';\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Baggage } from './types';\n\n/**\n * Baggage key\n */\nconst BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key');\n\n/**\n * Retrieve the current baggage from the given context\n *\n * @param {Context} Context that manage all context values\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getBaggage(context: Context): Baggage | undefined {\n return (context.getValue(BAGGAGE_KEY) as Baggage) || undefined;\n}\n\n/**\n * Retrieve the current baggage from the active/current context\n *\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getActiveBaggage(): Baggage | undefined {\n return getBaggage(ContextAPI.getInstance().active());\n}\n\n/**\n * Store a baggage in the given context\n *\n * @param {Context} Context that manage all context values\n * @param {Baggage} baggage that will be set in the actual context\n */\nexport function setBaggage(context: Context, baggage: Baggage): Context {\n return context.setValue(BAGGAGE_KEY, baggage);\n}\n\n/**\n * Delete the baggage stored in the given context\n *\n * @param {Context} Context that manage all context values\n */\nexport function deleteBaggage(context: Context): Context {\n return context.deleteValue(BAGGAGE_KEY);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts new file mode 100644 index 0000000..e6b4554 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts @@ -0,0 +1,12 @@ +import type { Baggage, BaggageEntry } from '../types'; +export declare class BaggageImpl implements Baggage { + private _entries; + constructor(entries?: Map); + getEntry(key: string): BaggageEntry | undefined; + getAllEntries(): [string, BaggageEntry][]; + setEntry(key: string, entry: BaggageEntry): BaggageImpl; + removeEntry(key: string): BaggageImpl; + removeEntries(...keys: string[]): BaggageImpl; + clear(): BaggageImpl; +} +//# sourceMappingURL=baggage-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js new file mode 100644 index 0000000..c29d685 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js @@ -0,0 +1,98 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +var __read = (this && this.__read) || function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +}; +var __values = (this && this.__values) || function(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +}; +var BaggageImpl = /** @class */ (function () { + function BaggageImpl(entries) { + this._entries = entries ? new Map(entries) : new Map(); + } + BaggageImpl.prototype.getEntry = function (key) { + var entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + }; + BaggageImpl.prototype.getAllEntries = function () { + return Array.from(this._entries.entries()).map(function (_a) { + var _b = __read(_a, 2), k = _b[0], v = _b[1]; + return [k, v]; + }); + }; + BaggageImpl.prototype.setEntry = function (key, entry) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + }; + BaggageImpl.prototype.removeEntry = function (key) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + }; + BaggageImpl.prototype.removeEntries = function () { + var e_1, _a; + var keys = []; + for (var _i = 0; _i < arguments.length; _i++) { + keys[_i] = arguments[_i]; + } + var newBaggage = new BaggageImpl(this._entries); + try { + for (var keys_1 = __values(keys), keys_1_1 = keys_1.next(); !keys_1_1.done; keys_1_1 = keys_1.next()) { + var key = keys_1_1.value; + newBaggage._entries.delete(key); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (keys_1_1 && !keys_1_1.done && (_a = keys_1.return)) _a.call(keys_1); + } + finally { if (e_1) throw e_1.error; } + } + return newBaggage; + }; + BaggageImpl.prototype.clear = function () { + return new BaggageImpl(); + }; + return BaggageImpl; +}()); +export { BaggageImpl }; +//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map new file mode 100644 index 0000000..1f3d95a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"baggage-impl.js","sourceRoot":"","sources":["../../../../src/baggage/internal/baggage-impl.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIH;IAGE,qBAAY,OAAmC;QAC7C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;IACzD,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW;QAClB,IAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;IAClC,CAAC;IAED,mCAAa,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,UAAC,EAAM;gBAAN,KAAA,aAAM,EAAL,CAAC,QAAA,EAAE,CAAC,QAAA;YAAM,OAAA,CAAC,CAAC,EAAE,CAAC,CAAC;QAAN,CAAM,CAAC,CAAC;IACrE,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW,EAAE,KAAmB;QACvC,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACpC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,iCAAW,GAAX,UAAY,GAAW;QACrB,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAChC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,mCAAa,GAAb;;QAAc,cAAiB;aAAjB,UAAiB,EAAjB,qBAAiB,EAAjB,IAAiB;YAAjB,yBAAiB;;QAC7B,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;;YAClD,KAAkB,IAAA,SAAA,SAAA,IAAI,CAAA,0BAAA,4CAAE;gBAAnB,IAAM,GAAG,iBAAA;gBACZ,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACjC;;;;;;;;;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,2BAAK,GAAL;QACE,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport type { Baggage, BaggageEntry } from '../types';\n\nexport class BaggageImpl implements Baggage {\n private _entries: Map;\n\n constructor(entries?: Map) {\n this._entries = entries ? 
new Map(entries) : new Map();\n }\n\n getEntry(key: string): BaggageEntry | undefined {\n const entry = this._entries.get(key);\n if (!entry) {\n return undefined;\n }\n\n return Object.assign({}, entry);\n }\n\n getAllEntries(): [string, BaggageEntry][] {\n return Array.from(this._entries.entries()).map(([k, v]) => [k, v]);\n }\n\n setEntry(key: string, entry: BaggageEntry): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.set(key, entry);\n return newBaggage;\n }\n\n removeEntry(key: string): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.delete(key);\n return newBaggage;\n }\n\n removeEntries(...keys: string[]): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n for (const key of keys) {\n newBaggage._entries.delete(key);\n }\n return newBaggage;\n }\n\n clear(): BaggageImpl {\n return new BaggageImpl();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts new file mode 100644 index 0000000..9cd991c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts @@ -0,0 +1,5 @@ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export declare const baggageEntryMetadataSymbol: unique symbol; +//# sourceMappingURL=symbol.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js new file mode 100644 index 0000000..0e7dc36 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export var baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map new file mode 100644 index 0000000..f074866 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map @@ -0,0 +1 @@ +{"version":3,"file":"symbol.js","sourceRoot":"","sources":["../../../../src/baggage/internal/symbol.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH;;GAEG;AACH,MAAM,CAAC,IAAM,0BAA0B,GAAG,MAAM,CAAC,sBAAsB,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Symbol used to make BaggageEntryMetadata an opaque type\n */\nexport const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata');\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts new file mode 100644 index 0000000..32fa0ec --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts @@ -0,0 +1,60 @@ +import { baggageEntryMetadataSymbol } from './internal/symbol'; +export interface BaggageEntry { + /** `String` value of the `BaggageEntry`. */ + value: string; + /** + * Metadata is an optional string property defined by the W3C baggage specification. + * It currently has no special meaning defined by the specification. + */ + metadata?: BaggageEntryMetadata; +} +/** + * Serializable Metadata defined by the W3C baggage specification. + * It currently has no special meaning defined by the OpenTelemetry or W3C. + */ +export declare type BaggageEntryMetadata = { + toString(): string; +} & { + __TYPE__: typeof baggageEntryMetadataSymbol; +}; +/** + * Baggage represents collection of key-value pairs with optional metadata. + * Each key of Baggage is associated with exactly one value. + * Baggage may be used to annotate and enrich telemetry data. 
+ */ +export interface Baggage { + /** + * Get an entry from Baggage if it exists + * + * @param key The key which identifies the BaggageEntry + */ + getEntry(key: string): BaggageEntry | undefined; + /** + * Get a list of all entries in the Baggage + */ + getAllEntries(): [string, BaggageEntry][]; + /** + * Returns a new baggage with the entries from the current bag and the specified entry + * + * @param key string which identifies the baggage entry + * @param entry BaggageEntry for the given key + */ + setEntry(key: string, entry: BaggageEntry): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entry + * + * @param key key identifying the entry to be removed + */ + removeEntry(key: string): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entries + * + * @param key keys identifying the entries to be removed + */ + removeEntries(...key: string[]): Baggage; + /** + * Returns a new baggage with no entries + */ + clear(): Baggage; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.js b/node_modules/@opentelemetry/api/build/esm/baggage/types.js new file mode 100644 index 0000000..928faad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map new file mode 100644 index 0000000..ae80c19 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/baggage/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\n\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface BaggageEntry {\n /** `String` value of the `BaggageEntry`. 
*/\n value: string;\n /**\n * Metadata is an optional string property defined by the W3C baggage specification.\n * It currently has no special meaning defined by the specification.\n */\n metadata?: BaggageEntryMetadata;\n}\n\n/**\n * Serializable Metadata defined by the W3C baggage specification.\n * It currently has no special meaning defined by the OpenTelemetry or W3C.\n */\nexport type BaggageEntryMetadata = { toString(): string } & {\n __TYPE__: typeof baggageEntryMetadataSymbol;\n};\n\n/**\n * Baggage represents collection of key-value pairs with optional metadata.\n * Each key of Baggage is associated with exactly one value.\n * Baggage may be used to annotate and enrich telemetry data.\n */\nexport interface Baggage {\n /**\n * Get an entry from Baggage if it exists\n *\n * @param key The key which identifies the BaggageEntry\n */\n getEntry(key: string): BaggageEntry | undefined;\n\n /**\n * Get a list of all entries in the Baggage\n */\n getAllEntries(): [string, BaggageEntry][];\n\n /**\n * Returns a new baggage with the entries from the current bag and the specified entry\n *\n * @param key string which identifies the baggage entry\n * @param entry BaggageEntry for the given key\n */\n setEntry(key: string, entry: BaggageEntry): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entry\n *\n * @param key key identifying the entry to be removed\n */\n removeEntry(key: string): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entries\n *\n * @param key keys identifying the entries to be removed\n */\n removeEntries(...key: string[]): Baggage;\n\n /**\n * Returns a new baggage with no entries\n */\n clear(): Baggage;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts new file mode 100644 index 0000000..9955d9e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts @@ -0,0 +1,15 @@ +import { Baggage, BaggageEntry, BaggageEntryMetadata } from './types'; +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export declare function createBaggage(entries?: Record): Baggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export declare function baggageEntryMetadataFromString(str: string): BaggageEntryMetadata; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.js b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js new file mode 100644 index 0000000..3cc2716 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { DiagAPI } from '../api/diag'; +import { BaggageImpl } from './internal/baggage-impl'; +import { baggageEntryMetadataSymbol } from './internal/symbol'; +var diag = DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export function createBaggage(entries) { + if (entries === void 0) { entries = {}; } + return new BaggageImpl(new Map(Object.entries(entries))); +} +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error("Cannot create baggage metadata from unknown type: " + typeof str); + str = ''; + } + return { + __TYPE__: baggageEntryMetadataSymbol, + toString: function () { + return str; + }, + }; +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map new file mode 100644 index 0000000..f5a00f5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/baggage/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,yBAAyB,CAAC;AACtD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAG/D,IAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;AAEhC;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAC3B,OAA0C;IAA1C,wBAAA,EAAA,YAA0C;IAE1C,OAAO,IAAI,WAAW,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC3D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,8BAA8B,CAC5C,GAAW;IAEX,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,IAAI,CAAC,KAAK,CACR,uDAAqD,OAAO,GAAK,CAClE,CAAC;QACF,GAAG,GAAG,EAAE,CAAC;KACV;IAED,OAAO;QACL,QAAQ,EAAE,0BAA0B;QACpC,QAAQ;YACN,OAAO,GAAG,CAAC;QACb,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagAPI } from '../api/diag';\nimport { BaggageImpl } from './internal/baggage-impl';\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\nimport { Baggage, BaggageEntry, BaggageEntryMetadata } from './types';\n\nconst diag = DiagAPI.instance();\n\n/**\n * Create a new Baggage with optional entries\n *\n * @param entries An array of baggage entries the new baggage should contain\n */\nexport function createBaggage(\n entries: Record = {}\n): Baggage {\n return new BaggageImpl(new Map(Object.entries(entries)));\n}\n\n/**\n * Create a serializable BaggageEntryMetadata object from a string.\n *\n * @param str string metadata. 
Format is currently not defined by the spec and has no special meaning.\n *\n */\nexport function baggageEntryMetadataFromString(\n str: string\n): BaggageEntryMetadata {\n if (typeof str !== 'string') {\n diag.error(\n `Cannot create baggage metadata from unknown type: ${typeof str}`\n );\n str = '';\n }\n\n return {\n __TYPE__: baggageEntryMetadataSymbol,\n toString() {\n return str;\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts new file mode 100644 index 0000000..19994fb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts @@ -0,0 +1,15 @@ +/** + * Attributes is a map from string to attribute values. + * + * Note: only the own enumerable keys are counted as valid attribute keys. + */ +export interface Attributes { + [attributeKey: string]: AttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type AttributeValue = string | number | boolean | Array | Array | Array; +//# sourceMappingURL=Attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.js b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js new file mode 100644 index 0000000..dbb1e49 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map new file mode 100644 index 0000000..2649c94 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Attributes.js","sourceRoot":"","sources":["../../../src/common/Attributes.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Attributes is a map from string to attribute values.\n *\n * Note: only the own enumerable keys are counted as valid attribute keys.\n */\nexport interface Attributes {\n [attributeKey: string]: AttributeValue | undefined;\n}\n\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport type AttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts new file mode 100644 index 0000000..e175a7f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts @@ -0,0 +1,26 @@ +interface ExceptionWithCode { + code: string | number; + name?: string; + message?: string; + stack?: string; +} +interface ExceptionWithMessage { + code?: string | number; + message: string; + name?: string; + stack?: string; +} +interface ExceptionWithName { + code?: string | number; + message?: string; + name: string; + stack?: string; +} +/** + * Defines Exception. + * + * string or an object with one of (message or name or code) and optional stack + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; +export {}; +//# sourceMappingURL=Exception.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.js b/node_modules/@opentelemetry/api/build/esm/common/Exception.js new file mode 100644 index 0000000..6522a8e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map b/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map new file mode 100644 index 0000000..989dd3d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Exception.js","sourceRoot":"","sources":["../../../src/common/Exception.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\ninterface ExceptionWithCode {\n code: string | number;\n name?: string;\n message?: string;\n stack?: string;\n}\n\ninterface ExceptionWithMessage {\n code?: string | number;\n message: string;\n name?: string;\n stack?: string;\n}\n\ninterface ExceptionWithName {\n code?: string | number;\n message?: string;\n name: string;\n stack?: string;\n}\n\n/**\n * Defines Exception.\n *\n * string or an object with one of (message or name or code) and optional stack\n */\nexport type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts new file mode 100644 index 0000000..cc3c502 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts @@ -0,0 +1,20 @@ +/** + * Defines High-Resolution Time. + * + * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970. + * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds. + * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150. + * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds: + * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210. + * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds: + * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000. + * This is represented in HrTime format as [1609504210, 150000000]. + */ +export declare type HrTime = [number, number]; +/** + * Defines TimeInput. 
+ * + * hrtime, epoch milliseconds, performance.now() or Date + */ +export declare type TimeInput = HrTime | number | Date; +//# sourceMappingURL=Time.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.js b/node_modules/@opentelemetry/api/build/esm/common/Time.js new file mode 100644 index 0000000..2abdf58 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.js.map b/node_modules/@opentelemetry/api/build/esm/common/Time.js.map new file mode 100644 index 0000000..ae124f0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Time.js","sourceRoot":"","sources":["../../../src/common/Time.ts"],"names":[],"mappings":"","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Defines High-Resolution Time.\n *\n * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970.\n * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds.\n * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150.\n * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds:\n * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210.\n * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds:\n * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000.\n * This is represented in HrTime format as [1609504210, 150000000].\n */\nexport type HrTime = [number, number];\n\n/**\n * Defines TimeInput.\n *\n * hrtime, epoch milliseconds, performance.now() or Date\n */\nexport type TimeInput = HrTime | number | Date;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context-api.d.ts b/node_modules/@opentelemetry/api/build/esm/context-api.d.ts new file mode 100644 index 0000000..650f4ee --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context-api.d.ts @@ -0,0 +1,4 @@ +import { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export declare const context: ContextAPI; +//# sourceMappingURL=context-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context-api.js b/node_modules/@opentelemetry/api/build/esm/context-api.js new file mode 100644 index 0000000..b89fb25 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context-api.js @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance 
with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export var context = ContextAPI.getInstance(); +//# sourceMappingURL=context-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context-api.js.map b/node_modules/@opentelemetry/api/build/esm/context-api.js.map new file mode 100644 index 0000000..a59bfc1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-api.js","sourceRoot":"","sources":["../../src/context-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAC3C,iCAAiC;AACjC,MAAM,CAAC,IAAM,OAAO,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { ContextAPI } from './api/context';\n/** Entrypoint for context API */\nexport const context = ContextAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts new file mode 100644 index 0000000..48a1659 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts @@ -0,0 +1,9 @@ +import * as types from './types'; +export declare class NoopContextManager implements types.ContextManager { + active(): types.Context; + with ReturnType>(_context: types.Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + bind(_context: types.Context, target: T): T; + enable(): this; + disable(): this; +} +//# sourceMappingURL=NoopContextManager.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js new file mode 100644 index 0000000..9794eff --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __read = (this && this.__read) || function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +}; +var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +}; +import { ROOT_CONTEXT } from './context'; +var NoopContextManager = /** @class */ (function () { + function NoopContextManager() { + } + NoopContextManager.prototype.active = function () { + return ROOT_CONTEXT; + }; + NoopContextManager.prototype.with = function (_context, fn, thisArg) { + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return fn.call.apply(fn, __spreadArray([thisArg], __read(args), false)); + }; + NoopContextManager.prototype.bind = function (_context, target) { + return target; + }; + NoopContextManager.prototype.enable = function () { + return this; + }; + NoopContextManager.prototype.disable = function () { + return this; + }; + return NoopContextManager; +}()); +export { NoopContextManager }; +//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map new file mode 100644 index 0000000..045925e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopContextManager.js","sourceRoot":"","sources":["../../../src/context/NoopContextManager.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAGzC;IAAA;IAyBA,CAAC;IAxBC,mCAAM,GAAN;QACE,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,iCAAI,GAAJ,UACE,QAAuB,EACvB,EAAK,EACL,OAA8B;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,EAAE,CAAC,IAAI,OAAP,EAAE,iBAAM,OAAO,UAAK,IAAI,WAAE;IACnC,CAAC;IAED,iCAAI,GAAJ,UAAQ,QAAuB,EAAE,MAAS;QACxC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,mCAAM,GAAN;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IAED,oCAAO,GAAP;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IACH,yBAAC;AAAD,CAAC,AAzBD,IAyBC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ROOT_CONTEXT } from './context';\nimport * as types from './types';\n\nexport class NoopContextManager implements types.ContextManager {\n active(): types.Context {\n return ROOT_CONTEXT;\n }\n\n with ReturnType>(\n _context: types.Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return fn.call(thisArg, ...args);\n }\n\n bind(_context: types.Context, target: T): T {\n return target;\n }\n\n enable(): this {\n return this;\n }\n\n disable(): this {\n return this;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.d.ts b/node_modules/@opentelemetry/api/build/esm/context/context.d.ts new file mode 100644 index 0000000..8be0259 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.d.ts @@ -0,0 +1,6 @@ +import { Context } from './types'; +/** Get a key to uniquely identify a context value */ +export declare function createContextKey(description: string): symbol; +/** The root context is used as the default parent context when there is no active context */ +export declare const ROOT_CONTEXT: Context; +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.js b/node_modules/@opentelemetry/api/build/esm/context/context.js new file mode 100644 index 0000000..f8909de --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.js @@ -0,0 +1,52 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** Get a key to uniquely identify a context value */ +export function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +var BaseContext = /** @class */ (function () { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + function BaseContext(parentContext) { + // for minification + var self = this; + self._currentContext = parentContext ? 
new Map(parentContext) : new Map(); + self.getValue = function (key) { return self._currentContext.get(key); }; + self.setValue = function (key, value) { + var context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = function (key) { + var context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } + return BaseContext; +}()); +/** The root context is used as the default parent context when there is no active context */ +export var ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.js.map b/node_modules/@opentelemetry/api/build/esm/context/context.js.map new file mode 100644 index 0000000..7a7affd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/context/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,qDAAqD;AACrD,MAAM,UAAU,gBAAgB,CAAC,WAAmB;IAClD,0EAA0E;IAC1E,2EAA2E;IAC3E,wEAAwE;IACxE,mDAAmD;IACnD,EAAE;IACF,8EAA8E;IAC9E,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AACjC,CAAC;AAED;IAGE;;;;OAIG;IACH,qBAAY,aAAoC;QAC9C,mBAAmB;QACnB,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;QAE1E,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,IAAK,OAAA,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,EAA7B,CAA6B,CAAC;QAE/D,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,EAAE,KAAc;YAC1C,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;YACxC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;QAEF,IAAI,CAAC,WAAW,GAAG,UAAC,GAAW;YAC7B,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YACpC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;IACJ,CAAC;IAyBH,kBAAC;AAAD,CAAC,AApDD,IAoDC;AAED,6FAA6F;AAC7F,MAAM,CAAC,IAAM,YAAY,GAAY,IAAI,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from './types';\n\n/** Get a key to uniquely identify a context value */\nexport function createContextKey(description: string) {\n // The specification states that for the same input, multiple calls should\n // return different keys. 
Due to the nature of the JS dependency management\n // system, this creates problems where multiple versions of some package\n // could hold different keys for the same property.\n //\n // Therefore, we use Symbol.for which returns the same key for the same input.\n return Symbol.for(description);\n}\n\nclass BaseContext implements Context {\n private _currentContext!: Map;\n\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n constructor(parentContext?: Map) {\n // for minification\n const self = this;\n\n self._currentContext = parentContext ? new Map(parentContext) : new Map();\n\n self.getValue = (key: symbol) => self._currentContext.get(key);\n\n self.setValue = (key: symbol, value: unknown): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n\n self.deleteValue = (key: symbol): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n }\n\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n public getValue!: (key: symbol) => unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n public setValue!: (key: symbol, value: unknown) => Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n public deleteValue!: (key: symbol) => Context;\n}\n\n/** The root context is used as the default parent context when there is no active context */\nexport const ROOT_CONTEXT: Context = new BaseContext();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.d.ts b/node_modules/@opentelemetry/api/build/esm/context/types.d.ts new file mode 100644 index 0000000..7e86632 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.d.ts @@ -0,0 +1,52 @@ +export interface Context { + /** + * Get a value from the context. + * + * @param key key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key context key for which to set the value + * @param value value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +export interface ContextManager { + /** + * Get the current active context + */ + active(): Context; + /** + * Run the fn callback with object set as the current active context + * @param context Any object to set as the current active context + * @param fn A callback to be immediately run within a specific context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind an object as the current context (or a specific one) + * @param [context] Optionally specify the context which you want to assign + * @param target Any object to which a context need to be set + */ + bind(context: Context, target: T): T; + /** + * Enable context management + */ + enable(): this; + /** + * Disable context management + */ + disable(): this; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.js b/node_modules/@opentelemetry/api/build/esm/context/types.js new file mode 100644 index 0000000..928faad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.js.map b/node_modules/@opentelemetry/api/build/esm/context/types.js.map new file mode 100644 index 0000000..d438aa3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/context/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n getValue(key: symbol): unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n\nexport interface ContextManager {\n /**\n * Get the current active context\n */\n active(): Context;\n\n /**\n * Run the fn callback with object set as the current active context\n * @param context Any object to set as the current active context\n * @param fn A callback to be immediately run within a specific context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType;\n\n /**\n * Bind an object as the current context (or a specific one)\n * @param [context] Optionally specify the context which you want to assign\n * @param target Any object to which a context need to be set\n */\n bind(context: Context, target: T): T;\n\n /**\n * Enable context management\n */\n enable(): this;\n\n /**\n * Disable context management\n */\n disable(): this;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag-api.d.ts b/node_modules/@opentelemetry/api/build/esm/diag-api.d.ts new file mode 100644 index 0000000..d82fdb1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag-api.d.ts @@ -0,0 +1,9 @@ +import { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. 
+ */ +export declare const diag: DiagAPI; +//# sourceMappingURL=diag-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag-api.js b/node_modules/@opentelemetry/api/build/esm/diag-api.js new file mode 100644 index 0000000..9f85c1b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag-api.js @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +export var diag = DiagAPI.instance(); +//# sourceMappingURL=diag-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag-api.js.map b/node_modules/@opentelemetry/api/build/esm/diag-api.js.map new file mode 100644 index 0000000..6b09a0c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag-api.js","sourceRoot":"","sources":["../../src/diag-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AACrC;;;;;GAKG;AACH,MAAM,CAAC,IAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { DiagAPI } from './api/diag';\n/**\n * Entrypoint for Diag API.\n * Defines Diagnostic handler used for internal diagnostic logging operations.\n * The default provides a Noop DiagLogger implementation which may be changed via the\n * diag.setLogger(logger: DiagLogger) function.\n */\nexport const diag = DiagAPI.instance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts new file mode 100644 index 0000000..f060950 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts @@ -0,0 +1,20 @@ +import { ComponentLoggerOptions, DiagLogger } from './types'; +/** + * Component Logger which is meant 
to be used as part of any component which + * will add automatically additional namespace in front of the log message. + * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +export declare class DiagComponentLogger implements DiagLogger { + private _namespace; + constructor(props: ComponentLoggerOptions); + debug(...args: any[]): void; + error(...args: any[]): void; + info(...args: any[]): void; + warn(...args: any[]): void; + verbose(...args: any[]): void; +} +//# sourceMappingURL=ComponentLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js new file mode 100644 index 0000000..44bc8be --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js @@ -0,0 +1,102 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __read = (this && this.__read) || function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +}; +var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +}; +import { getGlobal } from '../internal/global-utils'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +var DiagComponentLogger = /** @class */ (function () { + function DiagComponentLogger(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + DiagComponentLogger.prototype.debug = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('debug', this._namespace, args); + }; + DiagComponentLogger.prototype.error = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('error', this._namespace, args); + }; + DiagComponentLogger.prototype.info = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('info', this._namespace, args); + }; + DiagComponentLogger.prototype.warn = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('warn', this._namespace, args); + }; + DiagComponentLogger.prototype.verbose = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('verbose', this._namespace, args); + }; + return DiagComponentLogger; +}()); +export { DiagComponentLogger }; +function logProxy(funcName, namespace, args) { + var logger = getGlobal('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName].apply(logger, __spreadArray([], __read(args), false)); +} +//# sourceMappingURL=ComponentLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map new file mode 100644 index 0000000..8671613 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ComponentLogger.js","sourceRoot":"","sources":["../../../src/diag/ComponentLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAGrD;;;;;;;;GAQG;AACH;IAGE,6BAAY,KAA6B;QACvC,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,SAAS,IAAI,qBAAqB,CAAC;IAC7D,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,qCAAO,GAAd;QAAe,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QAC3B,OAAO,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;IACH,0BAAC;AAAD,CAAC,AA1BD,IA0BC;;AAED,SAAS,QAAQ,CACf,QAA0B,EAC1B,SAAiB,EACjB,IAAS;IAET,IAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;IACjC,6BAA6B;IAC7B,IAAI,CAAC,MAAM,EAAE;QACX,OAAO;KACR;IAED,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,2BAAe,IAAoC,WAAE;AACpE,CAAC","sourcesContent":["/*\n * Copyright 
The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getGlobal } from '../internal/global-utils';\nimport { ComponentLoggerOptions, DiagLogger, DiagLogFunction } from './types';\n\n/**\n * Component Logger which is meant to be used as part of any component which\n * will add automatically additional namespace in front of the log message.\n * It will then forward all message to global diag logger\n * @example\n * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' });\n * cLogger.debug('test');\n * // @opentelemetry/instrumentation-http test\n */\nexport class DiagComponentLogger implements DiagLogger {\n private _namespace: string;\n\n constructor(props: ComponentLoggerOptions) {\n this._namespace = props.namespace || 'DiagComponentLogger';\n }\n\n public debug(...args: any[]): void {\n return logProxy('debug', this._namespace, args);\n }\n\n public error(...args: any[]): void {\n return logProxy('error', this._namespace, args);\n }\n\n public info(...args: any[]): void {\n return logProxy('info', this._namespace, args);\n }\n\n public warn(...args: any[]): void {\n return logProxy('warn', this._namespace, args);\n }\n\n public verbose(...args: any[]): void {\n return logProxy('verbose', this._namespace, args);\n }\n}\n\nfunction logProxy(\n funcName: keyof DiagLogger,\n namespace: string,\n args: any\n): void {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) {\n return;\n }\n\n args.unshift(namespace);\n return logger[funcName](...(args as Parameters));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts new file mode 100644 index 0000000..fa3db1e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts @@ -0,0 +1,38 @@ +import { DiagLogger, DiagLogFunction } from './types'; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +export declare class DiagConsoleLogger implements DiagLogger { + constructor(); + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. 
+ * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. Useful scenarios would be to log the execution + * order of async operations + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +//# sourceMappingURL=consoleLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js new file mode 100644 index 0000000..5965b8a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
+ * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +var DiagConsoleLogger = /** @class */ (function () { + function DiagConsoleLogger() { + function _consoleFunc(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + var theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (var i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } + return DiagConsoleLogger; +}()); +export { DiagConsoleLogger }; +//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map new file mode 100644 index 0000000..fbfd0cf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"consoleLogger.js","sourceRoot":"","sources":["../../../src/diag/consoleLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,IAAM,UAAU,GAAiD;IAC/D,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE;CAC7B,CAAC;AAEF;;;;GAIG;AACH;IACE;QACE,SAAS,YAAY,CAAC,QAAwB;YAC5C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAI,OAAO,EAAE;oBACX,mFAAmF;oBACnF,sCAAsC;oBACtC,IAAI,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAChC,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,6CAA6C;wBAC7C,sCAAsC;wBACtC,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC;qBACvB;oBAED,uBAAuB;oBACvB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,OAAO,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACrC;iBACF;YACH,CAAC,CAAC;QACJ,CAAC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACvD;IACH,CAAC;IAkCH,wBAAC;AAAD,CAAC,AA3DD,IA2DC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger, DiagLogFunction } from './types';\n\ntype ConsoleMapKeys = 'error' | 'warn' | 'info' | 'debug' | 'trace';\nconst consoleMap: { n: keyof DiagLogger; c: ConsoleMapKeys }[] = [\n { n: 'error', c: 'error' },\n { n: 'warn', c: 'warn' },\n { n: 'info', c: 'info' },\n { n: 'debug', c: 'debug' },\n { n: 'verbose', c: 'trace' },\n];\n\n/**\n * A simple Immutable Console based 
diagnostic logger which will output any messages to the Console.\n * If you want to limit the amount of logging to a specific level or lower use the\n * {@link createLogLevelDiagLogger}\n */\nexport class DiagConsoleLogger implements DiagLogger {\n constructor() {\n function _consoleFunc(funcName: ConsoleMapKeys): DiagLogFunction {\n return function (...args) {\n if (console) {\n // Some environments only expose the console when the F12 developer console is open\n // eslint-disable-next-line no-console\n let theFunc = console[funcName];\n if (typeof theFunc !== 'function') {\n // Not all environments support all functions\n // eslint-disable-next-line no-console\n theFunc = console.log;\n }\n\n // One last final check\n if (typeof theFunc === 'function') {\n return theFunc.apply(console, args);\n }\n }\n };\n }\n\n for (let i = 0; i < consoleMap.length; i++) {\n this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c);\n }\n }\n\n /** Log an error scenario that was not expected and caused the requested operation to fail. */\n public error!: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n public warn!: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n public info!: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario. Useful scenarios would be to log the execution\n * order of async operations\n */\n public debug!: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n public verbose!: DiagLogFunction;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts new file mode 100644 index 0000000..890b9f1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts @@ -0,0 +1,3 @@ +import { DiagLogger, DiagLogLevel } from '../types'; +export declare function createLogLevelDiagLogger(maxLevel: DiagLogLevel, logger: DiagLogger): DiagLogger; +//# sourceMappingURL=logLevelLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js new file mode 100644 index 0000000..aedab38 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { DiagLogLevel } from '../types'; +export function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < DiagLogLevel.NONE) { + maxLevel = DiagLogLevel.NONE; + } + else if (maxLevel > DiagLogLevel.ALL) { + maxLevel = DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + var theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', DiagLogLevel.ERROR), + warn: _filterFunc('warn', DiagLogLevel.WARN), + info: _filterFunc('info', DiagLogLevel.INFO), + debug: _filterFunc('debug', DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE), + }; +} +//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map new file mode 100644 index 0000000..7f9fafd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logLevelLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/logLevelLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAA+B,YAAY,EAAE,MAAM,UAAU,CAAC;AAErE,MAAM,UAAU,wBAAwB,CACtC,QAAsB,EACtB,MAAkB;IAElB,IAAI,QAAQ,GAAG,YAAY,CAAC,IAAI,EAAE;QAChC,QAAQ,GAAG,YAAY,CAAC,IAAI,CAAC;KAC9B;SAAM,IAAI,QAAQ,GAAG,YAAY,CAAC,GAAG,EAAE;QACtC,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC;KAC7B;IAED,0CAA0C;IAC1C,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IAEtB,SAAS,WAAW,CAClB,QAA0B,EAC1B,QAAsB;QAEtB,IAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,OAAO,OAAO,KAAK,UAAU,IAAI,QAAQ,IAAI,QAAQ,EAAE;YACzD,OAAO,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;QACD,OAAO,cAAa,CAAC,CAAC;IACxB,CAAC;IAED,OAAO;QACL,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC;QAC/C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,YAAY,CAAC,IAAI,CAAC;QAC5C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,YAAY,CAAC,IAAI,CAAC;QAC5C,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC;QAC/C,OAAO,EAAE,WAAW,CAAC,SAAS,EAAE,YAAY,CAAC,OAAO,CAAC;KACtD,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogFunction, DiagLogger, DiagLogLevel } from '../types';\n\nexport function createLogLevelDiagLogger(\n maxLevel: DiagLogLevel,\n logger: DiagLogger\n): DiagLogger {\n if (maxLevel < DiagLogLevel.NONE) {\n maxLevel = DiagLogLevel.NONE;\n } else if (maxLevel 
> DiagLogLevel.ALL) {\n maxLevel = DiagLogLevel.ALL;\n }\n\n // In case the logger is null or undefined\n logger = logger || {};\n\n function _filterFunc(\n funcName: keyof DiagLogger,\n theLevel: DiagLogLevel\n ): DiagLogFunction {\n const theFunc = logger[funcName];\n\n if (typeof theFunc === 'function' && maxLevel >= theLevel) {\n return theFunc.bind(logger);\n }\n return function () {};\n }\n\n return {\n error: _filterFunc('error', DiagLogLevel.ERROR),\n warn: _filterFunc('warn', DiagLogLevel.WARN),\n info: _filterFunc('info', DiagLogLevel.INFO),\n debug: _filterFunc('debug', DiagLogLevel.DEBUG),\n verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts new file mode 100644 index 0000000..ac71ee3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts @@ -0,0 +1,8 @@ +import { DiagLogger } from '../types'; +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. + * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export declare function createNoopDiagLogger(): DiagLogger; +//# sourceMappingURL=noopLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js new file mode 100644 index 0000000..7d5ba63 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +function noopLogFunction() { } +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. 
+ * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export function createNoopDiagLogger() { + return { + verbose: noopLogFunction, + debug: noopLogFunction, + info: noopLogFunction, + warn: noopLogFunction, + error: noopLogFunction, + }; +} +//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map new file mode 100644 index 0000000..bf20aea --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"noopLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/noopLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,SAAS,eAAe,KAAI,CAAC;AAE7B;;;;GAIG;AACH,MAAM,UAAU,oBAAoB;IAClC,OAAO;QACL,OAAO,EAAE,eAAe;QACxB,KAAK,EAAE,eAAe;QACtB,IAAI,EAAE,eAAe;QACrB,IAAI,EAAE,eAAe;QACrB,KAAK,EAAE,eAAe;KACvB,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger } from '../types';\n\nfunction noopLogFunction() {}\n\n/**\n * Returns a No-Op Diagnostic logger where all messages do nothing.\n * @implements {@link DiagLogger}\n * @returns {DiagLogger}\n */\nexport function createNoopDiagLogger(): DiagLogger {\n return {\n verbose: noopLogFunction,\n debug: noopLogFunction,\n info: noopLogFunction,\n warn: noopLogFunction,\n error: noopLogFunction,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts new file mode 100644 index 0000000..e992cc5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts @@ -0,0 +1,100 @@ +export declare type DiagLogFunction = (message: string, ...args: unknown[]) => void; +/** + * Defines an internal diagnostic logger interface which is used to log internal diagnostic + * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function. + * API provided implementations include :- + * - a No-Op {@link createNoopDiagLogger} + * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger} + * - a general Console {@link DiagConsoleLogger} version. + */ +export interface DiagLogger { + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. 
+ * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. + * For example: Logging the order of execution of async operations. + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export declare enum DiagLogLevel { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + NONE = 0, + /** Identifies an error scenario */ + ERROR = 30, + /** Identifies a warning scenario */ + WARN = 50, + /** General informational log message */ + INFO = 60, + /** General debug log message */ + DEBUG = 70, + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + VERBOSE = 80, + /** Used to set the logging level to include all logging */ + ALL = 9999 +} +/** + * Defines options for ComponentLogger + */ +export interface ComponentLoggerOptions { + namespace: string; +} +export interface DiagLoggerOptions { + /** + * The {@link DiagLogLevel} used to filter logs sent to the logger. + * + * @defaultValue DiagLogLevel.INFO + */ + logLevel?: DiagLogLevel; + /** + * Setting this value to `true` will suppress the warning message normally emitted when registering a logger when another logger is already registered. + */ + suppressOverrideMessage?: boolean; +} +export interface DiagLoggerApi { + /** + * Set the global DiagLogger and DiagLogLevel. + * If a global diag logger is already set, this will override it. + * + * @param logger - The {@link DiagLogger} instance to set as the default logger. + * @param options - A {@link DiagLoggerOptions} object. If not provided, default values will be set. + * @returns `true` if the logger was successfully registered, else `false` + */ + setLogger(logger: DiagLogger, options?: DiagLoggerOptions): boolean; + /** + * + * @param logger - The {@link DiagLogger} instance to set as the default logger. + * @param logLevel - The {@link DiagLogLevel} used to filter logs sent to the logger. If not provided it will default to {@link DiagLogLevel.INFO}. + * @returns `true` if the logger was successfully registered, else `false` + */ + setLogger(logger: DiagLogger, logLevel?: DiagLogLevel): boolean; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.js b/node_modules/@opentelemetry/api/build/esm/diag/types.js new file mode 100644 index 0000000..306585e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel || (DiagLogLevel = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.js.map b/node_modules/@opentelemetry/api/build/esm/diag/types.js.map new file mode 100644 index 0000000..6578cce --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/diag/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AA+CH;;;;GAIG;AACH,MAAM,CAAN,IAAY,YAwBX;AAxBD,WAAY,YAAY;IACtB,uFAAuF;IACvF,+CAAQ,CAAA;IAER,mCAAmC;IACnC,kDAAU,CAAA;IAEV,oCAAoC;IACpC,gDAAS,CAAA;IAET,wCAAwC;IACxC,gDAAS,CAAA;IAET,gCAAgC;IAChC,kDAAU,CAAA;IAEV;;;OAGG;IACH,sDAAY,CAAA;IAEZ,2DAA2D;IAC3D,gDAAU,CAAA;AACZ,CAAC,EAxBW,YAAY,KAAZ,YAAY,QAwBvB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport type DiagLogFunction = (message: string, ...args: unknown[]) => void;\n\n/**\n * Defines an internal diagnostic logger interface which is used to log internal diagnostic\n * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function.\n * API provided implementations include :-\n * - a No-Op {@link createNoopDiagLogger}\n * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger}\n * - a general Console {@link DiagConsoleLogger} version.\n 
*/\nexport interface DiagLogger {\n /** Log an error scenario that was not expected and caused the requested operation to fail. */\n error: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n warn: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n info: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario.\n * For example: Logging the order of execution of async operations.\n */\n debug: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n verbose: DiagLogFunction;\n}\n\n/**\n * Defines the available internal logging levels for the diagnostic logger, the numeric values\n * of the levels are defined to match the original values from the initial LogLevel to avoid\n * compatibility/migration issues for any implementation that assume the numeric ordering.\n */\nexport enum DiagLogLevel {\n /** Diagnostic Logging level setting to disable all logging (except and forced logs) */\n NONE = 0,\n\n /** Identifies an error scenario */\n ERROR = 30,\n\n /** Identifies a warning scenario */\n WARN = 50,\n\n /** General informational log message */\n INFO = 60,\n\n /** General debug log message */\n DEBUG = 70,\n\n /**\n * Detailed trace level logging should only be used for development, should only be set\n * in a development environment.\n */\n VERBOSE = 80,\n\n /** Used to set the logging level to include all logging */\n ALL = 9999,\n}\n\n/**\n * Defines options for ComponentLogger\n */\nexport interface ComponentLoggerOptions {\n namespace: string;\n}\n\nexport interface DiagLoggerOptions {\n /**\n * The {@link DiagLogLevel} used to filter logs sent to the logger.\n *\n * @defaultValue DiagLogLevel.INFO\n */\n logLevel?: DiagLogLevel;\n\n /**\n * Setting this value to `true` will suppress the warning message normally emitted when registering a logger when another logger is already registered.\n */\n suppressOverrideMessage?: boolean;\n}\n\nexport interface DiagLoggerApi {\n /**\n * Set the global DiagLogger and DiagLogLevel.\n * If a global diag logger is already set, this will override it.\n *\n * @param logger - The {@link DiagLogger} instance to set as the default logger.\n * @param options - A {@link DiagLoggerOptions} object. If not provided, default values will be set.\n * @returns `true` if the logger was successfully registered, else `false`\n */\n setLogger(logger: DiagLogger, options?: DiagLoggerOptions): boolean;\n\n /**\n *\n * @param logger - The {@link DiagLogger} instance to set as the default logger.\n * @param logLevel - The {@link DiagLogLevel} used to filter logs sent to the logger. 
If not provided it will default to {@link DiagLogLevel.INFO}.\n * @returns `true` if the logger was successfully registered, else `false`\n */\n setLogger(logger: DiagLogger, logLevel?: DiagLogLevel): boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/index.d.ts b/node_modules/@opentelemetry/api/build/esm/experimental/index.d.ts new file mode 100644 index 0000000..bec3965 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/index.d.ts @@ -0,0 +1,3 @@ +export { wrapTracer, SugaredTracer } from './trace/SugaredTracer'; +export { SugaredSpanOptions } from './trace/SugaredOptions'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/index.js b/node_modules/@opentelemetry/api/build/esm/experimental/index.js new file mode 100644 index 0000000..8400e49 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export { wrapTracer, SugaredTracer } from './trace/SugaredTracer'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/index.js.map b/node_modules/@opentelemetry/api/build/esm/experimental/index.js.map new file mode 100644 index 0000000..d1699d9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/experimental/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,OAAO,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport { wrapTracer, SugaredTracer } from './trace/SugaredTracer';\nexport { SugaredSpanOptions } from './trace/SugaredOptions';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.d.ts b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.d.ts new file mode 100644 index 0000000..89040af --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.d.ts @@ -0,0 +1,13 @@ +import { Span, SpanOptions } from '../../'; +/** + * Options needed for span creation + */ +export interface SugaredSpanOptions extends SpanOptions { + 
/** + * function to overwrite default exception behavior to record the exception. No exceptions should be thrown in the function. + * @param e Error which triggered this exception + * @param span current span from context + */ + onException?: (e: Error, span: Span) => void; +} +//# sourceMappingURL=SugaredOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js new file mode 100644 index 0000000..0c6a2bd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=SugaredOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js.map b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js.map new file mode 100644 index 0000000..2a18a56 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SugaredOptions.js","sourceRoot":"","sources":["../../../../src/experimental/trace/SugaredOptions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Span, SpanOptions } from '../../';\n\n/**\n * Options needed for span creation\n */\nexport interface SugaredSpanOptions extends SpanOptions {\n /**\n * function to overwrite default exception behavior to record the exception. 
No exceptions should be thrown in the function.\n * @param e Error which triggered this exception\n * @param span current span from context\n */\n onException?: (e: Error, span: Span) => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.d.ts b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.d.ts new file mode 100644 index 0000000..1ba7da9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.d.ts @@ -0,0 +1,64 @@ +import { SugaredSpanOptions } from './SugaredOptions'; +import { Context, Span, Tracer } from '../../'; +/** + * return a new SugaredTracer created from the supplied one + * @param tracer + */ +export declare function wrapTracer(tracer: Tracer): SugaredTracer; +export declare class SugaredTracer implements Tracer { + private readonly _tracer; + constructor(tracer: Tracer); + startActiveSpan: Tracer['startActiveSpan']; + startSpan: Tracer['startSpan']; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally, the new span gets set in context and this context is activated + * for the duration of the function call. + * The span will be closed after the function has executed. + * If an exception occurs, it is recorded, the status is set to ERROR and the exception is rethrown. + * + * @param name The name of the span + * @param [options] SugaredSpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.withActiveSpan('op', span => { + * // do some work + * }); + * @example + * const something = await tracer.withActiveSpan('op', span => { + * // do some async work + * }); + */ + withActiveSpan ReturnType>(name: string, fn: F): ReturnType; + withActiveSpan ReturnType>(name: string, options: SugaredSpanOptions, fn: F): ReturnType; + withActiveSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; + /** + * Starts a new {@link Span} and ends it after execution of fn without setting it on context. + * The span will be closed after the function has executed. + * If an exception occurs, it is recorded, the status is et to ERROR and rethrown. + * + * This method does NOT modify the current Context. 
+ * + * @param name The name of the span + * @param [options] SugaredSpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns Span The newly created span + * @example + * const something = tracer.withSpan('op', span => { + * // do some work + * }); + * @example + * const something = await tracer.withSpan('op', span => { + * // do some async work + * }); + */ + withSpan ReturnType>(name: string, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; +} +//# sourceMappingURL=SugaredTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js new file mode 100644 index 0000000..6fb98de --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js @@ -0,0 +1,92 @@ +import { context, SpanStatusCode } from '../../'; +var defaultOnException = function (e, span) { + span.recordException(e); + span.setStatus({ + code: SpanStatusCode.ERROR, + }); +}; +/** + * return a new SugaredTracer created from the supplied one + * @param tracer + */ +export function wrapTracer(tracer) { + return new SugaredTracer(tracer); +} +var SugaredTracer = /** @class */ (function () { + function SugaredTracer(tracer) { + this._tracer = tracer; + this.startSpan = tracer.startSpan.bind(this._tracer); + this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer); + } + SugaredTracer.prototype.withActiveSpan = function (name, arg2, arg3, arg4) { + var _a = massageParams(arg2, arg3, arg4), opts = _a.opts, ctx = _a.ctx, fn = _a.fn; + return this._tracer.startActiveSpan(name, opts, ctx, function (span) { + return handleFn(span, opts, fn); + }); + }; + SugaredTracer.prototype.withSpan = function (name, arg2, arg3, arg4) { + var _a = massageParams(arg2, arg3, arg4), opts = _a.opts, ctx = _a.ctx, fn = _a.fn; + var span = this._tracer.startSpan(name, opts, ctx); + return handleFn(span, opts, fn); + }; + return SugaredTracer; +}()); +export { SugaredTracer }; +/** + * Massages parameters of withSpan and withActiveSpan to allow signature overwrites + * @param arg + * @param arg2 + * @param arg3 + */ +function massageParams(arg, arg2, arg3) { + var opts; + var ctx; + var fn; + if (!arg2 && !arg3) { + fn = arg; + } + else if (!arg3) { + opts = arg; + fn = arg2; + } + else { + opts = arg; + ctx = arg2; + fn = arg3; + } + opts = opts !== null && opts !== void 0 ? opts : {}; + ctx = ctx !== null && ctx !== void 0 ? ctx : context.active(); + return { opts: opts, ctx: ctx, fn: fn }; +} +/** + * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling + * @param span + * @param opts + * @param fn + */ +function handleFn(span, opts, fn) { + var _a; + var onException = (_a = opts.onException) !== null && _a !== void 0 ? 
_a : defaultOnException; + var errorHandler = function (e) { + onException(e, span); + span.end(); + throw e; + }; + try { + var ret = fn(span); + // if fn is an async function, attach a recordException and spanEnd callback to the promise + if (typeof (ret === null || ret === void 0 ? void 0 : ret.then) === 'function') { + return ret.then(function (val) { + span.end(); + return val; + }, errorHandler); + } + span.end(); + return ret; + } + catch (e) { + // add throw to signal the compiler that this will throw in the inner scope + throw errorHandler(e); + } +} +//# sourceMappingURL=SugaredTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js.map b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js.map new file mode 100644 index 0000000..d9bac44 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SugaredTracer.js","sourceRoot":"","sources":["../../../../src/experimental/trace/SugaredTracer.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,OAAO,EAAiB,cAAc,EAAU,MAAM,QAAQ,CAAC;AAExE,IAAM,kBAAkB,GAAG,UAAC,CAAQ,EAAE,IAAU;IAC9C,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,CAAC,SAAS,CAAC;QACb,IAAI,EAAE,cAAc,CAAC,KAAK;KAC3B,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;GAGG;AACH,MAAM,UAAU,UAAU,CAAC,MAAc;IACvC,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,CAAC;AACnC,CAAC;AAED;IAGE,uBAAY,MAAc;QACxB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QACtB,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACnE,CAAC;IA0CD,sCAAc,GAAd,UACE,IAAY,EACZ,IAA4B,EAC5B,IAAkB,EAClB,IAAQ;QAEF,IAAA,KAAoB,aAAa,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,EAAjD,IAAI,UAAA,EAAE,GAAG,SAAA,EAAE,EAAE,QAAoC,CAAC;QAE1D,OAAO,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,IAAI,EAAE,IAAI,EAAE,GAAG,EAAE,UAAC,IAAU;YAC9D,OAAA,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,EAAE,CAAC;QAAxB,CAAwB,CACR,CAAC;IACrB,CAAC;IA4CD,gCAAQ,GAAR,UACE,IAAY,EACZ,IAA4B,EAC5B,IAAkB,EAClB,IAAQ;QAEF,IAAA,KAAoB,aAAa,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,EAAjD,IAAI,UAAA,EAAE,GAAG,SAAA,EAAE,EAAE,QAAoC,CAAC;QAE1D,IAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,GAAG,CAAC,CAAC;QACrD,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,EAAE,CAAkB,CAAC;IACnD,CAAC;IACH,oBAAC;AAAD,CAAC,AAnHD,IAmHC;;AAED;;;;;GAKG;AACH,SAAS,aAAa,CACpB,GAA2B,EAC3B,IAAkB,EAClB,IAAQ;IAER,IAAI,IAAoC,CAAC;IACzC,IAAI,GAAwB,CAAC;IAC7B,IAAI,EAAK,CAAC;IAEV,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,EAAE;QAClB,EAAE,GAAG,GAAQ,CAAC;KACf;SAAM,IAAI,CAAC,IAAI,EAAE;QAChB,IAAI,GAAG,GAAyB,CAAC;QACjC,EAAE,GAAG,IAAS,CAAC;KAChB;SAAM;QACL,IAAI,GAAG,GAAyB,CAAC;QACjC,GAAG,GAAG,IAAe,CAAC;QACtB,EAAE,GAAG,IAAS,CAAC;KAChB;IACD,IAAI,GAAG,IAAI,aAAJ,IAAI,cAAJ,IAAI,GAAI,EAAE,CAAC;IAClB,GAAG,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAE9B,OAAO,EAAE,IAAI,MAAA,EAAE,GAAG,KAAA,EAAE,EAAE,IAAA,EAAE,CAAC;AAC3B,CAAC;AAED;;;;;GAKG;AACH,SAAS,QAAQ,CACf,IAAU,EACV,IAAwB,EACxB,EAAK;;IAEL,IAAM,WAAW,GAAG,MAAA,IAAI,CAAC,WAAW,mCAAI,kBAAkB,CAAC;IAC3D,IAAM,YAAY,GAAG,UAAC,CAAQ;QAC5B,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QACrB,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,MAAM,CAAC,CAAC;IACV,CAAC,CAAC;IAEF,IAAI;QACF,IAAM,GAAG,GAAG,EAAE,CAAC,IAAI,CAA2B,CAAC;QAC/C,2FAA2F;QAC3F,IAAI,OAAO,CAAA,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,IAAI,CAAA,KAAK,UAAU,EAAE;YACnC,OAAO,GAAG,CAAC,IAAI,CAAC,UAAA,GAAG;gBACjB,IAAI,CAAC,GAAG,EAAE,CAAC;gBACX,OAAO,GAAG,CAAC;YACb,CAAC,EAAE,YAAY,CAAkB,CAAC;SACnC;QACD,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,OAAO,GAAoB,CAAC;KAC7B;IAAC,OAAO,CAAC,EAAE;QACV,2EAA
2E;QAC3E,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;AACH,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { SugaredSpanOptions } from './SugaredOptions';\nimport { context, Context, Span, SpanStatusCode, Tracer } from '../../';\n\nconst defaultOnException = (e: Error, span: Span) => {\n span.recordException(e);\n span.setStatus({\n code: SpanStatusCode.ERROR,\n });\n};\n\n/**\n * return a new SugaredTracer created from the supplied one\n * @param tracer\n */\nexport function wrapTracer(tracer: Tracer): SugaredTracer {\n return new SugaredTracer(tracer);\n}\n\nexport class SugaredTracer implements Tracer {\n private readonly _tracer: Tracer;\n\n constructor(tracer: Tracer) {\n this._tracer = tracer;\n this.startSpan = tracer.startSpan.bind(this._tracer);\n this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer);\n }\n\n startActiveSpan: Tracer['startActiveSpan'];\n startSpan: Tracer['startSpan'];\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally, the new span gets set in context and this context is activated\n * for the duration of the function call.\n * The span will be closed after the function has executed.\n * If an exception occurs, it is recorded, the status is set to ERROR and the exception is rethrown.\n *\n * @param name The name of the span\n * @param [options] SugaredSpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.withActiveSpan('op', span => {\n * // do some work\n * });\n * @example\n * const something = await tracer.withActiveSpan('op', span => {\n * // do some async work\n * });\n */\n withActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n arg2: F | SugaredSpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType {\n const { opts, ctx, fn } = massageParams(arg2, arg3, arg4);\n\n return this._tracer.startActiveSpan(name, opts, ctx, (span: Span) =>\n handleFn(span, opts, fn)\n ) as ReturnType;\n }\n\n /**\n * Starts a new {@link Span} and ends it after execution of fn without setting it on context.\n * The span will be closed after the function has executed.\n * If an exception occurs, it is recorded, the status is et to ERROR and rethrown.\n *\n * This method does NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SugaredSpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of 
the span and receives the newly created span as an argument\n * @returns Span The newly created span\n * @example\n * const something = tracer.withSpan('op', span => {\n * // do some work\n * });\n * @example\n * const something = await tracer.withSpan('op', span => {\n * // do some async work\n * });\n */\n withSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n arg2: SugaredSpanOptions | F,\n arg3?: Context | F,\n arg4?: F\n ): ReturnType {\n const { opts, ctx, fn } = massageParams(arg2, arg3, arg4);\n\n const span = this._tracer.startSpan(name, opts, ctx);\n return handleFn(span, opts, fn) as ReturnType;\n }\n}\n\n/**\n * Massages parameters of withSpan and withActiveSpan to allow signature overwrites\n * @param arg\n * @param arg2\n * @param arg3\n */\nfunction massageParams ReturnType>(\n arg: F | SugaredSpanOptions,\n arg2?: F | Context,\n arg3?: F\n) {\n let opts: SugaredSpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (!arg2 && !arg3) {\n fn = arg as F;\n } else if (!arg3) {\n opts = arg as SugaredSpanOptions;\n fn = arg2 as F;\n } else {\n opts = arg as SugaredSpanOptions;\n ctx = arg2 as Context;\n fn = arg3 as F;\n }\n opts = opts ?? {};\n ctx = ctx ?? context.active();\n\n return { opts, ctx, fn };\n}\n\n/**\n * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling\n * @param span\n * @param opts\n * @param fn\n */\nfunction handleFn ReturnType>(\n span: Span,\n opts: SugaredSpanOptions,\n fn: F\n): ReturnType {\n const onException = opts.onException ?? 
defaultOnException;\n const errorHandler = (e: Error) => {\n onException(e, span);\n span.end();\n throw e;\n };\n\n try {\n const ret = fn(span) as Promise>;\n // if fn is an async function, attach a recordException and spanEnd callback to the promise\n if (typeof ret?.then === 'function') {\n return ret.then(val => {\n span.end();\n return val;\n }, errorHandler) as ReturnType;\n }\n span.end();\n return ret as ReturnType;\n } catch (e) {\n // add throw to signal the compiler that this will throw in the inner scope\n throw errorHandler(e);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.d.ts b/node_modules/@opentelemetry/api/build/esm/index.d.ts new file mode 100644 index 0000000..89ae493 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.d.ts @@ -0,0 +1,54 @@ +export { BaggageEntry, BaggageEntryMetadata, Baggage } from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export { Exception } from './common/Exception'; +export { HrTime, TimeInput } from './common/Time'; +export { Attributes, AttributeValue } from './common/Attributes'; +export { createContextKey, ROOT_CONTEXT } from './context/context'; +export { Context, ContextManager } from './context/types'; +export type { ContextAPI } from './api/context'; +export { DiagConsoleLogger } from './diag/consoleLogger'; +export { DiagLogFunction, DiagLogger, DiagLogLevel, ComponentLoggerOptions, DiagLoggerOptions, } from './diag/types'; +export type { DiagAPI } from './api/diag'; +export { createNoopMeter } from './metrics/NoopMeter'; +export { MeterOptions, Meter } from './metrics/Meter'; +export { MeterProvider } from './metrics/MeterProvider'; +export { ValueType, Counter, Histogram, MetricOptions, Observable, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter, BatchObservableCallback, MetricAdvice, MetricAttributes, MetricAttributeValue, ObservableCallback, } from './metrics/Metric'; +export { BatchObservableResult, ObservableResult, } from './metrics/ObservableResult'; +export type { MetricsAPI } from './api/metrics'; +export { TextMapPropagator, TextMapSetter, TextMapGetter, defaultTextMapGetter, defaultTextMapSetter, } from './propagation/TextMapPropagator'; +export type { PropagationAPI } from './api/propagation'; +export { SpanAttributes, SpanAttributeValue } from './trace/attributes'; +export { Link } from './trace/link'; +export { ProxyTracer, TracerDelegator } from './trace/ProxyTracer'; +export { ProxyTracerProvider } from './trace/ProxyTracerProvider'; +export { Sampler } from './trace/Sampler'; +export { SamplingDecision, SamplingResult } from './trace/SamplingResult'; +export { SpanContext } from './trace/span_context'; +export { SpanKind } from './trace/span_kind'; +export { Span } from './trace/span'; +export { SpanOptions } from './trace/SpanOptions'; +export { SpanStatus, SpanStatusCode } from './trace/status'; +export { TraceFlags } from './trace/trace_flags'; +export { TraceState } from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export { TracerProvider } from './trace/tracer_provider'; +export { Tracer } from './trace/tracer'; +export { TracerOptions } from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export type { TraceAPI } from './api/trace'; +import { context } from 
'./context-api'; +import { diag } from './diag-api'; +import { metrics } from './metrics-api'; +import { propagation } from './propagation-api'; +import { trace } from './trace-api'; +export { context, diag, metrics, propagation, trace }; +declare const _default: { + context: import("./api/context").ContextAPI; + diag: import("./api/diag").DiagAPI; + metrics: import("./api/metrics").MetricsAPI; + propagation: import("./api/propagation").PropagationAPI; + trace: import("./api/trace").TraceAPI; +}; +export default _default; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.js b/node_modules/@opentelemetry/api/build/esm/index.js new file mode 100644 index 0000000..70cd870 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.js @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export { baggageEntryMetadataFromString } from './baggage/utils'; +// Context APIs +export { createContextKey, ROOT_CONTEXT } from './context/context'; +// Diag APIs +export { DiagConsoleLogger } from './diag/consoleLogger'; +export { DiagLogLevel, } from './diag/types'; +// Metrics APIs +export { createNoopMeter } from './metrics/NoopMeter'; +export { ValueType, } from './metrics/Metric'; +// Propagation APIs +export { defaultTextMapGetter, defaultTextMapSetter, } from './propagation/TextMapPropagator'; +export { ProxyTracer } from './trace/ProxyTracer'; +export { ProxyTracerProvider } from './trace/ProxyTracerProvider'; +export { SamplingDecision } from './trace/SamplingResult'; +export { SpanKind } from './trace/span_kind'; +export { SpanStatusCode } from './trace/status'; +export { TraceFlags } from './trace/trace_flags'; +export { createTraceState } from './trace/internal/utils'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { context } from './context-api'; +import { diag } from './diag-api'; +import { metrics } from './metrics-api'; +import { propagation } from './propagation-api'; +import { trace } from './trace-api'; +// Named export. +export { context, diag, metrics, propagation, trace }; +// Default export. 
+export default { + context: context, + diag: diag, + metrics: metrics, + propagation: propagation, + trace: trace, +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.js.map b/node_modules/@opentelemetry/api/build/esm/index.js.map new file mode 100644 index 0000000..1df21a1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,8BAA8B,EAAE,MAAM,iBAAiB,CAAC;AAKjE,eAAe;AACf,OAAO,EAAE,gBAAgB,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAInE,YAAY;AACZ,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AACzD,OAAO,EAGL,YAAY,GAGb,MAAM,cAAc,CAAC;AAGtB,eAAe;AACf,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AAGtD,OAAO,EACL,SAAS,GAcV,MAAM,kBAAkB,CAAC;AAO1B,mBAAmB;AACnB,OAAO,EAIL,oBAAoB,EACpB,oBAAoB,GACrB,MAAM,iCAAiC,CAAC;AAMzC,OAAO,EAAE,WAAW,EAAmB,MAAM,qBAAqB,CAAC;AACnE,OAAO,EAAE,mBAAmB,EAAE,MAAM,6BAA6B,CAAC;AAElE,OAAO,EAAE,gBAAgB,EAAkB,MAAM,wBAAwB,CAAC;AAE1E,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAG7C,OAAO,EAAc,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAC5D,OAAO,EAAE,UAAU,EAAE,MAAM,qBAAqB,CAAC;AAEjD,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAI1D,OAAO,EACL,kBAAkB,EAClB,cAAc,EACd,aAAa,GACd,MAAM,2BAA2B,CAAC;AACnC,OAAO,EACL,cAAc,EACd,eAAe,EACf,oBAAoB,GACrB,MAAM,gCAAgC,CAAC;AAGxC,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AACxC,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AACxC,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAChD,OAAO,EAAE,KAAK,EAAE,MAAM,aAAa,CAAC;AAEpC,gBAAgB;AAChB,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,KAAK,EAAE,CAAC;AACtD,kBAAkB;AAClB,eAAe;IACb,OAAO,SAAA;IACP,IAAI,MAAA;IACJ,OAAO,SAAA;IACP,WAAW,aAAA;IACX,KAAK,OAAA;CACN,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport { BaggageEntry, BaggageEntryMetadata, Baggage } from './baggage/types';\nexport { baggageEntryMetadataFromString } from './baggage/utils';\nexport { Exception } from './common/Exception';\nexport { HrTime, TimeInput } from './common/Time';\nexport { Attributes, AttributeValue } from './common/Attributes';\n\n// Context APIs\nexport { createContextKey, ROOT_CONTEXT } from './context/context';\nexport { Context, ContextManager } from './context/types';\nexport type { ContextAPI } from './api/context';\n\n// Diag APIs\nexport { DiagConsoleLogger } from './diag/consoleLogger';\nexport {\n DiagLogFunction,\n DiagLogger,\n DiagLogLevel,\n ComponentLoggerOptions,\n DiagLoggerOptions,\n} from './diag/types';\nexport type { DiagAPI } from './api/diag';\n\n// Metrics APIs\nexport { createNoopMeter } from './metrics/NoopMeter';\nexport { MeterOptions, Meter } from './metrics/Meter';\nexport { MeterProvider } from './metrics/MeterProvider';\nexport {\n ValueType,\n Counter,\n Histogram,\n MetricOptions,\n Observable,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n 
UpDownCounter,\n BatchObservableCallback,\n MetricAdvice,\n MetricAttributes,\n MetricAttributeValue,\n ObservableCallback,\n} from './metrics/Metric';\nexport {\n BatchObservableResult,\n ObservableResult,\n} from './metrics/ObservableResult';\nexport type { MetricsAPI } from './api/metrics';\n\n// Propagation APIs\nexport {\n TextMapPropagator,\n TextMapSetter,\n TextMapGetter,\n defaultTextMapGetter,\n defaultTextMapSetter,\n} from './propagation/TextMapPropagator';\nexport type { PropagationAPI } from './api/propagation';\n\n// Trace APIs\nexport { SpanAttributes, SpanAttributeValue } from './trace/attributes';\nexport { Link } from './trace/link';\nexport { ProxyTracer, TracerDelegator } from './trace/ProxyTracer';\nexport { ProxyTracerProvider } from './trace/ProxyTracerProvider';\nexport { Sampler } from './trace/Sampler';\nexport { SamplingDecision, SamplingResult } from './trace/SamplingResult';\nexport { SpanContext } from './trace/span_context';\nexport { SpanKind } from './trace/span_kind';\nexport { Span } from './trace/span';\nexport { SpanOptions } from './trace/SpanOptions';\nexport { SpanStatus, SpanStatusCode } from './trace/status';\nexport { TraceFlags } from './trace/trace_flags';\nexport { TraceState } from './trace/trace_state';\nexport { createTraceState } from './trace/internal/utils';\nexport { TracerProvider } from './trace/tracer_provider';\nexport { Tracer } from './trace/tracer';\nexport { TracerOptions } from './trace/tracer_options';\nexport {\n isSpanContextValid,\n isValidTraceId,\n isValidSpanId,\n} from './trace/spancontext-utils';\nexport {\n INVALID_SPANID,\n INVALID_TRACEID,\n INVALID_SPAN_CONTEXT,\n} from './trace/invalid-span-constants';\nexport type { TraceAPI } from './api/trace';\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { context } from './context-api';\nimport { diag } from './diag-api';\nimport { metrics } from './metrics-api';\nimport { propagation } from './propagation-api';\nimport { trace } from './trace-api';\n\n// Named export.\nexport { context, diag, metrics, propagation, trace };\n// Default export.\nexport default {\n context,\n diag,\n metrics,\n propagation,\n trace,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts new file mode 100644 index 0000000..320db97 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts @@ -0,0 +1,18 @@ +import { MeterProvider } from '../metrics/MeterProvider'; +import { ContextManager } from '../context/types'; +import { DiagLogger } from '../diag/types'; +import { TextMapPropagator } from '../propagation/TextMapPropagator'; +import type { TracerProvider } from '../trace/tracer_provider'; +export declare function registerGlobal(type: Type, instance: OTelGlobalAPI[Type], diag: DiagLogger, allowOverride?: boolean): boolean; +export declare function getGlobal(type: Type): OTelGlobalAPI[Type] | undefined; +export declare function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger): void; +declare type OTelGlobalAPI = { + version: string; + diag?: DiagLogger; + trace?: TracerProvider; + context?: ContextManager; + metrics?: MeterProvider; + propagation?: TextMapPropagator; +}; +export {}; +//# sourceMappingURL=global-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js 
b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js new file mode 100644 index 0000000..88e82a1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { _globalThis } from '../platform'; +import { VERSION } from '../version'; +import { isCompatible } from './semver'; +var major = VERSION.split('.')[0]; +var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." + major); +var _global = _globalThis; +export function registerGlobal(type, instance, diag, allowOverride) { + var _a; + if (allowOverride === void 0) { allowOverride = false; } + var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== VERSION) { + // All registered APIs must be of the same version exactly + var err = new Error("@opentelemetry/api: Registration of version v" + api.version + " for " + type + " does not match previously registered API v" + VERSION); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + VERSION + "."); + return true; +} +export function getGlobal(type) { + var _a, _b; + var globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !isCompatible(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? 
void 0 : _b[type]; +} +export function unregisterGlobal(type, diag) { + diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + VERSION + "."); + var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map new file mode 100644 index 0000000..41420b9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"global-utils.js","sourceRoot":"","sources":["../../../src/internal/global-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAG1C,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAExC,IAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,IAAM,4BAA4B,GAAG,MAAM,CAAC,GAAG,CAC7C,0BAAwB,KAAO,CAChC,CAAC;AAEF,IAAM,OAAO,GAAG,WAAyB,CAAC;AAE1C,MAAM,UAAU,cAAc,CAC5B,IAAU,EACV,QAA6B,EAC7B,IAAgB,EAChB,aAAqB;;IAArB,8BAAA,EAAA,qBAAqB;IAErB,IAAM,GAAG,GAAG,CAAC,OAAO,CAAC,4BAA4B,CAAC,GAAG,MAAA,OAAO,CAC1D,4BAA4B,CAC7B,mCAAI;QACH,OAAO,EAAE,OAAO;KACjB,CAAC,CAAC;IAEH,IAAI,CAAC,aAAa,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE;QAC/B,yCAAyC;QACzC,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,kEAAgE,IAAM,CACvE,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,IAAI,GAAG,CAAC,OAAO,KAAK,OAAO,EAAE;QAC3B,0DAA0D;QAC1D,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,kDAAgD,GAAG,CAAC,OAAO,aAAQ,IAAI,mDAA8C,OAAS,CAC/H,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,GAAG,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;IACrB,IAAI,CAAC,KAAK,CACR,iDAA+C,IAAI,UAAK,OAAO,MAAG,CACnE,CAAC;IAEF,OAAO,IAAI,CAAC;AACd,CAAC;AAED,MAAM,UAAU,SAAS,CACvB,IAAU;;IAEV,IAAM,aAAa,GAAG,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAE,OAAO,CAAC;IACrE,IAAI,CAAC,aAAa,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,EAAE;QAClD,OAAO;KACR;IACD,OAAO,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAG,IAAI,CAAC,CAAC;AACvD,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,IAAyB,EAAE,IAAgB;IAC1E,IAAI,CAAC,KAAK,CACR,oDAAkD,IAAI,UAAK,OAAO,MAAG,CACtE,CAAC;IACF,IAAM,GAAG,GAAG,OAAO,CAAC,4BAA4B,CAAC,CAAC;IAElD,IAAI,GAAG,EAAE;QACP,OAAO,GAAG,CAAC,IAAI,CAAC,CAAC;KAClB;AACH,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { MeterProvider } from '../metrics/MeterProvider';\nimport { ContextManager } from '../context/types';\nimport { DiagLogger } from '../diag/types';\nimport { _globalThis } from '../platform';\nimport { TextMapPropagator } from '../propagation/TextMapPropagator';\nimport type { TracerProvider } from '../trace/tracer_provider';\nimport { VERSION } from '../version';\nimport { isCompatible } from './semver';\n\nconst major = VERSION.split('.')[0];\nconst GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(\n `opentelemetry.js.api.${major}`\n);\n\nconst _global = 
_globalThis as OTelGlobal;\n\nexport function registerGlobal(\n type: Type,\n instance: OTelGlobalAPI[Type],\n diag: DiagLogger,\n allowOverride = false\n): boolean {\n const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = _global[\n GLOBAL_OPENTELEMETRY_API_KEY\n ] ?? {\n version: VERSION,\n });\n\n if (!allowOverride && api[type]) {\n // already registered an API of this type\n const err = new Error(\n `@opentelemetry/api: Attempted duplicate registration of API: ${type}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n if (api.version !== VERSION) {\n // All registered APIs must be of the same version exactly\n const err = new Error(\n `@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${VERSION}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n api[type] = instance;\n diag.debug(\n `@opentelemetry/api: Registered a global for ${type} v${VERSION}.`\n );\n\n return true;\n}\n\nexport function getGlobal(\n type: Type\n): OTelGlobalAPI[Type] | undefined {\n const globalVersion = _global[GLOBAL_OPENTELEMETRY_API_KEY]?.version;\n if (!globalVersion || !isCompatible(globalVersion)) {\n return;\n }\n return _global[GLOBAL_OPENTELEMETRY_API_KEY]?.[type];\n}\n\nexport function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger) {\n diag.debug(\n `@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`\n );\n const api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n\n if (api) {\n delete api[type];\n }\n}\n\ntype OTelGlobal = {\n [GLOBAL_OPENTELEMETRY_API_KEY]?: OTelGlobalAPI;\n};\n\ntype OTelGlobalAPI = {\n version: string;\n\n diag?: DiagLogger;\n trace?: TracerProvider;\n context?: ContextManager;\n metrics?: MeterProvider;\n propagation?: TextMapPropagator;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts b/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts new file mode 100644 index 0000000..d9f4259 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts @@ -0,0 +1,34 @@ +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. + * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export declare function _makeCompatibilityCheck(ownVersion: string): (globalVersion: string) => boolean; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export declare const isCompatible: (globalVersion: string) => boolean; +//# sourceMappingURL=semver.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.js b/node_modules/@opentelemetry/api/build/esm/internal/semver.js new file mode 100644 index 0000000..2a788a0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.js @@ -0,0 +1,118 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { VERSION } from '../version'; +var re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export function _makeCompatibilityCheck(ownVersion) { + var acceptedVersions = new Set([ownVersion]); + var rejectedVersions = new Set(); + var myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return function () { return false; }; + } + var ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + var globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + var globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export var isCompatible = _makeCompatibilityCheck(VERSION); +//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map b/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map new file mode 100644 index 0000000..75579a4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map @@ -0,0 +1 @@ +{"version":3,"file":"semver.js","sourceRoot":"","sources":["../../../src/internal/semver.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAErC,IAAM,EAAE,GAAG,+BAA+B,CAAC;AAE3C;;;;;;;;;;;;;;;GAeG;AACH,MAAM,UAAU,uBAAuB,CACrC,UAAkB;IAElB,IAAM,gBAAgB,GAAG,IAAI,GAAG,CAAS,CAAC,UAAU,CAAC,CAAC,CAAC;IACvD,IAAM,gBAAgB,GAAG,IAAI,GAAG,EAAU,CAAC;IAE3C,IAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC5C,IAAI,CAAC,cAAc,EAAE;QACnB,6DAA6D;QAC7D,OAAO,cAAM,OAAA,KAAK,EAAL,CAAK,CAAC;KACpB;IAED,IAAM,gBAAgB,GAAG;QACvB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,UAAU,EAAE,cAAc,CAAC,CAAC,CAAC;KAC9B,CAAC;IAEF,kEAAkE;IAClE,IAAI,gBAAgB,CAAC,UAAU,IAAI,IAAI,EAAE;QACvC,OAAO,SAAS,YAAY,CAAC,aAAqB;YAChD,OAAO,aAAa,KAAK,UAAU,CAAC;QACtC,CAAC,CAAC;KACH;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,SAAS,YAAY,CAAC,aAAqB;QAChD,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,IAAI,CAAC;SACb;QAED,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;QAED,IAAM,kBAAkB,GAAG,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QACnD,IAAI,CAAC,kBAAkB,EAAE;YACvB,6BAA6B;YAC7B,sDAAsD;YACtD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAM,mBAAmB,GAAG;YAC1B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,UAAU,EAAE,kBAAkB,CAAC,CAAC,CAAC;SAClC,CAAC;QAEF,qEAAqE;QACrE,IAAI,mBAAmB,CAAC,UAAU,IAAI,IAAI,EAAE;YAC1C,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,4BAA4B;QAC5B,IAAI,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK,EAAE;YACxD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,KAAK,CAAC,EAAE;YAChC,IACE,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK;gBACpD,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EACnD;gBACA,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EAAE;YACvD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,IAAM,YAAY,GAAG,uBAAuB,CAAC,OAAO,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache 
License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { VERSION } from '../version';\n\nconst re = /^(\\d+)\\.(\\d+)\\.(\\d+)(-(.+))?$/;\n\n/**\n * Create a function to test an API version to see if it is compatible with the provided ownVersion.\n *\n * The returned function has the following semantics:\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param ownVersion version which should be checked against\n */\nexport function _makeCompatibilityCheck(\n ownVersion: string\n): (globalVersion: string) => boolean {\n const acceptedVersions = new Set([ownVersion]);\n const rejectedVersions = new Set();\n\n const myVersionMatch = ownVersion.match(re);\n if (!myVersionMatch) {\n // we cannot guarantee compatibility so we always return noop\n return () => false;\n }\n\n const ownVersionParsed = {\n major: +myVersionMatch[1],\n minor: +myVersionMatch[2],\n patch: +myVersionMatch[3],\n prerelease: myVersionMatch[4],\n };\n\n // if ownVersion has a prerelease tag, versions must match exactly\n if (ownVersionParsed.prerelease != null) {\n return function isExactmatch(globalVersion: string): boolean {\n return globalVersion === ownVersion;\n };\n }\n\n function _reject(v: string) {\n rejectedVersions.add(v);\n return false;\n }\n\n function _accept(v: string) {\n acceptedVersions.add(v);\n return true;\n }\n\n return function isCompatible(globalVersion: string): boolean {\n if (acceptedVersions.has(globalVersion)) {\n return true;\n }\n\n if (rejectedVersions.has(globalVersion)) {\n return false;\n }\n\n const globalVersionMatch = globalVersion.match(re);\n if (!globalVersionMatch) {\n // cannot parse other version\n // we cannot guarantee compatibility so we always noop\n return _reject(globalVersion);\n }\n\n const globalVersionParsed = {\n major: +globalVersionMatch[1],\n minor: +globalVersionMatch[2],\n patch: +globalVersionMatch[3],\n prerelease: globalVersionMatch[4],\n };\n\n // if globalVersion has a prerelease tag, versions must match exactly\n if (globalVersionParsed.prerelease != null) {\n return _reject(globalVersion);\n }\n\n // major versions must match\n if (ownVersionParsed.major !== globalVersionParsed.major) {\n return _reject(globalVersion);\n }\n\n if (ownVersionParsed.major === 0) {\n if (\n ownVersionParsed.minor === globalVersionParsed.minor &&\n ownVersionParsed.patch <= globalVersionParsed.patch\n ) {\n return _accept(globalVersion);\n }\n\n 
return _reject(globalVersion);\n }\n\n if (ownVersionParsed.minor <= globalVersionParsed.minor) {\n return _accept(globalVersion);\n }\n\n return _reject(globalVersion);\n };\n}\n\n/**\n * Test an API version to see if it is compatible with this API.\n *\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param version version of the API requesting an instance of the global API\n */\nexport const isCompatible = _makeCompatibilityCheck(VERSION);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics-api.d.ts b/node_modules/@opentelemetry/api/build/esm/metrics-api.d.ts new file mode 100644 index 0000000..26d539c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics-api.d.ts @@ -0,0 +1,4 @@ +import { MetricsAPI } from './api/metrics'; +/** Entrypoint for metrics API */ +export declare const metrics: MetricsAPI; +//# sourceMappingURL=metrics-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics-api.js b/node_modules/@opentelemetry/api/build/esm/metrics-api.js new file mode 100644 index 0000000..145087f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics-api.js @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. 
+import { MetricsAPI } from './api/metrics'; +/** Entrypoint for metrics API */ +export var metrics = MetricsAPI.getInstance(); +//# sourceMappingURL=metrics-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics-api.js.map b/node_modules/@opentelemetry/api/build/esm/metrics-api.js.map new file mode 100644 index 0000000..69dd3e2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"metrics-api.js","sourceRoot":"","sources":["../../src/metrics-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAC3C,iCAAiC;AACjC,MAAM,CAAC,IAAM,OAAO,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { MetricsAPI } from './api/metrics';\n/** Entrypoint for metrics API */\nexport const metrics = MetricsAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/Meter.d.ts b/node_modules/@opentelemetry/api/build/esm/metrics/Meter.d.ts new file mode 100644 index 0000000..fb26fc8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/Meter.d.ts @@ -0,0 +1,104 @@ +import { BatchObservableCallback, Counter, Histogram, MetricAttributes, MetricOptions, Observable, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter } from './Metric'; +/** + * An interface describes additional metadata of a meter. + */ +export interface MeterOptions { + /** + * The schemaUrl of the meter or instrumentation library + */ + schemaUrl?: string; +} +/** + * An interface to allow the recording metrics. + * + * {@link Metric}s are used for recording pre-defined aggregation (`Counter`), + * or raw values (`Histogram`) in which the aggregation and attributes + * for the exported metric are deferred. + */ +export interface Meter { + /** + * Creates and returns a new `Histogram`. + * @param name the name of the metric. + * @param [options] the metric options. + */ + createHistogram(name: string, options?: MetricOptions): Histogram; + /** + * Creates a new `Counter` metric. Generally, this kind of metric when the + * value is a quantity, the sum is of primary interest, and the event count + * and value distribution are not of primary interest. + * @param name the name of the metric. + * @param [options] the metric options. + */ + createCounter(name: string, options?: MetricOptions): Counter; + /** + * Creates a new `UpDownCounter` metric. UpDownCounter is a synchronous + * instrument and very similar to Counter except that Add(increment) + * supports negative increments. It is generally useful for capturing changes + * in an amount of resources used, or any quantity that rises and falls + * during a request. + * Example uses for UpDownCounter: + *
<ol>
+ *   <li> count the number of active requests. </li>
+ *   <li> count memory in use by instrumenting new and delete. </li>
+ *   <li> count queue size by instrumenting enqueue and dequeue. </li>
+ *   <li> count semaphore up and down operations. </li>
+ * </ol>
+ * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createUpDownCounter(name: string, options?: MetricOptions): UpDownCounter; + /** + * Creates a new `ObservableGauge` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableGauge(name: string, options?: MetricOptions): ObservableGauge; + /** + * Creates a new `ObservableCounter` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableCounter(name: string, options?: MetricOptions): ObservableCounter; + /** + * Creates a new `ObservableUpDownCounter` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableUpDownCounter(name: string, options?: MetricOptions): ObservableUpDownCounter; + /** + * Sets up a function that will be called whenever a metric collection is + * initiated. + * + * If the function is already in the list of callbacks for this Observable, + * the function is not added a second time. + * + * Only the associated observables can be observed in the callback. + * Measurements of observables that are not associated observed in the + * callback are dropped. + * + * @param callback the batch observable callback + * @param observables the observables associated with this batch observable callback + */ + addBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]): void; + /** + * Removes a callback previously registered with {@link Meter.addBatchObservableCallback}. + * + * The callback to be removed is identified using a combination of the callback itself, + * and the set of the observables associated with it. + * + * @param callback the batch observable callback + * @param observables the observables associated with this batch observable callback + */ + removeBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]): void; +} +//# sourceMappingURL=Meter.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js b/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js new file mode 100644 index 0000000..f1d0754 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Meter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js.map b/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js.map new file mode 100644 index 0000000..8c6841d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Meter.js","sourceRoot":"","sources":["../../../src/metrics/Meter.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n BatchObservableCallback,\n Counter,\n Histogram,\n MetricAttributes,\n MetricOptions,\n Observable,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n UpDownCounter,\n} from './Metric';\n\n/**\n * An interface describes additional metadata of a meter.\n */\nexport interface MeterOptions {\n /**\n * The schemaUrl of the meter or instrumentation library\n */\n schemaUrl?: string;\n}\n\n/**\n * An interface to allow the recording metrics.\n *\n * {@link Metric}s are used for recording pre-defined aggregation (`Counter`),\n * or raw values (`Histogram`) in which the aggregation and attributes\n * for the exported metric are deferred.\n */\nexport interface Meter {\n /**\n * Creates and returns a new `Histogram`.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createHistogram(\n name: string,\n options?: MetricOptions\n ): Histogram;\n\n /**\n * Creates a new `Counter` metric. Generally, this kind of metric when the\n * value is a quantity, the sum is of primary interest, and the event count\n * and value distribution are not of primary interest.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createCounter(\n name: string,\n options?: MetricOptions\n ): Counter;\n\n /**\n * Creates a new `UpDownCounter` metric. UpDownCounter is a synchronous\n * instrument and very similar to Counter except that Add(increment)\n * supports negative increments. It is generally useful for capturing changes\n * in an amount of resources used, or any quantity that rises and falls\n * during a request.\n * Example uses for UpDownCounter:\n *
<ol>\n *   <li> count the number of active requests. </li>\n *   <li> count memory in use by instrumenting new and delete. </li>\n *   <li> count queue size by instrumenting enqueue and dequeue. </li>\n *   <li> count semaphore up and down operations. </li>\n * </ol>
\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): UpDownCounter;\n\n /**\n * Creates a new `ObservableGauge` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableGauge<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableGauge;\n\n /**\n * Creates a new `ObservableCounter` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableCounter;\n\n /**\n * Creates a new `ObservableUpDownCounter` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableUpDownCounter;\n\n /**\n * Sets up a function that will be called whenever a metric collection is\n * initiated.\n *\n * If the function is already in the list of callbacks for this Observable,\n * the function is not added a second time.\n *\n * Only the associated observables can be observed in the callback.\n * Measurements of observables that are not associated observed in the\n * callback are dropped.\n *\n * @param callback the batch observable callback\n * @param observables the observables associated with this batch observable callback\n */\n addBatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n callback: BatchObservableCallback,\n observables: Observable[]\n ): void;\n\n /**\n * Removes a callback previously registered with {@link Meter.addBatchObservableCallback}.\n *\n * The callback to be removed is identified using a combination of the callback itself,\n * and the set of the observables associated with it.\n *\n * @param callback the batch observable callback\n * @param observables the observables associated with this batch observable callback\n */\n removeBatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n callback: BatchObservableCallback,\n observables: Observable[]\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.d.ts b/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.d.ts new file mode 100644 index 0000000..6c08cc3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.d.ts @@ -0,0 +1,17 @@ +import { Meter, MeterOptions } from './Meter'; +/** + * A registry for creating named {@link Meter}s. + */ +export interface MeterProvider { + /** + * Returns a Meter, creating one if one with the given name, version, and + * schemaUrl pair is not already created. + * + * @param name The name of the meter or instrumentation library. + * @param version The version of the meter or instrumentation library. + * @param options The options of the meter or instrumentation library. 
+ * @returns Meter A Meter with the given name and version + */ + getMeter(name: string, version?: string, options?: MeterOptions): Meter; +} +//# sourceMappingURL=MeterProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js b/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js new file mode 100644 index 0000000..3051712 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=MeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js.map b/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js.map new file mode 100644 index 0000000..8f96d90 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"MeterProvider.js","sourceRoot":"","sources":["../../../src/metrics/MeterProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, MeterOptions } from './Meter';\n\n/**\n * A registry for creating named {@link Meter}s.\n */\nexport interface MeterProvider {\n /**\n * Returns a Meter, creating one if one with the given name, version, and\n * schemaUrl pair is not already created.\n *\n * @param name The name of the meter or instrumentation library.\n * @param version The version of the meter or instrumentation library.\n * @param options The options of the meter or instrumentation library.\n * @returns Meter A Meter with the given name and version\n */\n getMeter(name: string, version?: string, options?: MeterOptions): Meter;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/Metric.d.ts b/node_modules/@opentelemetry/api/build/esm/metrics/Metric.d.ts new file mode 100644 index 0000000..87a41a8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/Metric.d.ts @@ -0,0 +1,109 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +import { Context } from '../context/types'; +import { BatchObservableResult, ObservableResult } from './ObservableResult'; +/** + * Advisory options influencing aggregation configuration parameters. 
+ * @experimental + */ +export interface MetricAdvice { + /** + * Hint the explicit bucket boundaries for SDK if the metric is been + * aggregated with a HistogramAggregator. + */ + explicitBucketBoundaries?: number[]; +} +/** + * Options needed for metric creation + */ +export interface MetricOptions { + /** + * The description of the Metric. + * @default '' + */ + description?: string; + /** + * The unit of the Metric values. + * @default '' + */ + unit?: string; + /** + * Indicates the type of the recorded value. + * @default {@link ValueType.DOUBLE} + */ + valueType?: ValueType; + /** + * The advice influencing aggregation configuration parameters. + * @experimental + */ + advice?: MetricAdvice; +} +/** The Type of value. It describes how the data is reported. */ +export declare enum ValueType { + INT = 0, + DOUBLE = 1 +} +/** + * Counter is the most common synchronous instrument. This instrument supports + * an `Add(increment)` function for reporting a sum, and is restricted to + * non-negative increments. The default aggregation is Sum, as for any additive + * instrument. + * + * Example uses for Counter: + *
<ol> + *   <li> count the number of bytes received. </li> + *   <li> count the number of requests completed. </li> + *   <li> count the number of accounts created. </li> + *   <li> count the number of checkpoints run. </li> + *   <li> count the number of 5xx errors. </li> + * </ol>
      + */ +export interface Counter { + /** + * Increment value of counter by the input. Inputs must not be negative. + */ + add(value: number, attributes?: AttributesTypes, context?: Context): void; +} +export interface UpDownCounter { + /** + * Increment value of counter by the input. Inputs may be negative. + */ + add(value: number, attributes?: AttributesTypes, context?: Context): void; +} +export interface Histogram { + /** + * Records a measurement. Value of the measurement must not be negative. + */ + record(value: number, attributes?: AttributesTypes, context?: Context): void; +} +/** + * @deprecated please use {@link Attributes} + */ +export declare type MetricAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type MetricAttributeValue = AttributeValue; +/** + * The observable callback for Observable instruments. + */ +export declare type ObservableCallback = (observableResult: ObservableResult) => void | Promise; +/** + * The observable callback for a batch of Observable instruments. + */ +export declare type BatchObservableCallback = (observableResult: BatchObservableResult) => void | Promise; +export interface Observable { + /** + * Sets up a function that will be called whenever a metric collection is initiated. + * + * If the function is already in the list of callbacks for this Observable, the function is not added a second time. + */ + addCallback(callback: ObservableCallback): void; + /** + * Removes a callback previously registered with {@link Observable.addCallback}. + */ + removeCallback(callback: ObservableCallback): void; +} +export declare type ObservableCounter = Observable; +export declare type ObservableUpDownCounter = Observable; +export declare type ObservableGauge = Observable; +//# sourceMappingURL=Metric.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js b/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js new file mode 100644 index 0000000..6df1374 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** The Type of value. It describes how the data is reported. 
*/ +export var ValueType; +(function (ValueType) { + ValueType[ValueType["INT"] = 0] = "INT"; + ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; +})(ValueType || (ValueType = {})); +//# sourceMappingURL=Metric.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js.map b/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js.map new file mode 100644 index 0000000..c8aef63 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Metric.js","sourceRoot":"","sources":["../../../src/metrics/Metric.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AA+CH,gEAAgE;AAChE,MAAM,CAAN,IAAY,SAGX;AAHD,WAAY,SAAS;IACnB,uCAAG,CAAA;IACH,6CAAM,CAAA;AACR,CAAC,EAHW,SAAS,KAAT,SAAS,QAGpB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\nimport { Context } from '../context/types';\nimport { BatchObservableResult, ObservableResult } from './ObservableResult';\n\n/**\n * Advisory options influencing aggregation configuration parameters.\n * @experimental\n */\nexport interface MetricAdvice {\n /**\n * Hint the explicit bucket boundaries for SDK if the metric is been\n * aggregated with a HistogramAggregator.\n */\n explicitBucketBoundaries?: number[];\n}\n\n/**\n * Options needed for metric creation\n */\nexport interface MetricOptions {\n /**\n * The description of the Metric.\n * @default ''\n */\n description?: string;\n\n /**\n * The unit of the Metric values.\n * @default ''\n */\n unit?: string;\n\n /**\n * Indicates the type of the recorded value.\n * @default {@link ValueType.DOUBLE}\n */\n valueType?: ValueType;\n\n /**\n * The advice influencing aggregation configuration parameters.\n * @experimental\n */\n advice?: MetricAdvice;\n}\n\n/** The Type of value. It describes how the data is reported. */\nexport enum ValueType {\n INT,\n DOUBLE,\n}\n\n/**\n * Counter is the most common synchronous instrument. This instrument supports\n * an `Add(increment)` function for reporting a sum, and is restricted to\n * non-negative increments. The default aggregation is Sum, as for any additive\n * instrument.\n *\n * Example uses for Counter:\n *
<ol>\n *   <li> count the number of bytes received. </li>\n *   <li> count the number of requests completed. </li>\n *   <li> count the number of accounts created. </li>\n *   <li> count the number of checkpoints run. </li>\n *   <li> count the number of 5xx errors. </li>\n * </ol>
          \n */\nexport interface Counter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Increment value of counter by the input. Inputs must not be negative.\n */\n add(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\nexport interface UpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Increment value of counter by the input. Inputs may be negative.\n */\n add(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\nexport interface Histogram<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Records a measurement. Value of the measurement must not be negative.\n */\n record(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type MetricAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type MetricAttributeValue = AttributeValue;\n\n/**\n * The observable callback for Observable instruments.\n */\nexport type ObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = (\n observableResult: ObservableResult\n) => void | Promise;\n\n/**\n * The observable callback for a batch of Observable instruments.\n */\nexport type BatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = (\n observableResult: BatchObservableResult\n) => void | Promise;\n\nexport interface Observable<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Sets up a function that will be called whenever a metric collection is initiated.\n *\n * If the function is already in the list of callbacks for this Observable, the function is not added a second time.\n */\n addCallback(callback: ObservableCallback): void;\n\n /**\n * Removes a callback previously registered with {@link Observable.addCallback}.\n */\n removeCallback(callback: ObservableCallback): void;\n}\n\nexport type ObservableCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\nexport type ObservableUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\nexport type ObservableGauge<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.d.ts b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.d.ts new file mode 100644 index 0000000..0edbd2d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.d.ts @@ -0,0 +1,74 @@ +import { Meter } from './Meter'; +import { BatchObservableCallback, Counter, Histogram, MetricOptions, ObservableCallback, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter, MetricAttributes, Observable } from './Metric'; +/** + * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses + * constant NoopMetrics for all of its methods. 
+ */ +export declare class NoopMeter implements Meter { + constructor(); + /** + * @see {@link Meter.createHistogram} + */ + createHistogram(_name: string, _options?: MetricOptions): Histogram; + /** + * @see {@link Meter.createCounter} + */ + createCounter(_name: string, _options?: MetricOptions): Counter; + /** + * @see {@link Meter.createUpDownCounter} + */ + createUpDownCounter(_name: string, _options?: MetricOptions): UpDownCounter; + /** + * @see {@link Meter.createObservableGauge} + */ + createObservableGauge(_name: string, _options?: MetricOptions): ObservableGauge; + /** + * @see {@link Meter.createObservableCounter} + */ + createObservableCounter(_name: string, _options?: MetricOptions): ObservableCounter; + /** + * @see {@link Meter.createObservableUpDownCounter} + */ + createObservableUpDownCounter(_name: string, _options?: MetricOptions): ObservableUpDownCounter; + /** + * @see {@link Meter.addBatchObservableCallback} + */ + addBatchObservableCallback(_callback: BatchObservableCallback, _observables: Observable[]): void; + /** + * @see {@link Meter.removeBatchObservableCallback} + */ + removeBatchObservableCallback(_callback: BatchObservableCallback): void; +} +export declare class NoopMetric { +} +export declare class NoopCounterMetric extends NoopMetric implements Counter { + add(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopUpDownCounterMetric extends NoopMetric implements UpDownCounter { + add(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopHistogramMetric extends NoopMetric implements Histogram { + record(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopObservableMetric { + addCallback(_callback: ObservableCallback): void; + removeCallback(_callback: ObservableCallback): void; +} +export declare class NoopObservableCounterMetric extends NoopObservableMetric implements ObservableCounter { +} +export declare class NoopObservableGaugeMetric extends NoopObservableMetric implements ObservableGauge { +} +export declare class NoopObservableUpDownCounterMetric extends NoopObservableMetric implements ObservableUpDownCounter { +} +export declare const NOOP_METER: NoopMeter; +export declare const NOOP_COUNTER_METRIC: NoopCounterMetric; +export declare const NOOP_HISTOGRAM_METRIC: NoopHistogramMetric; +export declare const NOOP_UP_DOWN_COUNTER_METRIC: NoopUpDownCounterMetric; +export declare const NOOP_OBSERVABLE_COUNTER_METRIC: NoopObservableCounterMetric; +export declare const NOOP_OBSERVABLE_GAUGE_METRIC: NoopObservableGaugeMetric; +export declare const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC: NoopObservableUpDownCounterMetric; +/** + * Create a no-op Meter + */ +export declare function createNoopMeter(): Meter; +//# sourceMappingURL=NoopMeter.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js new file mode 100644 index 0000000..a96fbe2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js @@ -0,0 +1,165 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +/** + * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses + * constant NoopMetrics for all of its methods. + */ +var NoopMeter = /** @class */ (function () { + function NoopMeter() { + } + /** + * @see {@link Meter.createHistogram} + */ + NoopMeter.prototype.createHistogram = function (_name, _options) { + return NOOP_HISTOGRAM_METRIC; + }; + /** + * @see {@link Meter.createCounter} + */ + NoopMeter.prototype.createCounter = function (_name, _options) { + return NOOP_COUNTER_METRIC; + }; + /** + * @see {@link Meter.createUpDownCounter} + */ + NoopMeter.prototype.createUpDownCounter = function (_name, _options) { + return NOOP_UP_DOWN_COUNTER_METRIC; + }; + /** + * @see {@link Meter.createObservableGauge} + */ + NoopMeter.prototype.createObservableGauge = function (_name, _options) { + return NOOP_OBSERVABLE_GAUGE_METRIC; + }; + /** + * @see {@link Meter.createObservableCounter} + */ + NoopMeter.prototype.createObservableCounter = function (_name, _options) { + return NOOP_OBSERVABLE_COUNTER_METRIC; + }; + /** + * @see {@link Meter.createObservableUpDownCounter} + */ + NoopMeter.prototype.createObservableUpDownCounter = function (_name, _options) { + return NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; + }; + /** + * @see {@link Meter.addBatchObservableCallback} + */ + NoopMeter.prototype.addBatchObservableCallback = function (_callback, _observables) { }; + /** + * @see {@link Meter.removeBatchObservableCallback} + */ + NoopMeter.prototype.removeBatchObservableCallback = function (_callback) { }; + return NoopMeter; +}()); +export { NoopMeter }; +var NoopMetric = /** @class */ (function () { + function NoopMetric() { + } + return NoopMetric; +}()); +export { NoopMetric }; +var NoopCounterMetric = /** @class */ (function (_super) { + __extends(NoopCounterMetric, _super); + function NoopCounterMetric() { + return _super !== null && _super.apply(this, arguments) || this; + } + NoopCounterMetric.prototype.add = function (_value, _attributes) { }; + return NoopCounterMetric; +}(NoopMetric)); +export { NoopCounterMetric }; +var NoopUpDownCounterMetric = /** @class */ (function (_super) { + __extends(NoopUpDownCounterMetric, _super); + function NoopUpDownCounterMetric() { + return _super !== null && _super.apply(this, arguments) || this; + } + NoopUpDownCounterMetric.prototype.add = function (_value, _attributes) { }; + return NoopUpDownCounterMetric; +}(NoopMetric)); 
+export { NoopUpDownCounterMetric }; +var NoopHistogramMetric = /** @class */ (function (_super) { + __extends(NoopHistogramMetric, _super); + function NoopHistogramMetric() { + return _super !== null && _super.apply(this, arguments) || this; + } + NoopHistogramMetric.prototype.record = function (_value, _attributes) { }; + return NoopHistogramMetric; +}(NoopMetric)); +export { NoopHistogramMetric }; +var NoopObservableMetric = /** @class */ (function () { + function NoopObservableMetric() { + } + NoopObservableMetric.prototype.addCallback = function (_callback) { }; + NoopObservableMetric.prototype.removeCallback = function (_callback) { }; + return NoopObservableMetric; +}()); +export { NoopObservableMetric }; +var NoopObservableCounterMetric = /** @class */ (function (_super) { + __extends(NoopObservableCounterMetric, _super); + function NoopObservableCounterMetric() { + return _super !== null && _super.apply(this, arguments) || this; + } + return NoopObservableCounterMetric; +}(NoopObservableMetric)); +export { NoopObservableCounterMetric }; +var NoopObservableGaugeMetric = /** @class */ (function (_super) { + __extends(NoopObservableGaugeMetric, _super); + function NoopObservableGaugeMetric() { + return _super !== null && _super.apply(this, arguments) || this; + } + return NoopObservableGaugeMetric; +}(NoopObservableMetric)); +export { NoopObservableGaugeMetric }; +var NoopObservableUpDownCounterMetric = /** @class */ (function (_super) { + __extends(NoopObservableUpDownCounterMetric, _super); + function NoopObservableUpDownCounterMetric() { + return _super !== null && _super.apply(this, arguments) || this; + } + return NoopObservableUpDownCounterMetric; +}(NoopObservableMetric)); +export { NoopObservableUpDownCounterMetric }; +export var NOOP_METER = new NoopMeter(); +// Synchronous instruments +export var NOOP_COUNTER_METRIC = new NoopCounterMetric(); +export var NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); +export var NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); +// Asynchronous instruments +export var NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); +export var NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); +export var NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); +/** + * Create a no-op Meter + */ +export function createNoopMeter() { + return NOOP_METER; +} +//# sourceMappingURL=NoopMeter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js.map b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js.map new file mode 100644 index 0000000..b0e3b22 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"NoopMeter.js","sourceRoot":"","sources":["../../../src/metrics/NoopMeter.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;;;;;AAiBH;;;GAGG;AACH;IACE;IAAe,CAAC;IAEhB;;OAEG;IACH,mCAAe,GAAf,UAAgB,KAAa,EAAE,QAAwB;QACrD,OAAO,qBAAqB,CAAC;IAC/B,CAAC;IAED;;OAEG;IACH,iCAAa,GAAb,UAAc,KAAa,EAAE,QAAwB;QACnD,OAAO,mBAAmB,CAAC;IAC7B,CAAC;IAED;;OAEG;IACH,uCAAmB,GAAnB,UAAoB,KAAa,EAAE,QAAwB;QACzD,OAAO,2BAA2B,CAAC;IACrC,CAAC;IAED;;OAEG;IACH,yCAAqB,GAArB,UACE,KAAa,EACb,QAAwB;QAExB,OAAO,4BAA4B,CAAC;IACtC,CAAC;IAED;;OAEG;IACH,2CAAuB,GAAvB,UACE,KAAa,EACb,QAAwB;QAExB,OAAO,8BAA8B,CAAC;IACxC,CAAC;IAED;;OAEG;IACH,iDAA6B,GAA7B,UACE,KAAa,EACb,QAAwB;QAExB,OAAO,sCAAsC,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,8CAA0B,GAA1B,UACE,SAAkC,EAClC,YAA0B,IACnB,CAAC;IAEV;;OAEG;IACH,iDAA6B,GAA7B,UAA8B,SAAkC,IAAS,CAAC;IAC5E,gBAAC;AAAD,CAAC,AAlED,IAkEC;;AAED;IAAA;IAAyB,CAAC;IAAD,iBAAC;AAAD,CAAC,AAA1B,IAA0B;;AAE1B;IAAuC,qCAAU;IAAjD;;IAEA,CAAC;IADC,+BAAG,GAAH,UAAI,MAAc,EAAE,WAA6B,IAAS,CAAC;IAC7D,wBAAC;AAAD,CAAC,AAFD,CAAuC,UAAU,GAEhD;;AAED;IACU,2CAAU;IADpB;;IAKA,CAAC;IADC,qCAAG,GAAH,UAAI,MAAc,EAAE,WAA6B,IAAS,CAAC;IAC7D,8BAAC;AAAD,CAAC,AALD,CACU,UAAU,GAInB;;AAED;IAAyC,uCAAU;IAAnD;;IAEA,CAAC;IADC,oCAAM,GAAN,UAAO,MAAc,EAAE,WAA6B,IAAS,CAAC;IAChE,0BAAC;AAAD,CAAC,AAFD,CAAyC,UAAU,GAElD;;AAED;IAAA;IAIA,CAAC;IAHC,0CAAW,GAAX,UAAY,SAA6B,IAAG,CAAC;IAE7C,6CAAc,GAAd,UAAe,SAA6B,IAAG,CAAC;IAClD,2BAAC;AAAD,CAAC,AAJD,IAIC;;AAED;IACU,+CAAoB;IAD9B;;IAEgC,CAAC;IAAD,kCAAC;AAAD,CAAC,AAFjC,CACU,oBAAoB,GACG;;AAEjC;IACU,6CAAoB;IAD9B;;IAE8B,CAAC;IAAD,gCAAC;AAAD,CAAC,AAF/B,CACU,oBAAoB,GACC;;AAE/B;IACU,qDAAoB;IAD9B;;IAEsC,CAAC;IAAD,wCAAC;AAAD,CAAC,AAFvC,CACU,oBAAoB,GACS;;AAEvC,MAAM,CAAC,IAAM,UAAU,GAAG,IAAI,SAAS,EAAE,CAAC;AAE1C,0BAA0B;AAC1B,MAAM,CAAC,IAAM,mBAAmB,GAAG,IAAI,iBAAiB,EAAE,CAAC;AAC3D,MAAM,CAAC,IAAM,qBAAqB,GAAG,IAAI,mBAAmB,EAAE,CAAC;AAC/D,MAAM,CAAC,IAAM,2BAA2B,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAEzE,2BAA2B;AAC3B,MAAM,CAAC,IAAM,8BAA8B,GAAG,IAAI,2BAA2B,EAAE,CAAC;AAChF,MAAM,CAAC,IAAM,4BAA4B,GAAG,IAAI,yBAAyB,EAAE,CAAC;AAC5E,MAAM,CAAC,IAAM,sCAAsC,GACjD,IAAI,iCAAiC,EAAE,CAAC;AAE1C;;GAEG;AACH,MAAM,UAAU,eAAe;IAC7B,OAAO,UAAU,CAAC;AACpB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter } from './Meter';\nimport {\n BatchObservableCallback,\n Counter,\n Histogram,\n MetricOptions,\n ObservableCallback,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n UpDownCounter,\n MetricAttributes,\n Observable,\n} from './Metric';\n\n/**\n * NoopMeter is a noop implementation of the {@link Meter} interface. 
It reuses\n * constant NoopMetrics for all of its methods.\n */\nexport class NoopMeter implements Meter {\n constructor() {}\n\n /**\n * @see {@link Meter.createHistogram}\n */\n createHistogram(_name: string, _options?: MetricOptions): Histogram {\n return NOOP_HISTOGRAM_METRIC;\n }\n\n /**\n * @see {@link Meter.createCounter}\n */\n createCounter(_name: string, _options?: MetricOptions): Counter {\n return NOOP_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createUpDownCounter}\n */\n createUpDownCounter(_name: string, _options?: MetricOptions): UpDownCounter {\n return NOOP_UP_DOWN_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableGauge}\n */\n createObservableGauge(\n _name: string,\n _options?: MetricOptions\n ): ObservableGauge {\n return NOOP_OBSERVABLE_GAUGE_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableCounter}\n */\n createObservableCounter(\n _name: string,\n _options?: MetricOptions\n ): ObservableCounter {\n return NOOP_OBSERVABLE_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableUpDownCounter}\n */\n createObservableUpDownCounter(\n _name: string,\n _options?: MetricOptions\n ): ObservableUpDownCounter {\n return NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.addBatchObservableCallback}\n */\n addBatchObservableCallback(\n _callback: BatchObservableCallback,\n _observables: Observable[]\n ): void {}\n\n /**\n * @see {@link Meter.removeBatchObservableCallback}\n */\n removeBatchObservableCallback(_callback: BatchObservableCallback): void {}\n}\n\nexport class NoopMetric {}\n\nexport class NoopCounterMetric extends NoopMetric implements Counter {\n add(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopUpDownCounterMetric\n extends NoopMetric\n implements UpDownCounter\n{\n add(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopHistogramMetric extends NoopMetric implements Histogram {\n record(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopObservableMetric {\n addCallback(_callback: ObservableCallback) {}\n\n removeCallback(_callback: ObservableCallback) {}\n}\n\nexport class NoopObservableCounterMetric\n extends NoopObservableMetric\n implements ObservableCounter {}\n\nexport class NoopObservableGaugeMetric\n extends NoopObservableMetric\n implements ObservableGauge {}\n\nexport class NoopObservableUpDownCounterMetric\n extends NoopObservableMetric\n implements ObservableUpDownCounter {}\n\nexport const NOOP_METER = new NoopMeter();\n\n// Synchronous instruments\nexport const NOOP_COUNTER_METRIC = new NoopCounterMetric();\nexport const NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric();\nexport const NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric();\n\n// Asynchronous instruments\nexport const NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric();\nexport const NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric();\nexport const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC =\n new NoopObservableUpDownCounterMetric();\n\n/**\n * Create a no-op Meter\n */\nexport function createNoopMeter(): Meter {\n return NOOP_METER;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.d.ts b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.d.ts new file mode 100644 index 0000000..8b51bc5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.d.ts @@ -0,0 +1,11 @@ +import { Meter, MeterOptions 
} from './Meter'; +import { MeterProvider } from './MeterProvider'; +/** + * An implementation of the {@link MeterProvider} which returns an impotent Meter + * for all calls to `getMeter` + */ +export declare class NoopMeterProvider implements MeterProvider { + getMeter(_name: string, _version?: string, _options?: MeterOptions): Meter; +} +export declare const NOOP_METER_PROVIDER: NoopMeterProvider; +//# sourceMappingURL=NoopMeterProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js new file mode 100644 index 0000000..75de3c0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NOOP_METER } from './NoopMeter'; +/** + * An implementation of the {@link MeterProvider} which returns an impotent Meter + * for all calls to `getMeter` + */ +var NoopMeterProvider = /** @class */ (function () { + function NoopMeterProvider() { + } + NoopMeterProvider.prototype.getMeter = function (_name, _version, _options) { + return NOOP_METER; + }; + return NoopMeterProvider; +}()); +export { NoopMeterProvider }; +export var NOOP_METER_PROVIDER = new NoopMeterProvider(); +//# sourceMappingURL=NoopMeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js.map b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js.map new file mode 100644 index 0000000..28b3120 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopMeterProvider.js","sourceRoot":"","sources":["../../../src/metrics/NoopMeterProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAEzC;;;GAGG;AACH;IAAA;IAIA,CAAC;IAHC,oCAAQ,GAAR,UAAS,KAAa,EAAE,QAAiB,EAAE,QAAuB;QAChE,OAAO,UAAU,CAAC;IACpB,CAAC;IACH,wBAAC;AAAD,CAAC,AAJD,IAIC;;AAED,MAAM,CAAC,IAAM,mBAAmB,GAAG,IAAI,iBAAiB,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, MeterOptions } from './Meter';\nimport { MeterProvider } from './MeterProvider';\nimport { NOOP_METER } from './NoopMeter';\n\n/**\n * An implementation of the {@link MeterProvider} which returns an impotent 
Meter\n * for all calls to `getMeter`\n */\nexport class NoopMeterProvider implements MeterProvider {\n getMeter(_name: string, _version?: string, _options?: MeterOptions): Meter {\n return NOOP_METER;\n }\n}\n\nexport const NOOP_METER_PROVIDER = new NoopMeterProvider();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.d.ts b/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.d.ts new file mode 100644 index 0000000..26563f9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.d.ts @@ -0,0 +1,31 @@ +import { MetricAttributes, Observable } from './Metric'; +/** + * Interface that is being used in callback function for Observable Metric. + */ +export interface ObservableResult { + /** + * Observe a measurement of the value associated with the given attributes. + * + * @param value The value to be observed. + * @param attributes The attributes associated with the value. If more than + * one values associated with the same attributes values, SDK may pick the + * last one or simply drop the entire observable result. + */ + observe(this: ObservableResult, value: number, attributes?: AttributesTypes): void; +} +/** + * Interface that is being used in batch observable callback function. + */ +export interface BatchObservableResult { + /** + * Observe a measurement of the value associated with the given attributes. + * + * @param metric The observable metric to be observed. + * @param value The value to be observed. + * @param attributes The attributes associated with the value. If more than + * one values associated with the same attributes values, SDK may pick the + * last one or simply drop the entire observable result. + */ + observe(this: BatchObservableResult, metric: Observable, value: number, attributes?: AttributesTypes): void; +} +//# sourceMappingURL=ObservableResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js b/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js new file mode 100644 index 0000000..7985d26 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=ObservableResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js.map b/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js.map new file mode 100644 index 0000000..b6b0c7c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ObservableResult.js","sourceRoot":"","sources":["../../../src/metrics/ObservableResult.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { MetricAttributes, Observable } from './Metric';\n\n/**\n * Interface that is being used in callback function for Observable Metric.\n */\nexport interface ObservableResult<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Observe a measurement of the value associated with the given attributes.\n *\n * @param value The value to be observed.\n * @param attributes The attributes associated with the value. If more than\n * one values associated with the same attributes values, SDK may pick the\n * last one or simply drop the entire observable result.\n */\n observe(\n this: ObservableResult,\n value: number,\n attributes?: AttributesTypes\n ): void;\n}\n\n/**\n * Interface that is being used in batch observable callback function.\n */\nexport interface BatchObservableResult<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Observe a measurement of the value associated with the given attributes.\n *\n * @param metric The observable metric to be observed.\n * @param value The value to be observed.\n * @param attributes The attributes associated with the value. 
If more than\n * one values associated with the same attributes values, SDK may pick the\n * last one or simply drop the entire observable result.\n */\n observe(\n this: BatchObservableResult,\n metric: Observable,\n value: number,\n attributes?: AttributesTypes\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts new file mode 100644 index 0000000..e73fd73 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts @@ -0,0 +1,10 @@ +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js new file mode 100644 index 0000000..52f985e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Updates to this file should also be replicated to @opentelemetry/core too. +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef +export var _globalThis = typeof globalThis === 'object' + ? globalThis + : typeof self === 'object' + ? self + : typeof window === 'object' + ? window + : typeof global === 'object' + ? 
global + : {}; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map new file mode 100644 index 0000000..665e3fc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/browser/globalThis.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,6EAA6E;AAE7E;;;;;;GAMG;AAEH,gEAAgE;AAChE,8EAA8E;AAC9E,MAAM,CAAC,IAAM,WAAW,GACtB,OAAO,UAAU,KAAK,QAAQ;IAC5B,CAAC,CAAC,UAAU;IACZ,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ;QAC1B,CAAC,CAAC,IAAI;QACN,CAAC,CAAC,OAAO,MAAM,KAAK,QAAQ;YAC5B,CAAC,CAAC,MAAM;YACR,CAAC,CAAC,OAAO,MAAM,KAAK,QAAQ;gBAC5B,CAAC,CAAC,MAAM;gBACR,CAAC,CAAE,EAAwB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Updates to this file should also be replicated to @opentelemetry/core too.\n\n/**\n * - globalThis (New standard)\n * - self (Will return the current window instance for supported browsers)\n * - window (fallback for older browser implementations)\n * - global (NodeJS implementation)\n * - (When all else fails)\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef\nexport const _globalThis: typeof globalThis =\n typeof globalThis === 'object'\n ? globalThis\n : typeof self === 'object'\n ? self\n : typeof window === 'object'\n ? window\n : typeof global === 'object'\n ? global\n : ({} as typeof globalThis);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts new file mode 100644 index 0000000..ba20e12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js new file mode 100644 index 0000000..efcad2e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +export * from './globalThis'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map new file mode 100644 index 0000000..07adcd9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/browser/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,cAAc,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts new file mode 100644 index 0000000..90595da --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts @@ -0,0 +1,2 @@ +export * from './node'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.js b/node_modules/@opentelemetry/api/build/esm/platform/index.js new file mode 100644 index 0000000..c0df125 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './node'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/index.js.map new file mode 100644 index 0000000..9494c53 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/platform/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,QAAQ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './node';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts new file mode 100644 index 0000000..e3c83e5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts @@ -0,0 +1,3 @@ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js new file mode 100644 index 0000000..feb9700 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js @@ -0,0 +1,19 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +export var _globalThis = typeof globalThis === 'object' ? 
globalThis : global; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map new file mode 100644 index 0000000..5911136 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/node/globalThis.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,gEAAgE;AAChE,oEAAoE;AACpE,MAAM,CAAC,IAAM,WAAW,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexport const _globalThis = typeof globalThis === 'object' ? globalThis : global;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts new file mode 100644 index 0000000..ba20e12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.js b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js new file mode 100644 index 0000000..efcad2e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './globalThis'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map new file mode 100644 index 0000000..f279718 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/node/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,cAAc,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation-api.d.ts b/node_modules/@opentelemetry/api/build/esm/propagation-api.d.ts new file mode 100644 index 0000000..e12b51b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation-api.d.ts @@ -0,0 +1,4 @@ +import { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export declare const propagation: PropagationAPI; +//# sourceMappingURL=propagation-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation-api.js b/node_modules/@opentelemetry/api/build/esm/propagation-api.js new file mode 100644 index 0000000..df8d5b9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation-api.js @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. 
+import { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export var propagation = PropagationAPI.getInstance(); +//# sourceMappingURL=propagation-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation-api.js.map b/node_modules/@opentelemetry/api/build/esm/propagation-api.js.map new file mode 100644 index 0000000..2375317 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation-api.js","sourceRoot":"","sources":["../../src/propagation-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,qCAAqC;AACrC,MAAM,CAAC,IAAM,WAAW,GAAG,cAAc,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { PropagationAPI } from './api/propagation';\n/** Entrypoint for propagation API */\nexport const propagation = PropagationAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts new file mode 100644 index 0000000..398021f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts @@ -0,0 +1,13 @@ +import { Context } from '../context/types'; +import { TextMapPropagator } from './TextMapPropagator'; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +export declare class NoopTextMapPropagator implements TextMapPropagator { + /** Noop inject function does nothing */ + inject(_context: Context, _carrier: unknown): void; + /** Noop extract function does nothing and returns the input context */ + extract(context: Context, _carrier: unknown): Context; + fields(): string[]; +} +//# sourceMappingURL=NoopTextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js new file mode 100644 index 0000000..8e62901 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * No-op implementations of {@link TextMapPropagator}. + */ +var NoopTextMapPropagator = /** @class */ (function () { + function NoopTextMapPropagator() { + } + /** Noop inject function does nothing */ + NoopTextMapPropagator.prototype.inject = function (_context, _carrier) { }; + /** Noop extract function does nothing and returns the input context */ + NoopTextMapPropagator.prototype.extract = function (context, _carrier) { + return context; + }; + NoopTextMapPropagator.prototype.fields = function () { + return []; + }; + return NoopTextMapPropagator; +}()); +export { NoopTextMapPropagator }; +//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map new file mode 100644 index 0000000..40be566 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/NoopTextMapPropagator.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH;;GAEG;AACH;IAAA;IAUA,CAAC;IATC,wCAAwC;IACxC,sCAAM,GAAN,UAAO,QAAiB,EAAE,QAAiB,IAAS,CAAC;IACrD,uEAAuE;IACvE,uCAAO,GAAP,UAAQ,OAAgB,EAAE,QAAiB;QACzC,OAAO,OAAO,CAAC;IACjB,CAAC;IACD,sCAAM,GAAN;QACE,OAAO,EAAE,CAAC;IACZ,CAAC;IACH,4BAAC;AAAD,CAAC,AAVD,IAUC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { TextMapPropagator } from './TextMapPropagator';\n\n/**\n * No-op implementations of {@link TextMapPropagator}.\n */\nexport class NoopTextMapPropagator implements TextMapPropagator {\n /** Noop inject function does nothing */\n inject(_context: Context, _carrier: unknown): void {}\n /** Noop extract function does nothing and returns the input context */\n extract(context: Context, _carrier: unknown): Context {\n return context;\n }\n fields(): string[] {\n return [];\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts new file mode 100644 index 0000000..dc39367 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts @@ -0,0 +1,84 @@ +import { Context } from '../context/types'; +/** + * Injects `Context` into and extracts it from carriers that travel + * in-band across process boundaries. Encoding is expected to conform to the + * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request + * headers. + * + * The carrier of propagated data on both the client (injector) and server + * (extractor) side is usually an object such as http headers. 
Propagation is + * usually implemented via library-specific request interceptors, where the + * client-side injects values and the server-side extracts them. + */ +export interface TextMapPropagator { + /** + * Injects values from a given `Context` into a carrier. + * + * OpenTelemetry defines a common set of format values (TextMapPropagator), + * and each has an expected `carrier` type. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param setter an optional {@link TextMapSetter}. If undefined, values will be + * set by direct object assignment. + */ + inject(context: Context, carrier: Carrier, setter: TextMapSetter): void; + /** + * Given a `Context` and a carrier, extract context values from a + * carrier and return a new context, created from the old context, with the + * extracted values. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all + * own properties, and keys will be accessed by direct object access. + */ + extract(context: Context, carrier: Carrier, getter: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; +} +/** + * A setter is specified by the caller to define a specific method + * to set key/value pairs on the carrier within a propagator. + */ +export interface TextMapSetter { + /** + * Callback used to set a key/value pair on an object. + * + * Should be called by the propagator each time a key/value pair + * should be set, and should set that key/value pair on the propagator. + * + * @param carrier object or class which carries key/value pairs + * @param key string key to modify + * @param value value to be set to the key on the carrier + */ + set(carrier: Carrier, key: string, value: string): void; +} +/** + * A getter is specified by the caller to define a specific method + * to get the value of a key from a carrier. + */ +export interface TextMapGetter { + /** + * Get a list of all keys available on the carrier. + * + * @param carrier + */ + keys(carrier: Carrier): string[]; + /** + * Get the value of a specific key from the carrier. + * + * @param carrier + * @param key + */ + get(carrier: Carrier, key: string): undefined | string | string[]; +} +export declare const defaultTextMapGetter: TextMapGetter; +export declare const defaultTextMapSetter: TextMapSetter; +//# sourceMappingURL=TextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js new file mode 100644 index 0000000..c5f5311 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var defaultTextMapGetter = { + get: function (carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys: function (carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +export var defaultTextMapSetter = { + set: function (carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map new file mode 100644 index 0000000..9de7d06 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/TextMapPropagator.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAkGH,MAAM,CAAC,IAAM,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG;QACd,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC;IACtB,CAAC;IAED,IAAI,YAAC,OAAO;QACV,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,EAAE,CAAC;SACX;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;CACF,CAAC;AAEF,MAAM,CAAC,IAAM,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG,EAAE,KAAK;QACrB,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO;SACR;QAED,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\n\n/**\n * Injects `Context` into and extracts it from carriers that travel\n * in-band across process boundaries. Encoding is expected to conform to the\n * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request\n * headers.\n *\n * The carrier of propagated data on both the client (injector) and server\n * (extractor) side is usually an object such as http headers. 
Propagation is\n * usually implemented via library-specific request interceptors, where the\n * client-side injects values and the server-side extracts them.\n */\nexport interface TextMapPropagator {\n /**\n * Injects values from a given `Context` into a carrier.\n *\n * OpenTelemetry defines a common set of format values (TextMapPropagator),\n * and each has an expected `carrier` type.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param setter an optional {@link TextMapSetter}. If undefined, values will be\n * set by direct object assignment.\n */\n inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter\n ): void;\n\n /**\n * Given a `Context` and a carrier, extract context values from a\n * carrier and return a new context, created from the old context, with the\n * extracted values.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all\n * own properties, and keys will be accessed by direct object access.\n */\n extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter\n ): Context;\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n fields(): string[];\n}\n\n/**\n * A setter is specified by the caller to define a specific method\n * to set key/value pairs on the carrier within a propagator.\n */\nexport interface TextMapSetter {\n /**\n * Callback used to set a key/value pair on an object.\n *\n * Should be called by the propagator each time a key/value pair\n * should be set, and should set that key/value pair on the propagator.\n *\n * @param carrier object or class which carries key/value pairs\n * @param key string key to modify\n * @param value value to be set to the key on the carrier\n */\n set(carrier: Carrier, key: string, value: string): void;\n}\n\n/**\n * A getter is specified by the caller to define a specific method\n * to get the value of a key from a carrier.\n */\nexport interface TextMapGetter {\n /**\n * Get a list of all keys available on the carrier.\n *\n * @param carrier\n */\n keys(carrier: Carrier): string[];\n\n /**\n * Get the value of a specific key from the carrier.\n *\n * @param carrier\n * @param key\n */\n get(carrier: Carrier, key: string): undefined | string | string[];\n}\n\nexport const defaultTextMapGetter: TextMapGetter = {\n get(carrier, key) {\n if (carrier == null) {\n return undefined;\n }\n return carrier[key];\n },\n\n keys(carrier) {\n if (carrier == null) {\n return [];\n }\n return Object.keys(carrier);\n },\n};\n\nexport const defaultTextMapSetter: TextMapSetter = {\n set(carrier, key, value) {\n if (carrier == null) {\n return;\n }\n\n carrier[key] = value;\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace-api.d.ts b/node_modules/@opentelemetry/api/build/esm/trace-api.d.ts new file mode 100644 index 0000000..b4751a7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace-api.d.ts @@ -0,0 +1,4 @@ +import { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export declare const trace: TraceAPI; +//# sourceMappingURL=trace-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace-api.js 
b/node_modules/@opentelemetry/api/build/esm/trace-api.js new file mode 100644 index 0000000..57506e5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace-api.js @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export var trace = TraceAPI.getInstance(); +//# sourceMappingURL=trace-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace-api.js.map b/node_modules/@opentelemetry/api/build/esm/trace-api.js.map new file mode 100644 index 0000000..bd11c06 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace-api.js","sourceRoot":"","sources":["../../src/trace-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,+BAA+B;AAC/B,MAAM,CAAC,IAAM,KAAK,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { TraceAPI } from './api/trace';\n/** Entrypoint for trace API */\nexport const trace = TraceAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts new file mode 100644 index 0000000..883cf91 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts @@ -0,0 +1,25 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. 
+ */ +export declare class NonRecordingSpan implements Span { + private readonly _spanContext; + constructor(_spanContext?: SpanContext); + spanContext(): SpanContext; + setAttribute(_key: string, _value: unknown): this; + setAttributes(_attributes: SpanAttributes): this; + addEvent(_name: string, _attributes?: SpanAttributes): this; + setStatus(_status: SpanStatus): this; + updateName(_name: string): this; + end(_endTime?: TimeInput): void; + isRecording(): boolean; + recordException(_exception: Exception, _time?: TimeInput): void; +} +//# sourceMappingURL=NonRecordingSpan.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js new file mode 100644 index 0000000..ba2e0ff --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js @@ -0,0 +1,62 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { INVALID_SPAN_CONTEXT } from './invalid-span-constants'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +var NonRecordingSpan = /** @class */ (function () { + function NonRecordingSpan(_spanContext) { + if (_spanContext === void 0) { _spanContext = INVALID_SPAN_CONTEXT; } + this._spanContext = _spanContext; + } + // Returns a SpanContext. + NonRecordingSpan.prototype.spanContext = function () { + return this._spanContext; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setStatus = function (_status) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.updateName = function (_name) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.end = function (_endTime) { }; + // isRecording always returns false for NonRecordingSpan. 
+ NonRecordingSpan.prototype.isRecording = function () { + return false; + }; + // By default does nothing + NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; + return NonRecordingSpan; +}()); +export { NonRecordingSpan }; +//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map new file mode 100644 index 0000000..6af5c8d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NonRecordingSpan.js","sourceRoot":"","sources":["../../../src/trace/NonRecordingSpan.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAKhE;;;;GAIG;AACH;IACE,0BACmB,YAAgD;QAAhD,6BAAA,EAAA,mCAAgD;QAAhD,iBAAY,GAAZ,YAAY,CAAoC;IAChE,CAAC;IAEJ,yBAAyB;IACzB,sCAAW,GAAX;QACE,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,0BAA0B;IAC1B,uCAAY,GAAZ,UAAa,IAAY,EAAE,MAAe;QACxC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,wCAAa,GAAb,UAAc,WAA2B;QACvC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,mCAAQ,GAAR,UAAS,KAAa,EAAE,WAA4B;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,oCAAS,GAAT,UAAU,OAAmB;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,qCAAU,GAAV,UAAW,KAAa;QACtB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,8BAAG,GAAH,UAAI,QAAoB,IAAS,CAAC;IAElC,yDAAyD;IACzD,sCAAW,GAAX;QACE,OAAO,KAAK,CAAC;IACf,CAAC;IAED,0BAA0B;IAC1B,0CAAe,GAAf,UAAgB,UAAqB,EAAE,KAAiB,IAAS,CAAC;IACpE,uBAAC;AAAD,CAAC,AA7CD,IA6CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { INVALID_SPAN_CONTEXT } from './invalid-span-constants';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * The NonRecordingSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nexport class NonRecordingSpan implements Span {\n constructor(\n private readonly _spanContext: SpanContext = INVALID_SPAN_CONTEXT\n ) {}\n\n // Returns a SpanContext.\n spanContext(): SpanContext {\n return this._spanContext;\n }\n\n // By default does nothing\n setAttribute(_key: string, _value: unknown): this {\n return this;\n }\n\n // By default does nothing\n setAttributes(_attributes: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n addEvent(_name: string, _attributes?: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n setStatus(_status: SpanStatus): this {\n return this;\n }\n\n // By default does nothing\n updateName(_name: string): this {\n return this;\n }\n\n // By default does nothing\n end(_endTime?: TimeInput): void {}\n\n // isRecording always returns false for NonRecordingSpan.\n isRecording(): boolean {\n return false;\n }\n\n // By default does nothing\n recordException(_exception: Exception, _time?: TimeInput): void {}\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts new file mode 100644 index 0000000..0e059c9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts @@ -0,0 +1,14 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +/** + * No-op implementations of {@link Tracer}. + */ +export declare class NoopTracer implements Tracer { + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan ReturnType>(name: string, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, ctx: Context | undefined, fn: F): ReturnType; +} +//# sourceMappingURL=NoopTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js new file mode 100644 index 0000000..5a3a969 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js @@ -0,0 +1,76 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ContextAPI } from '../api/context'; +import { getSpanContext, setSpan } from '../trace/context-utils'; +import { NonRecordingSpan } from './NonRecordingSpan'; +import { isSpanContextValid } from './spancontext-utils'; +var contextApi = ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +var NoopTracer = /** @class */ (function () { + function NoopTracer() { + } + // startSpan starts a noop span. 
+ NoopTracer.prototype.startSpan = function (name, options, context) { + if (context === void 0) { context = contextApi.active(); } + var root = Boolean(options === null || options === void 0 ? void 0 : options.root); + if (root) { + return new NonRecordingSpan(); + } + var parentFromContext = context && getSpanContext(context); + if (isSpanContext(parentFromContext) && + isSpanContextValid(parentFromContext)) { + return new NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan(); + } + }; + NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { + var opts; + var ctx; + var fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + var parentContext = ctx !== null && ctx !== void 0 ? ctx : contextApi.active(); + var span = this.startSpan(name, opts, parentContext); + var contextWithSpanSet = setSpan(parentContext, span); + return contextApi.with(contextWithSpanSet, fn, undefined, span); + }; + return NoopTracer; +}()); +export { NoopTracer }; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map new file mode 100644 index 0000000..ae82938 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracer.js","sourceRoot":"","sources":["../../../src/trace/NoopTracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAE5C,OAAO,EAAE,cAAc,EAAE,OAAO,EAAE,MAAM,wBAAwB,CAAC;AACjE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AAKzD,IAAM,UAAU,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC;AAE5C;;GAEG;AACH;IAAA;IAoEA,CAAC;IAnEC,gCAAgC;IAChC,8BAAS,GAAT,UACE,IAAY,EACZ,OAAqB,EACrB,OAA6B;QAA7B,wBAAA,EAAA,UAAU,UAAU,CAAC,MAAM,EAAE;QAE7B,IAAM,IAAI,GAAG,OAAO,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,CAAC,CAAC;QACpC,IAAI,IAAI,EAAE;YACR,OAAO,IAAI,gBAAgB,EAAE,CAAC;SAC/B;QAED,IAAM,iBAAiB,GAAG,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,CAAC;QAE7D,IACE,aAAa,CAAC,iBAAiB,CAAC;YAChC,kBAAkB,CAAC,iBAAiB,CAAC,EACrC;YACA,OAAO,IAAI,gBAAgB,CAAC,iBAAiB,CAAC,CAAC;SAChD;aAAM;YACL,OAAO,IAAI,gBAAgB,EAAE,CAAC;SAC/B;IACH,CAAC;IAiBD,oCAAe,GAAf,UACE,IAAY,EACZ,IAAsB,EACtB,IAAkB,EAClB,IAAQ;QAER,IAAI,IAA6B,CAAC;QAClC,IAAI,GAAwB,CAAC;QAC7B,IAAI,EAAK,CAAC;QAEV,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YACxB,OAAO;SACR;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,IAAI,GAAG,IAA+B,CAAC;YACvC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM;YACL,IAAI,GAAG,IAA+B,CAAC;YACvC,GAAG,GAAG,IAA2B,CAAC;YAClC,EAAE,GAAG,IAAS,CAAC;SAChB;QAED,IAAM,aAAa,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,UAAU,CAAC,MAAM,EAAE,CAAC;QACjD,IAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,aAAa,CAAC,CAAC;QACvD,IAAM,kBAAkB,GAAG,OAAO,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QAExD,OAAO,UAAU,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;IAClE,CAAC;IACH,iBAAC;AAAD,CAAC,AApED,IAoEC;;AAED,SAAS,aAAa,CAAC,WAAgB;IACrC,OAAO,CACL,OAAO,WAAW,KAAK,QAAQ;QAC/B,OAAO,WAAW,CAAC,QAAQ,CAAC,KAAK,QAAQ;QACzC,OAAO,WAAW,CAAC,SAAS,CAAC,KAAK
,QAAQ;QAC1C,OAAO,WAAW,CAAC,YAAY,CAAC,KAAK,QAAQ,CAC9C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { Context } from '../context/types';\nimport { getSpanContext, setSpan } from '../trace/context-utils';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { isSpanContextValid } from './spancontext-utils';\nimport { SpanOptions } from './SpanOptions';\nimport { SpanContext } from './span_context';\nimport { Tracer } from './tracer';\n\nconst contextApi = ContextAPI.getInstance();\n\n/**\n * No-op implementations of {@link Tracer}.\n */\nexport class NoopTracer implements Tracer {\n // startSpan starts a noop span.\n startSpan(\n name: string,\n options?: SpanOptions,\n context = contextApi.active()\n ): Span {\n const root = Boolean(options?.root);\n if (root) {\n return new NonRecordingSpan();\n }\n\n const parentFromContext = context && getSpanContext(context);\n\n if (\n isSpanContext(parentFromContext) &&\n isSpanContextValid(parentFromContext)\n ) {\n return new NonRecordingSpan(parentFromContext);\n } else {\n return new NonRecordingSpan();\n }\n }\n\n startActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n ctx: Context | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n arg2?: F | SpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType | undefined {\n let opts: SpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (arguments.length < 2) {\n return;\n } else if (arguments.length === 2) {\n fn = arg2 as F;\n } else if (arguments.length === 3) {\n opts = arg2 as SpanOptions | undefined;\n fn = arg3 as F;\n } else {\n opts = arg2 as SpanOptions | undefined;\n ctx = arg3 as Context | undefined;\n fn = arg4 as F;\n }\n\n const parentContext = ctx ?? 
contextApi.active();\n const span = this.startSpan(name, opts, parentContext);\n const contextWithSpanSet = setSpan(parentContext, span);\n\n return contextApi.with(contextWithSpanSet, fn, undefined, span);\n }\n}\n\nfunction isSpanContext(spanContext: any): spanContext is SpanContext {\n return (\n typeof spanContext === 'object' &&\n typeof spanContext['spanId'] === 'string' &&\n typeof spanContext['traceId'] === 'string' &&\n typeof spanContext['traceFlags'] === 'number'\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts new file mode 100644 index 0000000..ec0fe79 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts @@ -0,0 +1,13 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +import { TracerProvider } from './tracer_provider'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +export declare class NoopTracerProvider implements TracerProvider { + getTracer(_name?: string, _version?: string, _options?: TracerOptions): Tracer; +} +//# sourceMappingURL=NoopTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js new file mode 100644 index 0000000..14d44c2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopTracer } from './NoopTracer'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. 
+ */ +var NoopTracerProvider = /** @class */ (function () { + function NoopTracerProvider() { + } + NoopTracerProvider.prototype.getTracer = function (_name, _version, _options) { + return new NoopTracer(); + }; + return NoopTracerProvider; +}()); +export { NoopTracerProvider }; +//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map new file mode 100644 index 0000000..2705ab4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/NoopTracerProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAK1C;;;;;GAKG;AACH;IAAA;IAQA,CAAC;IAPC,sCAAS,GAAT,UACE,KAAc,EACd,QAAiB,EACjB,QAAwB;QAExB,OAAO,IAAI,UAAU,EAAE,CAAC;IAC1B,CAAC;IACH,yBAAC;AAAD,CAAC,AARD,IAQC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopTracer } from './NoopTracer';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\nimport { TracerProvider } from './tracer_provider';\n\n/**\n * An implementation of the {@link TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nexport class NoopTracerProvider implements TracerProvider {\n getTracer(\n _name?: string,\n _version?: string,\n _options?: TracerOptions\n ): Tracer {\n return new NoopTracer();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts new file mode 100644 index 0000000..116cc5c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts @@ -0,0 +1,27 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * Proxy tracer provided by the proxy tracer provider + */ +export declare class ProxyTracer implements Tracer { + private _provider; + readonly name: string; + readonly version?: string | undefined; + readonly options?: TracerOptions | undefined; + private _delegate?; + constructor(_provider: TracerDelegator, name: string, version?: string | undefined, options?: TracerOptions | undefined); + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan unknown>(_name: string, _options: F | SpanOptions, _context?: F | Context, _fn?: F): ReturnType; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + private _getTracer; +} +export interface TracerDelegator { + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js new file mode 100644 index 0000000..341991b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopTracer } from './NoopTracer'; +var NOOP_TRACER = new NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +var ProxyTracer = /** @class */ (function () { + function ProxyTracer(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + ProxyTracer.prototype.startSpan = function (name, options, context) { + return this._getTracer().startSpan(name, options, context); + }; + ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { + var tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + }; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + ProxyTracer.prototype._getTracer = function () { + if (this._delegate) { + return this._delegate; + } + var tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + }; + return ProxyTracer; +}()); +export { ProxyTracer }; +//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map new file mode 100644 index 0000000..0511ae8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracer.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAM1C,IAAM,WAAW,GAAG,IAAI,UAAU,EAAE,CAAC;AAErC;;GAEG;AACH;IAIE,qBACU,SAA0B,EAClB,IAAY,EACZ,OAAgB,EAChB,OAAuB;QAH/B,cAAS,GAAT,SAAS,CAAiB;QAClB,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAS;QAChB,YAAO,GAAP,OAAO,CAAgB;IACtC,CAAC;IAEJ,+BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;IAED,qCAAe,GAAf,UACE,KAAa,EACb,QAAyB,EACzB,QAAsB,EACtB,GAAO;QAEP,IAAM,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,eAAe,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;IAClE,CAAC;IAED;;;OAGG;IACK,gCAAU,GAAlB;QACE,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,OAAO,IAAI,CAAC,SAAS,CAAC;SACvB;QAED,IAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,iBAAiB,CAC7C,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,OAAO,CACb,CAAC;QAEF,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,WAAW,CAAC;SACpB;QAED,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC;QACxB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IACH,kBAAC;AAAD,CAAC,AA/CD,IA+CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { NoopTracer } from './NoopTracer';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER = new NoopTracer();\n\n/**\n * Proxy tracer provided by the proxy tracer provider\n */\nexport class ProxyTracer implements Tracer {\n // When a real implementation is provided, this will be it\n private _delegate?: Tracer;\n\n constructor(\n private _provider: TracerDelegator,\n public readonly name: string,\n public readonly version?: string,\n public readonly options?: TracerOptions\n ) {}\n\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n return this._getTracer().startSpan(name, options, context);\n }\n\n startActiveSpan unknown>(\n _name: string,\n _options: F | SpanOptions,\n _context?: F | Context,\n _fn?: F\n ): ReturnType {\n const tracer = this._getTracer();\n return Reflect.apply(tracer.startActiveSpan, tracer, arguments);\n }\n\n /**\n * Try to get a 
tracer from the proxy tracer provider.\n * If the proxy tracer provider has no delegate, return a noop tracer.\n */\n private _getTracer() {\n if (this._delegate) {\n return this._delegate;\n }\n\n const tracer = this._provider.getDelegateTracer(\n this.name,\n this.version,\n this.options\n );\n\n if (!tracer) {\n return NOOP_TRACER;\n }\n\n this._delegate = tracer;\n return this._delegate;\n }\n}\n\nexport interface TracerDelegator {\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts new file mode 100644 index 0000000..ee7eafa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts @@ -0,0 +1,25 @@ +import { Tracer } from './tracer'; +import { TracerProvider } from './tracer_provider'; +import { TracerOptions } from './tracer_options'; +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +export declare class ProxyTracerProvider implements TracerProvider { + private _delegate?; + /** + * Get a {@link ProxyTracer} + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; + getDelegate(): TracerProvider; + /** + * Set the delegate tracer provider + */ + setDelegate(delegate: TracerProvider): void; + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js new file mode 100644 index 0000000..3cc735c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ProxyTracer } from './ProxyTracer'; +import { NoopTracerProvider } from './NoopTracerProvider'; +var NOOP_TRACER_PROVIDER = new NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. 
+ */ +var ProxyTracerProvider = /** @class */ (function () { + function ProxyTracerProvider() { + } + /** + * Get a {@link ProxyTracer} + */ + ProxyTracerProvider.prototype.getTracer = function (name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer(this, name, version, options)); + }; + ProxyTracerProvider.prototype.getDelegate = function () { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + }; + /** + * Set the delegate tracer provider + */ + ProxyTracerProvider.prototype.setDelegate = function (delegate) { + this._delegate = delegate; + }; + ProxyTracerProvider.prototype.getDelegateTracer = function (name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); + }; + return ProxyTracerProvider; +}()); +export { ProxyTracerProvider }; +//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map new file mode 100644 index 0000000..fc9179b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracerProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAG1D,IAAM,oBAAoB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEtD;;;;;;;GAOG;AACH;IAAA;IA+BA,CAAC;IA5BC;;OAEG;IACH,uCAAS,GAAT,UAAU,IAAY,EAAE,OAAgB,EAAE,OAAuB;;QAC/D,OAAO,CACL,MAAA,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,mCAC9C,IAAI,WAAW,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAC9C,CAAC;IACJ,CAAC;IAED,yCAAW,GAAX;;QACE,OAAO,MAAA,IAAI,CAAC,SAAS,mCAAI,oBAAoB,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,yCAAW,GAAX,UAAY,QAAwB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED,+CAAiB,GAAjB,UACE,IAAY,EACZ,OAAgB,EAChB,OAAuB;;QAEvB,OAAO,MAAA,IAAI,CAAC,SAAS,0CAAE,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IACH,0BAAC;AAAD,CAAC,AA/BD,IA+BC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerProvider } from './tracer_provider';\nimport { ProxyTracer } from './ProxyTracer';\nimport { NoopTracerProvider } from './NoopTracerProvider';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n\n/**\n * Tracer provider which provides {@link ProxyTracer}s.\n *\n * Before a delegate is set, tracers provided are NoOp.\n * When a delegate is set, traces are provided from the delegate.\n * When a delegate is set after tracers have already been provided,\n * all tracers already provided will use the provided delegate implementation.\n */\nexport 
class ProxyTracerProvider implements TracerProvider {\n private _delegate?: TracerProvider;\n\n /**\n * Get a {@link ProxyTracer}\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n return (\n this.getDelegateTracer(name, version, options) ??\n new ProxyTracer(this, name, version, options)\n );\n }\n\n getDelegate(): TracerProvider {\n return this._delegate ?? NOOP_TRACER_PROVIDER;\n }\n\n /**\n * Set the delegate tracer provider\n */\n setDelegate(delegate: TracerProvider) {\n this._delegate = delegate;\n }\n\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined {\n return this._delegate?.getTracer(name, version, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts new file mode 100644 index 0000000..c847eaf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts @@ -0,0 +1,31 @@ +import { Context } from '../context/types'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SamplingResult } from './SamplingResult'; +import { SpanKind } from './span_kind'; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * This interface represent a sampler. Sampling is a mechanism to control the + * noise and overhead introduced by OpenTelemetry by reducing the number of + * samples of traces collected and sent to the backend. + */ +export interface Sampler { + /** + * Checks whether span needs to be created and tracked. + * + * @param context Parent Context which may contain a span. + * @param traceId of the span to be created. It can be different from the + * traceId in the {@link SpanContext}. Typically in situations when the + * span to be created starts a new trace. + * @param spanName of the span to be created. + * @param spanKind of the span to be created. + * @param attributes Initial set of SpanAttributes for the Span being constructed. + * @param links Collection of links that will be associated with the Span to + * be created. Typically useful for batch operations. + * @returns a {@link SamplingResult}. + */ + shouldSample(context: Context, traceId: string, spanName: string, spanKind: SpanKind, attributes: SpanAttributes, links: Link[]): SamplingResult; + /** Returns the sampler name or short description with the configuration. */ + toString(): string; +} +//# sourceMappingURL=Sampler.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js new file mode 100644 index 0000000..22a60a1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map new file mode 100644 index 0000000..66719b1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Sampler.js","sourceRoot":"","sources":["../../../src/trace/Sampler.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SamplingResult } from './SamplingResult';\nimport { SpanKind } from './span_kind';\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * This interface represent a sampler. Sampling is a mechanism to control the\n * noise and overhead introduced by OpenTelemetry by reducing the number of\n * samples of traces collected and sent to the backend.\n */\nexport interface Sampler {\n /**\n * Checks whether span needs to be created and tracked.\n *\n * @param context Parent Context which may contain a span.\n * @param traceId of the span to be created. It can be different from the\n * traceId in the {@link SpanContext}. Typically in situations when the\n * span to be created starts a new trace.\n * @param spanName of the span to be created.\n * @param spanKind of the span to be created.\n * @param attributes Initial set of SpanAttributes for the Span being constructed.\n * @param links Collection of links that will be associated with the Span to\n * be created. Typically useful for batch operations.\n * @returns a {@link SamplingResult}.\n */\n shouldSample(\n context: Context,\n traceId: string,\n spanName: string,\n spanKind: SpanKind,\n attributes: SpanAttributes,\n links: Link[]\n ): SamplingResult;\n\n /** Returns the sampler name or short description with the configuration. */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts new file mode 100644 index 0000000..f2bb495 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts @@ -0,0 +1,49 @@ +import { SpanAttributes } from './attributes'; +import { TraceState } from './trace_state'; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export declare enum SamplingDecision { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + NOT_RECORD = 0, + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. 
+ */ + RECORD = 1, + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. + */ + RECORD_AND_SAMPLED = 2 +} +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling result contains a decision for a {@link Span} and additional + * attributes the sampler would like to added to the Span. + */ +export interface SamplingResult { + /** + * A sampling decision, refer to {@link SamplingDecision} for details. + */ + decision: SamplingDecision; + /** + * The list of attributes returned by SamplingResult MUST be immutable. + * Caller may call {@link Sampler}.shouldSample any number of times and + * can safely cache the returned value. + */ + attributes?: Readonly; + /** + * A {@link TraceState} that will be associated with the {@link Span} through + * the new {@link SpanContext}. Samplers SHOULD return the TraceState from + * the passed-in {@link Context} if they do not intend to change it. Leaving + * the value undefined will also leave the TraceState unchanged. + */ + traceState?: TraceState; +} +//# sourceMappingURL=SamplingResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js new file mode 100644 index 0000000..be65741 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js @@ -0,0 +1,39 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision || (SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map new file mode 100644 index 0000000..fd549c8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SamplingResult.js","sourceRoot":"","sources":["../../../src/trace/SamplingResult.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH;;;;GAIG;AACH,MAAM,CAAN,IAAY,gBAgBX;AAhBD,WAAY,gBAAgB;IAC1B;;;OAGG;IACH,mEAAU,CAAA;IACV;;;OAGG;IACH,2DAAM,CAAA;IACN;;;OAGG;IACH,mFAAkB,CAAA;AACpB,CAAC,EAhBW,gBAAgB,KAAhB,gBAAgB,QAgB3B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { TraceState } from './trace_state';\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nexport enum SamplingDecision {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n NOT_RECORD,\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n RECORD,\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n RECORD_AND_SAMPLED,\n}\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * A sampling result contains a decision for a {@link Span} and additional\n * attributes the sampler would like to added to the Span.\n */\nexport interface SamplingResult {\n /**\n * A sampling decision, refer to {@link SamplingDecision} for details.\n */\n decision: SamplingDecision;\n /**\n * The list of attributes returned by SamplingResult MUST be immutable.\n * Caller may call {@link Sampler}.shouldSample any number of times and\n * can safely cache the returned value.\n */\n attributes?: Readonly;\n /**\n * A {@link TraceState} that will be associated with the {@link Span} through\n * the new {@link SpanContext}. Samplers SHOULD return the TraceState from\n * the passed-in {@link Context} if they do not intend to change it. 
Leaving\n * the value undefined will also leave the TraceState unchanged.\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts new file mode 100644 index 0000000..c804568 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts @@ -0,0 +1,23 @@ +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SpanKind } from './span_kind'; +/** + * Options needed for span creation + */ +export interface SpanOptions { + /** + * The SpanKind of a span + * @default {@link SpanKind.INTERNAL} + */ + kind?: SpanKind; + /** A span's attributes */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** A manually specified start time for the created `Span` object. */ + startTime?: TimeInput; + /** The new span should be a root span. (Ignore parent from context). */ + root?: boolean; +} +//# sourceMappingURL=SpanOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js new file mode 100644 index 0000000..06b42b1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map new file mode 100644 index 0000000..9132a33 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SpanOptions.js","sourceRoot":"","sources":["../../../src/trace/SpanOptions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SpanKind } from './span_kind';\n\n/**\n * Options needed for span creation\n */\nexport interface SpanOptions {\n /**\n * The SpanKind of a span\n * @default {@link SpanKind.INTERNAL}\n */\n kind?: SpanKind;\n\n /** A span's attributes */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /** A manually specified start time for the created `Span` object. */\n startTime?: TimeInput;\n\n /** The new span should be a root span. (Ignore parent from context). */\n root?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts new file mode 100644 index 0000000..a2a5d2a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts @@ -0,0 +1,10 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +/** + * @deprecated please use {@link Attributes} + */ +export declare type SpanAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type SpanAttributeValue = AttributeValue; +//# sourceMappingURL=attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.js b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js new file mode 100644 index 0000000..6f1b9a3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map new file mode 100644 index 0000000..2b02be7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"attributes.js","sourceRoot":"","sources":["../../../src/trace/attributes.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type SpanAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type SpanAttributeValue = AttributeValue;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts new file mode 100644 index 0000000..f35f794 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts @@ -0,0 +1,41 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; +/** + * Gets the span from the current context, if one exists. + */ +export declare function getActiveSpan(): Span | undefined; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export declare function deleteSpan(context: Context): Context; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; +/** + * Get the span context of the span if it exists. 
+ * + * @param context context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; +//# sourceMappingURL=context-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js new file mode 100644 index 0000000..4d776c3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js @@ -0,0 +1,73 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { createContextKey } from '../context/context'; +import { NonRecordingSpan } from './NonRecordingSpan'; +import { ContextAPI } from '../api/context'; +/** + * span key + */ +var SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +/** + * Gets the span from the current context, if one exists. + */ +export function getActiveSpan() { + return getSpan(ContextAPI.getInstance().active()); +} +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan(spanContext)); +} +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +export function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? 
void 0 : _a.spanContext(); +} +//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map new file mode 100644 index 0000000..86103d1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-utils.js","sourceRoot":"","sources":["../../../src/trace/context-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAE5C;;GAEG;AACH,IAAM,QAAQ,GAAG,gBAAgB,CAAC,gCAAgC,CAAC,CAAC;AAEpE;;;;GAIG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB;IACtC,OAAQ,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAU,IAAI,SAAS,CAAC;AAC3D,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,aAAa;IAC3B,OAAO,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC;AACpD,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC1C,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB;IACzC,OAAO,OAAO,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;AACvC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,cAAc,CAC5B,OAAgB,EAChB,WAAwB;IAExB,OAAO,OAAO,CAAC,OAAO,EAAE,IAAI,gBAAgB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB;;IAC7C,OAAO,MAAA,OAAO,CAAC,OAAO,CAAC,0CAAE,WAAW,EAAE,CAAC;AACzC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { ContextAPI } from '../api/context';\n\n/**\n * span key\n */\nconst SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN');\n\n/**\n * Return the span if one exists\n *\n * @param context context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return (context.getValue(SPAN_KEY) as Span) || undefined;\n}\n\n/**\n * Gets the span from the current context, if one exists.\n */\nexport function getActiveSpan(): Span | undefined {\n return getSpan(ContextAPI.getInstance().active());\n}\n\n/**\n * Set the span on a context\n *\n * @param context context to use as parent\n * @param span span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return context.setValue(SPAN_KEY, span);\n}\n\n/**\n * Remove current span stored in the context\n *\n * @param context context to delete span from\n */\nexport function deleteSpan(context: Context): Context {\n return context.deleteValue(SPAN_KEY);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context context to set active span on\n * @param spanContext span context to be wrapped\n */\nexport function setSpanContext(\n context: Context,\n 
spanContext: SpanContext\n): Context {\n return setSpan(context, new NonRecordingSpan(spanContext));\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return getSpan(context)?.spanContext();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts new file mode 100644 index 0000000..9ed5ecb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts @@ -0,0 +1,22 @@ +import { TraceState } from '../trace_state'; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +export declare class TraceStateImpl implements TraceState { + private _internalState; + constructor(rawTraceState?: string); + set(key: string, value: string): TraceStateImpl; + unset(key: string): TraceStateImpl; + get(key: string): string | undefined; + serialize(): string; + private _parse; + private _keys; + private _clone; +} +//# sourceMappingURL=tracestate-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js new file mode 100644 index 0000000..7514069 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js @@ -0,0 +1,102 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { validateKey, validateValue } from './tracestate-validators'; +var MAX_TRACE_STATE_ITEMS = 32; +var MAX_TRACE_STATE_LEN = 512; +var LIST_MEMBERS_SEPARATOR = ','; +var LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +var TraceStateImpl = /** @class */ (function () { + function TraceStateImpl(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + TraceStateImpl.prototype.set = function (key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. 
+ var traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + }; + TraceStateImpl.prototype.unset = function (key) { + var traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + }; + TraceStateImpl.prototype.get = function (key) { + return this._internalState.get(key); + }; + TraceStateImpl.prototype.serialize = function () { + var _this = this; + return this._keys() + .reduce(function (agg, key) { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + _this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + }; + TraceStateImpl.prototype._parse = function (rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce(function (agg, part) { + var listMember = part.trim(); // Optional Whitespace (OWS) handling + var i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + var key = listMember.slice(0, i); + var value = listMember.slice(i + 1, part.length); + if (validateKey(key) && validateValue(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + }; + TraceStateImpl.prototype._keys = function () { + return Array.from(this._internalState.keys()).reverse(); + }; + TraceStateImpl.prototype._clone = function () { + var traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + }; + return TraceStateImpl; +}()); +export { TraceStateImpl }; +//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map new file mode 100644 index 0000000..102cb4f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracestate-impl.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-impl.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAErE,IAAM,qBAAqB,GAAG,EAAE,CAAC;AACjC,IAAM,mBAAmB,GAAG,GAAG,CAAC;AAChC,IAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,IAAM,8BAA8B,GAAG,GAAG,CAAC;AAE3C;;;;;;;;GAQG;AACH;IAGE,wBAAY,aAAsB;QAF1B,mBAAc,GAAwB,IAAI,GAAG,EAAE,CAAC;QAGtD,IAAI,aAAa;YAAE,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;IAChD,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW,EAAE,KAAa;QAC5B,4DAA4D;QAC5D,sBAAsB;QACtB,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,IAAI,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YACtC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACvC;QACD,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,8BAAK,GAAL,UAAM,GAAW;QACf,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW;QACb,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;IAED,kCAAS,GAAT;QAAA,iBAOC;QANC,OAAO,IAAI,CAAC,KAAK,EAAE;aAChB,MAAM,CAAC,UAAC,GAAa,EAAE,GAAG;YACzB,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,8BAA8B,GAAG,KAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC/D,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;aACL,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAClC,CAAC;IAEO,+BAAM,GAAd,UAAe,aAAqB;QAClC,IAAI,aAAa,CAAC,MAAM,GAAG,mBAAmB;YAAE,OAAO;QACvD,IAAI,CAAC,cAAc,GAAG,aAAa;aAChC,KAAK,CAAC,sBAAsB,CAAC;aAC7B,OAAO,EAAE,CAAC,2EAA2E;aACrF,MAAM,CAAC,UAAC,GAAwB,EAAE,IAAY;YAC7C,IAAM,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,qCAAqC;YACrE,IAAM,CAAC,GAAG,UAAU,CAAC,OAAO,CAAC,8BAA8B,CAAC,CAAC;YAC7D,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;gBACZ,IAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACnC,IAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;gBACnD,IAAI,WAAW,CAAC,GAAG,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,EAAE;oBAC5C,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBACrB;qBAAM;oBACL,oCAAoC;iBACrC;aACF;YACD,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;QAEhB,gFAAgF;QAChF,IAAI,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,qBAAqB,EAAE;YACpD,IAAI,CAAC,cAAc,GAAG,IAAI,GAAG,CAC3B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,CAAC;iBACtC,OAAO,EAAE,CAAC,sDAAsD;iBAChE,KAAK,CAAC,CAAC,EAAE,qBAAqB,CAAC,CACnC,CAAC;SACH;IACH,CAAC;IAEO,8BAAK,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;IAC1D,CAAC;IAEO,+BAAM,GAAd;QACE,IAAM,UAAU,GAAG,IAAI,cAAc,EAAE,CAAC;QACxC,UAAU,CAAC,cAAc,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QACzD,OAAO,UAAU,CAAC;IACpB,CAAC;IACH,qBAAC;AAAD,CAAC,AA5ED,IA4EC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { validateKey, validateValue } from './tracestate-validators';\n\nconst MAX_TRACE_STATE_ITEMS = 32;\nconst MAX_TRACE_STATE_LEN = 512;\nconst LIST_MEMBERS_SEPARATOR = ',';\nconst LIST_MEMBER_KEY_VALUE_SPLITTER = '=';\n\n/**\n * TraceState must be a class and not 
a simple object type because of the spec\n * requirement (https://www.w3.org/TR/trace-context/#tracestate-field).\n *\n * Here is the list of allowed mutations:\n * - New key-value pair should be added into the beginning of the list\n * - The value of any key can be updated. Modified keys MUST be moved to the\n * beginning of the list.\n */\nexport class TraceStateImpl implements TraceState {\n private _internalState: Map = new Map();\n\n constructor(rawTraceState?: string) {\n if (rawTraceState) this._parse(rawTraceState);\n }\n\n set(key: string, value: string): TraceStateImpl {\n // TODO: Benchmark the different approaches(map vs list) and\n // use the faster one.\n const traceState = this._clone();\n if (traceState._internalState.has(key)) {\n traceState._internalState.delete(key);\n }\n traceState._internalState.set(key, value);\n return traceState;\n }\n\n unset(key: string): TraceStateImpl {\n const traceState = this._clone();\n traceState._internalState.delete(key);\n return traceState;\n }\n\n get(key: string): string | undefined {\n return this._internalState.get(key);\n }\n\n serialize(): string {\n return this._keys()\n .reduce((agg: string[], key) => {\n agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key));\n return agg;\n }, [])\n .join(LIST_MEMBERS_SEPARATOR);\n }\n\n private _parse(rawTraceState: string) {\n if (rawTraceState.length > MAX_TRACE_STATE_LEN) return;\n this._internalState = rawTraceState\n .split(LIST_MEMBERS_SEPARATOR)\n .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning\n .reduce((agg: Map, part: string) => {\n const listMember = part.trim(); // Optional Whitespace (OWS) handling\n const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER);\n if (i !== -1) {\n const key = listMember.slice(0, i);\n const value = listMember.slice(i + 1, part.length);\n if (validateKey(key) && validateValue(value)) {\n agg.set(key, value);\n } else {\n // TODO: Consider to add warning log\n }\n }\n return agg;\n }, new Map());\n\n // Because of the reverse() requirement, trunc must be done after map is created\n if (this._internalState.size > MAX_TRACE_STATE_ITEMS) {\n this._internalState = new Map(\n Array.from(this._internalState.entries())\n .reverse() // Use reverse same as original tracestate parse chain\n .slice(0, MAX_TRACE_STATE_ITEMS)\n );\n }\n }\n\n private _keys(): string[] {\n return Array.from(this._internalState.keys()).reverse();\n }\n\n private _clone(): TraceStateImpl {\n const traceState = new TraceStateImpl();\n traceState._internalState = new Map(this._internalState);\n return traceState;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts new file mode 100644 index 0000000..4917f99 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts @@ -0,0 +1,15 @@ +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
+ * see https://www.w3.org/TR/trace-context/#key + */ +export declare function validateKey(key: string): boolean; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +export declare function validateValue(value: string): boolean; +//# sourceMappingURL=tracestate-validators.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js new file mode 100644 index 0000000..1d3f14b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +var VALID_KEY = "[a-z]" + VALID_KEY_CHAR_RANGE + "{0,255}"; +var VALID_VENDOR_KEY = "[a-z0-9]" + VALID_KEY_CHAR_RANGE + "{0,240}@[a-z]" + VALID_KEY_CHAR_RANGE + "{0,13}"; +var VALID_KEY_REGEX = new RegExp("^(?:" + VALID_KEY + "|" + VALID_VENDOR_KEY + ")$"); +var VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +var INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. + * see https://www.w3.org/TR/trace-context/#key + */ +export function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. 
+ */ +export function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map new file mode 100644 index 0000000..72899fc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracestate-validators.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-validators.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,IAAM,oBAAoB,GAAG,cAAc,CAAC;AAC5C,IAAM,SAAS,GAAG,UAAQ,oBAAoB,YAAS,CAAC;AACxD,IAAM,gBAAgB,GAAG,aAAW,oBAAoB,qBAAgB,oBAAoB,WAAQ,CAAC;AACrG,IAAM,eAAe,GAAG,IAAI,MAAM,CAAC,SAAO,SAAS,SAAI,gBAAgB,OAAI,CAAC,CAAC;AAC7E,IAAM,sBAAsB,GAAG,qBAAqB,CAAC;AACrD,IAAM,+BAA+B,GAAG,KAAK,CAAC;AAE9C;;;;;;;GAOG;AACH,MAAM,UAAU,WAAW,CAAC,GAAW;IACrC,OAAO,eAAe,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,aAAa,CAAC,KAAa;IACzC,OAAO,CACL,sBAAsB,CAAC,IAAI,CAAC,KAAK,CAAC;QAClC,CAAC,+BAA+B,CAAC,IAAI,CAAC,KAAK,CAAC,CAC7C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]';\nconst VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`;\nconst VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`;\nconst VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`);\nconst VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/;\nconst INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/;\n\n/**\n * Key is opaque string up to 256 characters printable. It MUST begin with a\n * lowercase letter, and can only contain lowercase letters a-z, digits 0-9,\n * underscores _, dashes -, asterisks *, and forward slashes /.\n * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the\n * vendor name. 
Vendors SHOULD set the tenant ID at the beginning of the key.\n * see https://www.w3.org/TR/trace-context/#key\n */\nexport function validateKey(key: string): boolean {\n return VALID_KEY_REGEX.test(key);\n}\n\n/**\n * Value is opaque string up to 256 characters printable ASCII RFC0020\n * characters (i.e., the range 0x20 to 0x7E) except comma , and =.\n */\nexport function validateValue(value: string): boolean {\n return (\n VALID_VALUE_BASE_REGEX.test(value) &&\n !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts new file mode 100644 index 0000000..e3b51fe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts @@ -0,0 +1,3 @@ +import { TraceState } from '../trace_state'; +export declare function createTraceState(rawTraceState?: string): TraceState; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js new file mode 100644 index 0000000..feea469 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { TraceStateImpl } from './tracestate-impl'; +export function createTraceState(rawTraceState) { + return new TraceStateImpl(rawTraceState); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map new file mode 100644 index 0000000..91ba3d1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../../src/trace/internal/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAEnD,MAAM,UAAU,gBAAgB,CAAC,aAAsB;IACrD,OAAO,IAAI,cAAc,CAAC,aAAa,CAAC,CAAC;AAC3C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { TraceStateImpl } from './tracestate-impl';\n\nexport function createTraceState(rawTraceState?: string): TraceState {\n return new TraceStateImpl(rawTraceState);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts new file mode 100644 index 0000000..e32dab9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts @@ -0,0 +1,5 @@ +import { SpanContext } from './span_context'; +export declare const INVALID_SPANID = "0000000000000000"; +export declare const INVALID_TRACEID = "00000000000000000000000000000000"; +export declare const INVALID_SPAN_CONTEXT: SpanContext; +//# sourceMappingURL=invalid-span-constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js new file mode 100644 index 0000000..36dc1d6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js @@ -0,0 +1,24 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { TraceFlags } from './trace_flags'; +export var INVALID_SPANID = '0000000000000000'; +export var INVALID_TRACEID = '00000000000000000000000000000000'; +export var INVALID_SPAN_CONTEXT = { + traceId: INVALID_TRACEID, + spanId: INVALID_SPANID, + traceFlags: TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map new file mode 100644 index 0000000..970a3f4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"invalid-span-constants.js","sourceRoot":"","sources":["../../../src/trace/invalid-span-constants.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,MAAM,CAAC,IAAM,cAAc,GAAG,kBAAkB,CAAC;AACjD,MAAM,CAAC,IAAM,eAAe,GAAG,kCAAkC,CAAC;AAClE,MAAM,CAAC,IAAM,oBAAoB,GAAgB;IAC/C,OAAO,EAAE,eAAe;IACxB,MAAM,EAAE,cAAc;IACtB,UAAU,EAAE,UAAU,CAAC,IAAI;CAC5B,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanContext } from './span_context';\nimport { TraceFlags } from './trace_flags';\n\nexport const INVALID_SPANID = '0000000000000000';\nexport const INVALID_TRACEID = '00000000000000000000000000000000';\nexport const INVALID_SPAN_CONTEXT: SpanContext = {\n traceId: INVALID_TRACEID,\n spanId: INVALID_SPANID,\n traceFlags: TraceFlags.NONE,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts new file mode 100644 index 0000000..8fc0106 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts @@ -0,0 +1,26 @@ +import { SpanAttributes } from './attributes'; +import { SpanContext } from './span_context'; +/** + * A pointer from the current {@link Span} to another span in the same trace or + * in a different trace. + * Few examples of Link usage. + * 1. Batch Processing: A batch of elements may contain elements associated + * with one or more traces/spans. Since there can only be one parent + * SpanContext, Link is used to keep reference to SpanContext of all + * elements in the batch. + * 2. Public Endpoint: A SpanContext in incoming client request on a public + * endpoint is untrusted from service provider perspective. In such case it + * is advisable to start a new trace with appropriate sampling decision. + * However, it is desirable to associate incoming SpanContext to new trace + * initiated on service provider side so two traces (from Client and from + * Service Provider) can be correlated. + */ +export interface Link { + /** The {@link SpanContext} of a linked span. */ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. 
*/ + attributes?: SpanAttributes; + /** Count of attributes of the link that were dropped due to collection limits */ + droppedAttributesCount?: number; +} +//# sourceMappingURL=link.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.js b/node_modules/@opentelemetry/api/build/esm/trace/link.js new file mode 100644 index 0000000..7c8accb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.js.map b/node_modules/@opentelemetry/api/build/esm/trace/link.js.map new file mode 100644 index 0000000..c10b714 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.js.map @@ -0,0 +1 @@ +{"version":3,"file":"link.js","sourceRoot":"","sources":["../../../src/trace/link.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { SpanContext } from './span_context';\n\n/**\n * A pointer from the current {@link Span} to another span in the same trace or\n * in a different trace.\n * Few examples of Link usage.\n * 1. Batch Processing: A batch of elements may contain elements associated\n * with one or more traces/spans. Since there can only be one parent\n * SpanContext, Link is used to keep reference to SpanContext of all\n * elements in the batch.\n * 2. Public Endpoint: A SpanContext in incoming client request on a public\n * endpoint is untrusted from service provider perspective. In such case it\n * is advisable to start a new trace with appropriate sampling decision.\n * However, it is desirable to associate incoming SpanContext to new trace\n * initiated on service provider side so two traces (from Client and from\n * Service Provider) can be correlated.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n /** A set of {@link SpanAttributes} on the link. 
*/\n attributes?: SpanAttributes;\n /** Count of attributes of the link that were dropped due to collection limits */\n droppedAttributesCount?: number;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts new file mode 100644 index 0000000..c5f2814 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts @@ -0,0 +1,101 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes, SpanAttributeValue } from './attributes'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. + * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key the key for this attribute. + * @param value the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name the name of the event. + * @param [attributesOrStartTime] the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is {@type TimeInput} and 3rd param is undefined + * @param [startTime] start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name the Span name. + */ + updateName(name: string): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param [endTime] the time to set as Span's end time. 
If not provided, + * use the current time as the span's end time. + */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception the exception the only accepted values are string or Error + * @param [time] the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; +} +//# sourceMappingURL=span.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.js b/node_modules/@opentelemetry/api/build/esm/trace/span.js new file mode 100644 index 0000000..f41c7f6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span.js.map new file mode 100644 index 0000000..0a4f102 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span.js","sourceRoot":"","sources":["../../../src/trace/span.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes, SpanAttributeValue } from './attributes';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. 
A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key the key for this attribute.\n * @param value the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n\n /**\n * Sets attributes to the span.\n *\n * @param attributes the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n\n /**\n * Adds an event to the Span.\n *\n * @param name the name of the event.\n * @param [attributesOrStartTime] the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is {@type TimeInput} and 3rd param is undefined\n * @param [startTime] start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name the Span name.\n */\n updateName(name: string): this;\n\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param [endTime] the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception the exception the only accepted values are string or Error\n * @param [time] the time to set as Span's event time. 
If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts new file mode 100644 index 0000000..f30933a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts @@ -0,0 +1,53 @@ +import { TraceState } from './trace_state'; +/** + * A SpanContext represents the portion of a {@link Span} which must be + * serialized and propagated along side of a {@link Baggage}. + */ +export interface SpanContext { + /** + * The ID of the trace that this span belongs to. It is worldwide unique + * with practically sufficient probability by being made as 16 randomly + * generated bytes, encoded as a 32 lowercase hex characters corresponding to + * 128 bits. + */ + traceId: string; + /** + * The ID of the Span. It is globally unique with practically sufficient + * probability by being made as 8 randomly generated bytes, encoded as a 16 + * lowercase hex characters corresponding to 64 bits. + */ + spanId: string; + /** + * Only true if the SpanContext was propagated from a remote parent. + */ + isRemote?: boolean; + /** + * Trace flags to propagate. + * + * It is represented as 1 byte (bitmap). Bit to represent whether trace is + * sampled or not. When set, the least significant bit documents that the + * caller may have recorded trace data. A caller who does not record trace + * data out-of-band leaves this flag unset. + * + * see {@link TraceFlags} for valid flag values. + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} +//# sourceMappingURL=span_context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.js b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js new file mode 100644 index 0000000..1bb88b0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map new file mode 100644 index 0000000..dbf0bfe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_context.js","sourceRoot":"","sources":["../../../src/trace/span_context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from './trace_state';\n\n/**\n * A SpanContext represents the portion of a {@link Span} which must be\n * serialized and propagated along side of a {@link Baggage}.\n */\nexport interface SpanContext {\n /**\n * The ID of the trace that this span belongs to. It is worldwide unique\n * with practically sufficient probability by being made as 16 randomly\n * generated bytes, encoded as a 32 lowercase hex characters corresponding to\n * 128 bits.\n */\n traceId: string;\n /**\n * The ID of the Span. It is globally unique with practically sufficient\n * probability by being made as 8 randomly generated bytes, encoded as a 16\n * lowercase hex characters corresponding to 64 bits.\n */\n spanId: string;\n /**\n * Only true if the SpanContext was propagated from a remote parent.\n */\n isRemote?: boolean;\n /**\n * Trace flags to propagate.\n *\n * It is represented as 1 byte (bitmap). Bit to represent whether trace is\n * sampled or not. When set, the least significant bit documents that the\n * caller may have recorded trace data. A caller who does not record trace\n * data out-of-band leaves this flag unset.\n *\n * see {@link TraceFlags} for valid flag values.\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts new file mode 100644 index 0000000..a89846f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts @@ -0,0 +1,27 @@ +export declare enum SpanKind { + /** Default value. Indicates that the span is used internally. 
*/ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} +//# sourceMappingURL=span_kind.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js new file mode 100644 index 0000000..1119df9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js @@ -0,0 +1,43 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
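SpanKind is normally supplied through SpanOptions when a span is created. A brief sketch, with placeholder names, of marking the client side of an outbound call (the matching server-side handler would use SpanKind.SERVER in its own process):

import { trace, SpanKind } from '@opentelemetry/api';

const tracer = trace.getTracer('example-http-client');

// CLIENT marks this span as the client half of a remote request.
const span = tracer.startSpan('GET /orders', { kind: SpanKind.CLIENT });
// ... perform the outbound request ...
span.end();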
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind || (SpanKind = {})); +//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map new file mode 100644 index 0000000..deb6be7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_kind.js","sourceRoot":"","sources":["../../../src/trace/span_kind.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAN,IAAY,QA6BX;AA7BD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IAEZ;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;;OAIG;IACH,+CAAY,CAAA;IAEZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EA7BW,QAAQ,KAAR,QAAQ,QA6BnB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts new file mode 100644 index 0000000..f191111 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts @@ -0,0 +1,17 @@ +import { Span } from './span'; +import { SpanContext } from './span_context'; +export declare function isValidTraceId(traceId: string): boolean; +export declare function isValidSpanId(spanId: string): boolean; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export declare function isSpanContextValid(spanContext: SpanContext): boolean; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export declare function wrapSpanContext(spanContext: SpanContext): Span; +//# sourceMappingURL=spancontext-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js new file mode 100644 index 0000000..88545bb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants'; +import { NonRecordingSpan } from './NonRecordingSpan'; +var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +export function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID; +} +export function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID; +} +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
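A short sketch of the validation and wrapping helpers declared above, assuming they are re-exported from the package root and on the `trace` singleton as in current releases of `@opentelemetry/api`; the hex identifiers are made-up examples:

import { isSpanContextValid, trace, TraceFlags, SpanContext } from '@opentelemetry/api';

// A SpanContext received out-of-band, e.g. parsed from a traceparent header.
const remote: SpanContext = {
  traceId: '4bf92f3577b34da6a3ce929d0e0e4736',
  spanId: '00f067aa0ba902b7',
  traceFlags: TraceFlags.SAMPLED,
  isRemote: true,
};

if (isSpanContextValid(remote)) {
  // wrapSpanContext yields a non-recording Span usable as a parent reference.
  const parent = trace.wrapSpanContext(remote);
  console.log(parent.spanContext().traceId);
}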
+ */ +export function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export function wrapSpanContext(spanContext) { + return new NonRecordingSpan(spanContext); +} +//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map new file mode 100644 index 0000000..a4dc6c2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"spancontext-utils.js","sourceRoot":"","sources":["../../../src/trace/spancontext-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC3E,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD,IAAM,mBAAmB,GAAG,mBAAmB,CAAC;AAChD,IAAM,kBAAkB,GAAG,iBAAiB,CAAC;AAE7C,MAAM,UAAU,cAAc,CAAC,OAAe;IAC5C,OAAO,mBAAmB,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,OAAO,KAAK,eAAe,CAAC;AAC1E,CAAC;AAED,MAAM,UAAU,aAAa,CAAC,MAAc;IAC1C,OAAO,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,MAAM,KAAK,cAAc,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,kBAAkB,CAAC,WAAwB;IACzD,OAAO,CACL,cAAc,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CACzE,CAAC;AACJ,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,WAAwB;IACtD,OAAO,IAAI,gBAAgB,CAAC,WAAW,CAAC,CAAC;AAC3C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\n\nconst VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i;\nconst VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i;\n\nexport function isValidTraceId(traceId: string): boolean {\n return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID;\n}\n\nexport function isValidSpanId(spanId: string): boolean {\n return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID;\n}\n\n/**\n * Returns true if this {@link SpanContext} is valid.\n * @return true if this {@link SpanContext} is valid.\n */\nexport function isSpanContextValid(spanContext: SpanContext): boolean {\n return (\n isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)\n );\n}\n\n/**\n * Wrap the given {@link SpanContext} in a new non-recording {@link Span}\n *\n * @param spanContext span context to be wrapped\n * @returns a new non-recording {@link Span} with the provided context\n */\nexport function wrapSpanContext(spanContext: SpanContext): Span {\n return new NonRecordingSpan(spanContext);\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts new file mode 100644 index 0000000..ab19a68 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts @@ -0,0 +1,25 @@ +export interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} +/** + * An enumeration of status codes. + */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} +//# sourceMappingURL=status.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/status.js b/node_modules/@opentelemetry/api/build/esm/trace/status.js new file mode 100644 index 0000000..5ee55e4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.js @@ -0,0 +1,20 @@ +/** + * An enumeration of status codes. + */ +export var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode || (SpanStatusCode = {})); +//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/status.js.map b/node_modules/@opentelemetry/api/build/esm/trace/status.js.map new file mode 100644 index 0000000..af7e7d7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.js.map @@ -0,0 +1 @@ +{"version":3,"file":"status.js","sourceRoot":"","sources":["../../../src/trace/status.ts"],"names":[],"mappings":"AAsBA;;GAEG;AACH,MAAM,CAAN,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,KAAd,cAAc,QAczB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. 
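A sketch of applying SpanStatus as declared above; UNSET is the default, and a message is typically attached only together with ERROR (placeholder names throughout):

import { trace, SpanStatusCode } from '@opentelemetry/api';

const span = trace.getTracer('example').startSpan('charge-card');
try {
  // ... attempt the operation ...
  span.setStatus({ code: SpanStatusCode.OK });
} catch (err) {
  span.setStatus({ code: SpanStatusCode.ERROR, message: (err as Error).message });
} finally {
  span.end();
}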
*/\n message?: string;\n}\n\n/**\n * An enumeration of status codes.\n */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts new file mode 100644 index 0000000..11288ba --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts @@ -0,0 +1,7 @@ +export declare enum TraceFlags { + /** Represents no flag set. */ + NONE = 0, + /** Bit to represent whether trace is sampled in trace flags. */ + SAMPLED = 1 +} +//# sourceMappingURL=trace_flags.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js new file mode 100644 index 0000000..8a7b000 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. */ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags || (TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map new file mode 100644 index 0000000..2ea8680 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_flags.js","sourceRoot":"","sources":["../../../src/trace/trace_flags.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAN,IAAY,UAKX;AALD,WAAY,UAAU;IACpB,8BAA8B;IAC9B,2CAAU,CAAA;IACV,gEAAgE;IAChE,iDAAkB,CAAA;AACpB,CAAC,EALW,UAAU,KAAV,UAAU,QAKrB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum TraceFlags {\n /** Represents no flag set. */\n NONE = 0x0,\n /** Bit to represent whether trace is sampled in trace flags. 
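Because TraceFlags is a one-byte bit field, the sampled bit is read with a bitwise AND; a small sketch:

import { trace, TraceFlags } from '@opentelemetry/api';

const span = trace.getTracer('example').startSpan('op');
const { traceFlags } = span.spanContext();

// SAMPLED is the least significant bit of the flags byte.
const isSampled = (traceFlags & TraceFlags.SAMPLED) === TraceFlags.SAMPLED;
console.log(isSampled ? 'sampled' : 'not sampled');
span.end();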
*/\n SAMPLED = 0x1 << 0,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts new file mode 100644 index 0000000..f275b8b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts @@ -0,0 +1,38 @@ +export interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key key of the TraceState entry. + * @param value value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. + */ + serialize(): string; +} +//# sourceMappingURL=trace_state.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js new file mode 100644 index 0000000..a6c368f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
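TraceState is surfaced on a SpanContext as an immutable key/value list; a sketch of reading and deriving a new value, reusing the `rojo`/`congo` keys from the tracestate examples earlier (with the no-op provider, traceState is usually undefined):

import { trace } from '@opentelemetry/api';

const spanContext = trace.getTracer('example').startSpan('op').spanContext();

const ts = spanContext.traceState;
if (ts) {
  console.log(ts.get('rojo'));                      // read one list-member
  const updated = ts.set('congo', 't61rcWkgMzE');   // set() returns a new TraceState
  console.log(updated.serialize());                 // e.g. "congo=t61rcWkgMzE,rojo=..."
}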
+ */ +export {}; +//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map new file mode 100644 index 0000000..64a3d7a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_state.js","sourceRoot":"","sources":["../../../src/trace/trace_state.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key key of the TraceState entry.\n * @param value value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n\n // TODO: Consider to add support for merging an object as well by also\n // accepting a single internalTraceState argument similar to the constructor.\n\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts new file mode 100644 index 0000000..2509089 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts @@ -0,0 +1,71 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +/** + * Tracer provides an interface for creating {@link Span}s. + */ +export interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method do NOT modify the current Context. 
+ * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @returns Span The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally the new span gets set in context and this context is activated + * for the duration of the function call. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.startActiveSpan('op', span => { + * try { + * do some work + * span.setStatus({code: SpanStatusCode.OK}); + * return something; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } finally { + * span.end(); + * } + * }); + * + * @example + * const span = tracer.startActiveSpan('op', span => { + * try { + * do some work + * return span; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } + * }); + * do some more work + * span.end(); + */ + startActiveSpan<F extends (span: Span) => unknown>(name: string, fn: F): ReturnType<F>; + startActiveSpan<F extends (span: Span) => unknown>(name: string, options: SpanOptions, fn: F): ReturnType<F>; + startActiveSpan<F extends (span: Span) => unknown>(name: string, options: SpanOptions, context: Context, fn: F): ReturnType<F>; +} +//# sourceMappingURL=tracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js new file mode 100644 index 0000000..ad066dc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
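Beyond the @example blocks above, the practical difference between the two methods is that startSpan leaves the active context untouched while startActiveSpan makes the new span current for the duration of the callback; a sketch assuming an SDK has registered a context manager:

import { trace, context } from '@opentelemetry/api';

const tracer = trace.getTracer('example');

// startSpan: the caller manages context propagation explicitly.
const parent = tracer.startSpan('parent');
context.with(trace.setSpan(context.active(), parent), () => {
  // work executed here sees `parent` as the active span
});
parent.end();

// startActiveSpan: span creation, context activation and the callback are combined.
tracer.startActiveSpan('child', span => {
  try {
    // trace.getActiveSpan() === span inside this callback
  } finally {
    span.end();
  }
});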
+ */ +export {}; +//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map new file mode 100644 index 0000000..77f6ae9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer.js","sourceRoot":"","sources":["../../../src/trace/tracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\n\n/**\n * Tracer provides an interface for creating {@link Span}s.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. Start the span without setting it on context.\n *\n * This method do NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @returns Span The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally the new span gets set in context and this context is activated\n * for the duration of the function call.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * span.setStatus({code: SpanStatusCode.OK});\n * return something;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * } finally {\n * span.end();\n * }\n * });\n *\n * @example\n * const span = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * return span;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * }\n * });\n * do some more work\n * span.end();\n */\n startActiveSpan unknown>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts new file mode 100644 index 0000000..f3bbccf 
--- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts @@ -0,0 +1,10 @@ +/** + * An interface describes additional metadata of a tracer. + */ +export interface TracerOptions { + /** + * The schemaUrl of the tracer or instrumentation library + */ + schemaUrl?: string; +} +//# sourceMappingURL=tracer_options.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js new file mode 100644 index 0000000..470a3a7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map new file mode 100644 index 0000000..70365af --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_options.js","sourceRoot":"","sources":["../../../src/trace/tracer_options.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * An interface describes additional metadata of a tracer.\n */\nexport interface TracerOptions {\n /**\n * The schemaUrl of the tracer or instrumentation library\n */\n schemaUrl?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts new file mode 100644 index 0000000..9b2f7a9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts @@ -0,0 +1,21 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * A registry for creating named {@link Tracer}s. + */ +export interface TracerProvider { + /** + * Returns a Tracer, creating one if one with the given name and version is + * not already created. + * + * This function may return different Tracer types (e.g. + * {@link NoopTracerProvider} vs. a functional tracer). + * + * @param name The name of the tracer or instrumentation library. 
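A sketch of passing TracerOptions through a provider's getTracer, as documented here; the schema URL and names are illustrative values:

import { trace } from '@opentelemetry/api';

// The globally registered TracerProvider is reached via the trace API singleton.
const tracer = trace
  .getTracerProvider()
  .getTracer('example-instrumentation', '0.1.0', {
    schemaUrl: 'https://opentelemetry.io/schemas/1.21.0',
  });

tracer.startSpan('op').end();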
+ * @param version The version of the tracer or instrumentation library. + * @param options The options of the tracer or instrumentation library. + * @returns Tracer A Tracer with the given name and version + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; +} +//# sourceMappingURL=tracer_provider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js new file mode 100644 index 0000000..adf432a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map new file mode 100644 index 0000000..bfc1cbd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_provider.js","sourceRoot":"","sources":["../../../src/trace/tracer_provider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\n/**\n * A registry for creating named {@link Tracer}s.\n */\nexport interface TracerProvider {\n /**\n * Returns a Tracer, creating one if one with the given name and version is\n * not already created.\n *\n * This function may return different Tracer types (e.g.\n * {@link NoopTracerProvider} vs. 
a functional tracer).\n *\n * @param name The name of the tracer or instrumentation library.\n * @param version The version of the tracer or instrumentation library.\n * @param options The options of the tracer or instrumentation library.\n * @returns Tracer A Tracer with the given name and version\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.d.ts b/node_modules/@opentelemetry/api/build/esm/version.d.ts new file mode 100644 index 0000000..5088f1b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.d.ts @@ -0,0 +1,2 @@ +export declare const VERSION = "1.8.0"; +//# sourceMappingURL=version.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.js b/node_modules/@opentelemetry/api/build/esm/version.js new file mode 100644 index 0000000..ba175da --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.js @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// this is autogenerated file, see scripts/version-update.js +export var VERSION = '1.8.0'; +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.js.map b/node_modules/@opentelemetry/api/build/esm/version.js.map new file mode 100644 index 0000000..9b04da1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.js.map @@ -0,0 +1 @@ +{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,4DAA4D;AAC5D,MAAM,CAAC,IAAM,OAAO,GAAG,OAAO,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// this is autogenerated file, see scripts/version-update.js\nexport const VERSION = '1.8.0';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/context.d.ts b/node_modules/@opentelemetry/api/build/esnext/api/context.d.ts new file mode 100644 index 0000000..61caee8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/context.d.ts @@ -0,0 +1,41 @@ +import { Context, ContextManager } from '../context/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare class ContextAPI { + private static _instance?; + /** Empty 
private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Context API */ + static getInstance(): ContextAPI; + /** + * Set the current context manager. + * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager: ContextManager): boolean; + /** + * Get the currently active context + */ + active(): Context; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with<A extends unknown[], F extends (...args: A) => ReturnType<F>>(context: Context, fn: F, thisArg?: ThisParameterType<F>, ...args: A): ReturnType<F>; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind<T>(context: Context, target: T): T; + private _getContextManager; + /** Disable and remove the global context manager */ + disable(): void; +} +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/context.js b/node_modules/@opentelemetry/api/build/esnext/api/context.js new file mode 100644 index 0000000..c672a41 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/context.js @@ -0,0 +1,77 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopContextManager } from '../context/NoopContextManager'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { DiagAPI } from './diag'; +const API_NAME = 'context'; +const NOOP_CONTEXT_MANAGER = new NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export class ContextAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { } + /** Get the singleton instance of the Context API */ + static getInstance() { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + } + /** + * Set the current context manager. 
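A sketch of the three core operations on the ContextAPI singleton declared above; bind is mainly useful when a callback will run later, outside the current with scope:

import { context, trace } from '@opentelemetry/api';

const span = trace.getTracer('example').startSpan('op');
const ctx = trace.setSpan(context.active(), span);

// with(): run a function with `ctx` active, restoring the previous context afterwards.
context.with(ctx, () => {
  console.log(trace.getActiveSpan() === span); // true once a real context manager is registered
});

// bind(): capture `ctx` now so the callback still sees it whenever it runs.
const boundCallback = context.bind(ctx, () => trace.getActiveSpan());
setTimeout(() => {
  boundCallback();
  span.end();
}, 100);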
+ * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager) { + return registerGlobal(API_NAME, contextManager, DiagAPI.instance()); + } + /** + * Get the currently active context + */ + active() { + return this._getContextManager().active(); + } + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with(context, fn, thisArg, ...args) { + return this._getContextManager().with(context, fn, thisArg, ...args); + } + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind(context, target) { + return this._getContextManager().bind(context, target); + } + _getContextManager() { + return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; + } + /** Disable and remove the global context manager */ + disable() { + this._getContextManager().disable(); + unregisterGlobal(API_NAME, DiagAPI.instance()); + } +} +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/context.js.map b/node_modules/@opentelemetry/api/build/esnext/api/context.js.map new file mode 100644 index 0000000..5ad9a01 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/api/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAC;AAEnE,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,MAAM,QAAQ,GAAG,SAAS,CAAC;AAC3B,MAAM,oBAAoB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEtD;;GAEG;AACH,MAAM,OAAO,UAAU;IAGrB,+FAA+F;IAC/F,gBAAuB,CAAC;IAExB,oDAAoD;IAC7C,MAAM,CAAC,WAAW;QACvB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,uBAAuB,CAAC,cAA8B;QAC3D,OAAO,cAAc,CAAC,QAAQ,EAAE,cAAc,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACI,MAAM;QACX,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC5C,CAAC;IAED;;;;;;;OAOG;IACI,IAAI,CACT,OAAgB,EAChB,EAAK,EACL,OAA8B,EAC9B,GAAG,IAAO;QAEV,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;IACvE,CAAC;IAED;;;;;OAKG;IACI,IAAI,CAAI,OAAgB,EAAE,MAAS;QACxC,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,CAAC;IAEO,kBAAkB;QACxB,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,oBAAoB,CAAC;IACrD,CAAC;IAED,oDAAoD;IAC7C,OAAO;QACZ,IAAI,CAAC,kBAAkB,EAAE,CAAC,OAAO,EAAE,CAAC;QACpC,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing 
permissions and\n * limitations under the License.\n */\n\nimport { NoopContextManager } from '../context/NoopContextManager';\nimport { Context, ContextManager } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'context';\nconst NOOP_CONTEXT_MANAGER = new NoopContextManager();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport class ContextAPI {\n private static _instance?: ContextAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Context API */\n public static getInstance(): ContextAPI {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current context manager.\n *\n * @returns true if the context manager was successfully registered, else false\n */\n public setGlobalContextManager(contextManager: ContextManager): boolean {\n return registerGlobal(API_NAME, contextManager, DiagAPI.instance());\n }\n\n /**\n * Get the currently active context\n */\n public active(): Context {\n return this._getContextManager().active();\n }\n\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n public with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return this._getContextManager().with(context, fn, thisArg, ...args);\n }\n\n /**\n * Bind a context to a target function or event emitter\n *\n * @param context context to bind to the event emitter or function. 
Defaults to the currently active context\n * @param target function or event emitter to bind\n */\n public bind(context: Context, target: T): T {\n return this._getContextManager().bind(context, target);\n }\n\n private _getContextManager(): ContextManager {\n return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER;\n }\n\n /** Disable and remove the global context manager */\n public disable() {\n this._getContextManager().disable();\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/diag.d.ts b/node_modules/@opentelemetry/api/build/esnext/api/diag.d.ts new file mode 100644 index 0000000..131db17 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/diag.d.ts @@ -0,0 +1,30 @@ +import { ComponentLoggerOptions, DiagLogFunction, DiagLogger, DiagLoggerApi } from '../diag/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +export declare class DiagAPI implements DiagLogger, DiagLoggerApi { + private static _instance?; + /** Get the singleton instance of the DiagAPI API */ + static instance(): DiagAPI; + /** + * Private internal constructor + * @private + */ + private constructor(); + setLogger: DiagLoggerApi['setLogger']; + /** + * + */ + createComponentLogger: (options: ComponentLoggerOptions) => DiagLogger; + verbose: DiagLogFunction; + debug: DiagLogFunction; + info: DiagLogFunction; + warn: DiagLogFunction; + error: DiagLogFunction; + /** + * Unregister the global logger and return to Noop + */ + disable: () => void; +} +//# sourceMappingURL=diag.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/diag.js b/node_modules/@opentelemetry/api/build/esnext/api/diag.js new file mode 100644 index 0000000..8798a39 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/diag.js @@ -0,0 +1,89 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
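A sketch of wiring the DiagAPI declared above to the built-in console logger; the namespace string is a placeholder, and the exact ComponentLoggerOptions shape is assumed from current `@opentelemetry/api` releases:

import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

// Register a logger for OpenTelemetry's own diagnostics (returns false if rejected).
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
diag.debug('global diagnostics enabled');

// Component loggers prefix output with a namespace for easier filtering.
const logger = diag.createComponentLogger({ namespace: 'example-exporter' });
logger.warn('retrying export');

// Unregister and fall back to the no-op logger.
diag.disable();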
+ */ +import { DiagComponentLogger } from '../diag/ComponentLogger'; +import { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger'; +import { DiagLogLevel, } from '../diag/types'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +const API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +export class DiagAPI { + /** + * Private internal constructor + * @private + */ + constructor() { + function _logProxy(funcName) { + return function (...args) { + const logger = getGlobal('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName](...args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + const self = this; + // DiagAPI specific functions + const setLogger = (logger, optionsOrLogLevel = { logLevel: DiagLogLevel.INFO }) => { + var _a, _b, _c; + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + const err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + if (typeof optionsOrLogLevel === 'number') { + optionsOrLogLevel = { + logLevel: optionsOrLogLevel, + }; + } + const oldLogger = getGlobal('diag'); + const newLogger = createLogLevelDiagLogger((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : DiagLogLevel.INFO, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { + const stack = (_c = new Error().stack) !== null && _c !== void 0 ? 
_c : ''; + oldLogger.warn(`Current logger will be overwritten from ${stack}`); + newLogger.warn(`Current logger will overwrite one already registered from ${stack}`); + } + return registerGlobal('diag', newLogger, self, true); + }; + self.setLogger = setLogger; + self.disable = () => { + unregisterGlobal(API_NAME, self); + }; + self.createComponentLogger = (options) => { + return new DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + static instance() { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + } +} +//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/diag.js.map b/node_modules/@opentelemetry/api/build/esnext/api/diag.js.map new file mode 100644 index 0000000..448d05c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/diag.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag.js","sourceRoot":"","sources":["../../../src/api/diag.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AAC9D,OAAO,EAAE,wBAAwB,EAAE,MAAM,iCAAiC,CAAC;AAC3E,OAAO,EAKL,YAAY,GACb,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAElC,MAAM,QAAQ,GAAG,MAAM,CAAC;AAExB;;;GAGG;AACH,MAAM,OAAO,OAAO;IAYlB;;;OAGG;IACH;QACE,SAAS,SAAS,CAAC,QAA0B;YAC3C,OAAO,UAAU,GAAG,IAAI;gBACtB,MAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;gBACjC,6BAA6B;gBAC7B,IAAI,CAAC,MAAM;oBAAE,OAAO;gBACpB,OAAO,MAAM,CAAC,QAAQ,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;YACnC,CAAC,CAAC;QACJ,CAAC;QAED,mFAAmF;QACnF,MAAM,IAAI,GAAG,IAAI,CAAC;QAElB,6BAA6B;QAE7B,MAAM,SAAS,GAA+B,CAC5C,MAAM,EACN,iBAAiB,GAAG,EAAE,QAAQ,EAAE,YAAY,CAAC,IAAI,EAAE,EACnD,EAAE;;YACF,IAAI,MAAM,KAAK,IAAI,EAAE;gBACnB,mCAAmC;gBACnC,2DAA2D;gBAC3D,qFAAqF;gBACrF,MAAM,GAAG,GAAG,IAAI,KAAK,CACnB,oIAAoI,CACrI,CAAC;gBACF,IAAI,CAAC,KAAK,CAAC,MAAA,GAAG,CAAC,KAAK,mCAAI,GAAG,CAAC,OAAO,CAAC,CAAC;gBACrC,OAAO,KAAK,CAAC;aACd;YAED,IAAI,OAAO,iBAAiB,KAAK,QAAQ,EAAE;gBACzC,iBAAiB,GAAG;oBAClB,QAAQ,EAAE,iBAAiB;iBAC5B,CAAC;aACH;YAED,MAAM,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;YACpC,MAAM,SAAS,GAAG,wBAAwB,CACxC,MAAA,iBAAiB,CAAC,QAAQ,mCAAI,YAAY,CAAC,IAAI,EAC/C,MAAM,CACP,CAAC;YACF,kFAAkF;YAClF,IAAI,SAAS,IAAI,CAAC,iBAAiB,CAAC,uBAAuB,EAAE;gBAC3D,MAAM,KAAK,GAAG,MAAA,IAAI,KAAK,EAAE,CAAC,KAAK,mCAAI,iCAAiC,CAAC;gBACrE,SAAS,CAAC,IAAI,CAAC,2CAA2C,KAAK,EAAE,CAAC,CAAC;gBACnE,SAAS,CAAC,IAAI,CACZ,6DAA6D,KAAK,EAAE,CACrE,CAAC;aACH;YAED,OAAO,cAAc,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QACvD,CAAC,CAAC;QAEF,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,CAAC,OAAO,GAAG,GAAG,EAAE;YAClB,gBAAgB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QACnC,CAAC,CAAC;QAEF,IAAI,CAAC,qBAAqB,GAAG,CAAC,OAA+B,EAAE,EAAE;YAC/D,OAAO,IAAI,mBAAmB,CAAC,OAAO,CAAC,CAAC;QAC1C,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;QACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;QAChC,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAjFD,oDAAoD;IAC7C,MAAM,CAAC,QAAQ;QACpB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,OAAO,EAAE,CAAC;SAChC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;CA+FF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in 
compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagComponentLogger } from '../diag/ComponentLogger';\nimport { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger';\nimport {\n ComponentLoggerOptions,\n DiagLogFunction,\n DiagLogger,\n DiagLoggerApi,\n DiagLogLevel,\n} from '../diag/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\n\nconst API_NAME = 'diag';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry internal\n * diagnostic API\n */\nexport class DiagAPI implements DiagLogger, DiagLoggerApi {\n private static _instance?: DiagAPI;\n\n /** Get the singleton instance of the DiagAPI API */\n public static instance(): DiagAPI {\n if (!this._instance) {\n this._instance = new DiagAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Private internal constructor\n * @private\n */\n private constructor() {\n function _logProxy(funcName: keyof DiagLogger): DiagLogFunction {\n return function (...args) {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) return;\n return logger[funcName](...args);\n };\n }\n\n // Using self local variable for minification purposes as 'this' cannot be minified\n const self = this;\n\n // DiagAPI specific functions\n\n const setLogger: DiagLoggerApi['setLogger'] = (\n logger,\n optionsOrLogLevel = { logLevel: DiagLogLevel.INFO }\n ) => {\n if (logger === self) {\n // There isn't much we can do here.\n // Logging to the console might break the user application.\n // Try to log to self. If a logger was previously registered it will receive the log.\n const err = new Error(\n 'Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'\n );\n self.error(err.stack ?? err.message);\n return false;\n }\n\n if (typeof optionsOrLogLevel === 'number') {\n optionsOrLogLevel = {\n logLevel: optionsOrLogLevel,\n };\n }\n\n const oldLogger = getGlobal('diag');\n const newLogger = createLogLevelDiagLogger(\n optionsOrLogLevel.logLevel ?? DiagLogLevel.INFO,\n logger\n );\n // There already is an logger registered. We'll let it know before overwriting it.\n if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) {\n const stack = new Error().stack ?? 
'';\n oldLogger.warn(`Current logger will be overwritten from ${stack}`);\n newLogger.warn(\n `Current logger will overwrite one already registered from ${stack}`\n );\n }\n\n return registerGlobal('diag', newLogger, self, true);\n };\n\n self.setLogger = setLogger;\n\n self.disable = () => {\n unregisterGlobal(API_NAME, self);\n };\n\n self.createComponentLogger = (options: ComponentLoggerOptions) => {\n return new DiagComponentLogger(options);\n };\n\n self.verbose = _logProxy('verbose');\n self.debug = _logProxy('debug');\n self.info = _logProxy('info');\n self.warn = _logProxy('warn');\n self.error = _logProxy('error');\n }\n\n public setLogger!: DiagLoggerApi['setLogger'];\n /**\n *\n */\n public createComponentLogger!: (\n options: ComponentLoggerOptions\n ) => DiagLogger;\n\n // DiagLogger implementation\n public verbose!: DiagLogFunction;\n public debug!: DiagLogFunction;\n public info!: DiagLogFunction;\n public warn!: DiagLogFunction;\n public error!: DiagLogFunction;\n\n /**\n * Unregister the global logger and return to Noop\n */\n public disable!: () => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/metrics.d.ts b/node_modules/@opentelemetry/api/build/esnext/api/metrics.d.ts new file mode 100644 index 0000000..5adc145 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/metrics.d.ts @@ -0,0 +1,28 @@ +import { Meter, MeterOptions } from '../metrics/Meter'; +import { MeterProvider } from '../metrics/MeterProvider'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Metrics API + */ +export declare class MetricsAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Metrics API */ + static getInstance(): MetricsAPI; + /** + * Set the current global meter provider. + * Returns true if the meter provider was successfully registered, else false. + */ + setGlobalMeterProvider(provider: MeterProvider): boolean; + /** + * Returns the global meter provider. + */ + getMeterProvider(): MeterProvider; + /** + * Returns a meter from the global meter provider. + */ + getMeter(name: string, version?: string, options?: MeterOptions): Meter; + /** Remove the global meter provider */ + disable(): void; +} +//# sourceMappingURL=metrics.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/metrics.js b/node_modules/@opentelemetry/api/build/esnext/api/metrics.js new file mode 100644 index 0000000..7864926 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/metrics.js @@ -0,0 +1,57 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { NOOP_METER_PROVIDER } from '../metrics/NoopMeterProvider'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { DiagAPI } from './diag'; +const API_NAME = 'metrics'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Metrics API + */ +export class MetricsAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { } + /** Get the singleton instance of the Metrics API */ + static getInstance() { + if (!this._instance) { + this._instance = new MetricsAPI(); + } + return this._instance; + } + /** + * Set the current global meter provider. + * Returns true if the meter provider was successfully registered, else false. + */ + setGlobalMeterProvider(provider) { + return registerGlobal(API_NAME, provider, DiagAPI.instance()); + } + /** + * Returns the global meter provider. + */ + getMeterProvider() { + return getGlobal(API_NAME) || NOOP_METER_PROVIDER; + } + /** + * Returns a meter from the global meter provider. + */ + getMeter(name, version, options) { + return this.getMeterProvider().getMeter(name, version, options); + } + /** Remove the global meter provider */ + disable() { + unregisterGlobal(API_NAME, DiagAPI.instance()); + } +} +//# sourceMappingURL=metrics.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/metrics.js.map b/node_modules/@opentelemetry/api/build/esnext/api/metrics.js.map new file mode 100644 index 0000000..03c2872 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/metrics.js.map @@ -0,0 +1 @@ +{"version":3,"file":"metrics.js","sourceRoot":"","sources":["../../../src/api/metrics.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACnE,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,MAAM,QAAQ,GAAG,SAAS,CAAC;AAE3B;;GAEG;AACH,MAAM,OAAO,UAAU;IAGrB,+FAA+F;IAC/F,gBAAuB,CAAC;IAExB,oDAAoD;IAC7C,MAAM,CAAC,WAAW;QACvB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAAC,QAAuB;QACnD,OAAO,cAAc,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAChE,CAAC;IAED;;OAEG;IACI,gBAAgB;QACrB,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,mBAAmB,CAAC;IACpD,CAAC;IAED;;OAEG;IACI,QAAQ,CACb,IAAY,EACZ,OAAgB,EAChB,OAAsB;QAEtB,OAAO,IAAI,CAAC,gBAAgB,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAClE,CAAC;IAED,uCAAuC;IAChC,OAAO;QACZ,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, MeterOptions } from '../metrics/Meter';\nimport { MeterProvider } from '../metrics/MeterProvider';\nimport { NOOP_METER_PROVIDER } from '../metrics/NoopMeterProvider';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from 
'../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'metrics';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Metrics API\n */\nexport class MetricsAPI {\n private static _instance?: MetricsAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Metrics API */\n public static getInstance(): MetricsAPI {\n if (!this._instance) {\n this._instance = new MetricsAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global meter provider.\n * Returns true if the meter provider was successfully registered, else false.\n */\n public setGlobalMeterProvider(provider: MeterProvider): boolean {\n return registerGlobal(API_NAME, provider, DiagAPI.instance());\n }\n\n /**\n * Returns the global meter provider.\n */\n public getMeterProvider(): MeterProvider {\n return getGlobal(API_NAME) || NOOP_METER_PROVIDER;\n }\n\n /**\n * Returns a meter from the global meter provider.\n */\n public getMeter(\n name: string,\n version?: string,\n options?: MeterOptions\n ): Meter {\n return this.getMeterProvider().getMeter(name, version, options);\n }\n\n /** Remove the global meter provider */\n public disable(): void {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/propagation.d.ts b/node_modules/@opentelemetry/api/build/esnext/api/propagation.d.ts new file mode 100644 index 0000000..a22d24d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/propagation.d.ts @@ -0,0 +1,49 @@ +import { Context } from '../context/types'; +import { TextMapGetter, TextMapPropagator, TextMapSetter } from '../propagation/TextMapPropagator'; +import { getBaggage, getActiveBaggage, setBaggage, deleteBaggage } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +export declare class PropagationAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Propagator API */ + static getInstance(): PropagationAPI; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator: TextMapPropagator): boolean; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context: Context, carrier: Carrier, setter?: TextMapSetter): void; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context: Context, carrier: Carrier, getter?: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. 
+ */ + fields(): string[]; + /** Remove the global propagator */ + disable(): void; + createBaggage: typeof createBaggage; + getBaggage: typeof getBaggage; + getActiveBaggage: typeof getActiveBaggage; + setBaggage: typeof setBaggage; + deleteBaggage: typeof deleteBaggage; + private _getGlobalPropagator; +} +//# sourceMappingURL=propagation.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/propagation.js b/node_modules/@opentelemetry/api/build/esnext/api/propagation.js new file mode 100644 index 0000000..93507eb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/propagation.js @@ -0,0 +1,85 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator'; +import { defaultTextMapGetter, defaultTextMapSetter, } from '../propagation/TextMapPropagator'; +import { getBaggage, getActiveBaggage, setBaggage, deleteBaggage, } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +import { DiagAPI } from './diag'; +const API_NAME = 'propagation'; +const NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +export class PropagationAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { + this.createBaggage = createBaggage; + this.getBaggage = getBaggage; + this.getActiveBaggage = getActiveBaggage; + this.setBaggage = setBaggage; + this.deleteBaggage = deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + static getInstance() { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + } + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator) { + return registerGlobal(API_NAME, propagator, DiagAPI.instance()); + } + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context, carrier, setter = defaultTextMapSetter) { + return this._getGlobalPropagator().inject(context, carrier, setter); + } + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context, carrier, getter = defaultTextMapGetter) { + return this._getGlobalPropagator().extract(context, carrier, getter); + } + /** + * Return a list of all fields which may be used by the propagator. 
+ */ + fields() { + return this._getGlobalPropagator().fields(); + } + /** Remove the global propagator */ + disable() { + unregisterGlobal(API_NAME, DiagAPI.instance()); + } + _getGlobalPropagator() { + return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + } +} +//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/propagation.js.map b/node_modules/@opentelemetry/api/build/esnext/api/propagation.js.map new file mode 100644 index 0000000..a0673fe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/propagation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation.js","sourceRoot":"","sources":["../../../src/api/propagation.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,qBAAqB,EAAE,MAAM,sCAAsC,CAAC;AAC7E,OAAO,EACL,oBAAoB,EACpB,oBAAoB,GAIrB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,UAAU,EACV,aAAa,GACd,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,MAAM,QAAQ,GAAG,aAAa,CAAC;AAC/B,MAAM,wBAAwB,GAAG,IAAI,qBAAqB,EAAE,CAAC;AAE7D;;GAEG;AACH,MAAM,OAAO,cAAc;IAGzB,+FAA+F;IAC/F;QA8DO,kBAAa,GAAG,aAAa,CAAC;QAE9B,eAAU,GAAG,UAAU,CAAC;QAExB,qBAAgB,GAAG,gBAAgB,CAAC;QAEpC,eAAU,GAAG,UAAU,CAAC;QAExB,kBAAa,GAAG,aAAa,CAAC;IAtEd,CAAC;IAExB,uDAAuD;IAChD,MAAM,CAAC,WAAW;QACvB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,EAAE,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,mBAAmB,CAAC,UAA6B;QACtD,OAAO,cAAc,CAAC,QAAQ,EAAE,UAAU,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;OAMG;IACI,MAAM,CACX,OAAgB,EAChB,OAAgB,EAChB,SAAiC,oBAAoB;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;OAMG;IACI,OAAO,CACZ,OAAgB,EAChB,OAAgB,EAChB,SAAiC,oBAAoB;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,MAAM;QACX,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC9C,CAAC;IAED,mCAAmC;IAC5B,OAAO;QACZ,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IAYO,oBAAoB;QAC1B,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,wBAAwB,CAAC;IACzD,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator';\nimport {\n defaultTextMapGetter,\n defaultTextMapSetter,\n TextMapGetter,\n TextMapPropagator,\n TextMapSetter,\n} from '../propagation/TextMapPropagator';\nimport {\n getBaggage,\n getActiveBaggage,\n setBaggage,\n deleteBaggage,\n} from '../baggage/context-helpers';\nimport { createBaggage } from '../baggage/utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'propagation';\nconst NOOP_TEXT_MAP_PROPAGATOR = new 
NoopTextMapPropagator();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nexport class PropagationAPI {\n private static _instance?: PropagationAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Propagator API */\n public static getInstance(): PropagationAPI {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current propagator.\n *\n * @returns true if the propagator was successfully registered, else false\n */\n public setGlobalPropagator(propagator: TextMapPropagator): boolean {\n return registerGlobal(API_NAME, propagator, DiagAPI.instance());\n }\n\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param context Context carrying tracing data to inject\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n */\n public inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter = defaultTextMapSetter\n ): void {\n return this._getGlobalPropagator().inject(context, carrier, setter);\n }\n\n /**\n * Extract context from a carrier\n *\n * @param context Context which the newly created context will inherit from\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n */\n public extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter = defaultTextMapGetter\n ): Context {\n return this._getGlobalPropagator().extract(context, carrier, getter);\n }\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n public fields(): string[] {\n return this._getGlobalPropagator().fields();\n }\n\n /** Remove the global propagator */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n\n public createBaggage = createBaggage;\n\n public getBaggage = getBaggage;\n\n public getActiveBaggage = getActiveBaggage;\n\n public setBaggage = setBaggage;\n\n public deleteBaggage = deleteBaggage;\n\n private _getGlobalPropagator(): TextMapPropagator {\n return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/trace.d.ts b/node_modules/@opentelemetry/api/build/esnext/api/trace.d.ts new file mode 100644 index 0000000..df59fd2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/trace.d.ts @@ -0,0 +1,40 @@ +import { isSpanContextValid, wrapSpanContext } from '../trace/spancontext-utils'; +import { Tracer } from '../trace/tracer'; +import { TracerProvider } from '../trace/tracer_provider'; +import { deleteSpan, getActiveSpan, getSpan, getSpanContext, setSpan, setSpanContext } from '../trace/context-utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +export declare class TraceAPI { + private static _instance?; + private _proxyTracerProvider; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Trace API */ + static getInstance(): TraceAPI; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider: TracerProvider): boolean; + /** + * Returns the global tracer provider. 
+ */ + getTracerProvider(): TracerProvider; + /** + * Returns a tracer from the global tracer provider. + */ + getTracer(name: string, version?: string): Tracer; + /** Remove the global tracer provider */ + disable(): void; + wrapSpanContext: typeof wrapSpanContext; + isSpanContextValid: typeof isSpanContextValid; + deleteSpan: typeof deleteSpan; + getSpan: typeof getSpan; + getActiveSpan: typeof getActiveSpan; + getSpanContext: typeof getSpanContext; + setSpan: typeof setSpan; + setSpanContext: typeof setSpanContext; +} +//# sourceMappingURL=trace.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/trace.js b/node_modules/@opentelemetry/api/build/esnext/api/trace.js new file mode 100644 index 0000000..6003620 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/trace.js @@ -0,0 +1,75 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { ProxyTracerProvider } from '../trace/ProxyTracerProvider'; +import { isSpanContextValid, wrapSpanContext, } from '../trace/spancontext-utils'; +import { deleteSpan, getActiveSpan, getSpan, getSpanContext, setSpan, setSpanContext, } from '../trace/context-utils'; +import { DiagAPI } from './diag'; +const API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +export class TraceAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { + this._proxyTracerProvider = new ProxyTracerProvider(); + this.wrapSpanContext = wrapSpanContext; + this.isSpanContextValid = isSpanContextValid; + this.deleteSpan = deleteSpan; + this.getSpan = getSpan; + this.getActiveSpan = getActiveSpan; + this.getSpanContext = getSpanContext; + this.setSpan = setSpan; + this.setSpanContext = setSpanContext; + } + /** Get the singleton instance of the Trace API */ + static getInstance() { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + } + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider) { + const success = registerGlobal(API_NAME, this._proxyTracerProvider, DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + } + /** + * Returns the global tracer provider. + */ + getTracerProvider() { + return getGlobal(API_NAME) || this._proxyTracerProvider; + } + /** + * Returns a tracer from the global tracer provider. 
+ */ + getTracer(name, version) { + return this.getTracerProvider().getTracer(name, version); + } + /** Remove the global tracer provider */ + disable() { + unregisterGlobal(API_NAME, DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider(); + } +} +//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/trace.js.map b/node_modules/@opentelemetry/api/build/esnext/api/trace.js.map new file mode 100644 index 0000000..1521877 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/api/trace.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace.js","sourceRoot":"","sources":["../../../src/api/trace.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACnE,OAAO,EACL,kBAAkB,EAClB,eAAe,GAChB,MAAM,4BAA4B,CAAC;AAGpC,OAAO,EACL,UAAU,EACV,aAAa,EACb,OAAO,EACP,cAAc,EACd,OAAO,EACP,cAAc,GACf,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,MAAM,QAAQ,GAAG,OAAO,CAAC;AAEzB;;GAEG;AACH,MAAM,OAAO,QAAQ;IAKnB,+FAA+F;IAC/F;QAHQ,yBAAoB,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAmDlD,oBAAe,GAAG,eAAe,CAAC;QAElC,uBAAkB,GAAG,kBAAkB,CAAC;QAExC,eAAU,GAAG,UAAU,CAAC;QAExB,YAAO,GAAG,OAAO,CAAC;QAElB,kBAAa,GAAG,aAAa,CAAC;QAE9B,mBAAc,GAAG,cAAc,CAAC;QAEhC,YAAO,GAAG,OAAO,CAAC;QAElB,mBAAc,GAAG,cAAc,CAAC;IA9DhB,CAAC;IAExB,kDAAkD;IAC3C,MAAM,CAAC,WAAW;QACvB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,EAAE,CAAC;SACjC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,uBAAuB,CAAC,QAAwB;QACrD,MAAM,OAAO,GAAG,cAAc,CAC5B,QAAQ,EACR,IAAI,CAAC,oBAAoB,EACzB,OAAO,CAAC,QAAQ,EAAE,CACnB,CAAC;QACF,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;SACjD;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,iBAAiB;QACtB,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,CAAC;IAC1D,CAAC;IAED;;OAEG;IACI,SAAS,CAAC,IAAY,EAAE,OAAgB;QAC7C,OAAO,IAAI,CAAC,iBAAiB,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAED,wCAAwC;IACjC,OAAO;QACZ,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;QAC/C,IAAI,CAAC,oBAAoB,GAAG,IAAI,mBAAmB,EAAE,CAAC;IACxD,CAAC;CAiBF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { ProxyTracerProvider } from '../trace/ProxyTracerProvider';\nimport {\n isSpanContextValid,\n wrapSpanContext,\n} from '../trace/spancontext-utils';\nimport { Tracer } from '../trace/tracer';\nimport { TracerProvider } from '../trace/tracer_provider';\nimport {\n deleteSpan,\n getActiveSpan,\n getSpan,\n getSpanContext,\n setSpan,\n setSpanContext,\n} from '../trace/context-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'trace';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nexport class TraceAPI {\n private static _instance?: TraceAPI;\n\n private 
_proxyTracerProvider = new ProxyTracerProvider();\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Trace API */\n public static getInstance(): TraceAPI {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global tracer.\n *\n * @returns true if the tracer provider was successfully registered, else false\n */\n public setGlobalTracerProvider(provider: TracerProvider): boolean {\n const success = registerGlobal(\n API_NAME,\n this._proxyTracerProvider,\n DiagAPI.instance()\n );\n if (success) {\n this._proxyTracerProvider.setDelegate(provider);\n }\n return success;\n }\n\n /**\n * Returns the global tracer provider.\n */\n public getTracerProvider(): TracerProvider {\n return getGlobal(API_NAME) || this._proxyTracerProvider;\n }\n\n /**\n * Returns a tracer from the global tracer provider.\n */\n public getTracer(name: string, version?: string): Tracer {\n return this.getTracerProvider().getTracer(name, version);\n }\n\n /** Remove the global tracer provider */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n this._proxyTracerProvider = new ProxyTracerProvider();\n }\n\n public wrapSpanContext = wrapSpanContext;\n\n public isSpanContextValid = isSpanContextValid;\n\n public deleteSpan = deleteSpan;\n\n public getSpan = getSpan;\n\n public getActiveSpan = getActiveSpan;\n\n public getSpanContext = getSpanContext;\n\n public setSpan = setSpan;\n\n public setSpanContext = setSpanContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.d.ts b/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.d.ts new file mode 100644 index 0000000..23750eb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.d.ts @@ -0,0 +1,29 @@ +import { Context } from '../context/types'; +import { Baggage } from './types'; +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getBaggage(context: Context): Baggage | undefined; +/** + * Retrieve the current baggage from the active/current context + * + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getActiveBaggage(): Baggage | undefined; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export declare function setBaggage(context: Context, baggage: Baggage): Context; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export declare function deleteBaggage(context: Context): Context; +//# sourceMappingURL=context-helpers.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js new file mode 100644 index 0000000..9cd0fe7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js @@ -0,0 +1,56 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ContextAPI } from '../api/context'; +import { createContextKey } from '../context/context'; +/** + * Baggage key + */ +const BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +/** + * Retrieve the current baggage from the active/current context + * + * @returns {Baggage} Extracted baggage from the context + */ +export function getActiveBaggage() { + return getBaggage(ContextAPI.getInstance().active()); +} +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js.map b/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js.map new file mode 100644 index 0000000..c39d666 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-helpers.js","sourceRoot":"","sources":["../../../src/baggage/context-helpers.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAC5C,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD;;GAEG;AACH,MAAM,WAAW,GAAG,gBAAgB,CAAC,2BAA2B,CAAC,CAAC;AAElE;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB;IACzC,OAAQ,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa,IAAI,SAAS,CAAC;AACjE,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,gBAAgB;IAC9B,OAAO,UAAU,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC;AACvD,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB,EAAE,OAAgB;IAC3D,OAAO,OAAO,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;AAChD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,OAAgB;IAC5C,OAAO,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAC1C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { createContextKey } from '../context/context';\nimport { Context } 
from '../context/types';\nimport { Baggage } from './types';\n\n/**\n * Baggage key\n */\nconst BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key');\n\n/**\n * Retrieve the current baggage from the given context\n *\n * @param {Context} Context that manage all context values\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getBaggage(context: Context): Baggage | undefined {\n return (context.getValue(BAGGAGE_KEY) as Baggage) || undefined;\n}\n\n/**\n * Retrieve the current baggage from the active/current context\n *\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getActiveBaggage(): Baggage | undefined {\n return getBaggage(ContextAPI.getInstance().active());\n}\n\n/**\n * Store a baggage in the given context\n *\n * @param {Context} Context that manage all context values\n * @param {Baggage} baggage that will be set in the actual context\n */\nexport function setBaggage(context: Context, baggage: Baggage): Context {\n return context.setValue(BAGGAGE_KEY, baggage);\n}\n\n/**\n * Delete the baggage stored in the given context\n *\n * @param {Context} Context that manage all context values\n */\nexport function deleteBaggage(context: Context): Context {\n return context.deleteValue(BAGGAGE_KEY);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.d.ts b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.d.ts new file mode 100644 index 0000000..e6b4554 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.d.ts @@ -0,0 +1,12 @@ +import type { Baggage, BaggageEntry } from '../types'; +export declare class BaggageImpl implements Baggage { + private _entries; + constructor(entries?: Map); + getEntry(key: string): BaggageEntry | undefined; + getAllEntries(): [string, BaggageEntry][]; + setEntry(key: string, entry: BaggageEntry): BaggageImpl; + removeEntry(key: string): BaggageImpl; + removeEntries(...keys: string[]): BaggageImpl; + clear(): BaggageImpl; +} +//# sourceMappingURL=baggage-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js new file mode 100644 index 0000000..774d1f8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export class BaggageImpl { + constructor(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + getEntry(key) { + const entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + } + getAllEntries() { + return Array.from(this._entries.entries()).map(([k, v]) => [k, v]); + } + setEntry(key, entry) { + const newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + } + removeEntry(key) { + const newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + } + removeEntries(...keys) { + const newBaggage = new BaggageImpl(this._entries); + for (const key of keys) { + newBaggage._entries.delete(key); + } + return newBaggage; + } + clear() { + return new BaggageImpl(); + } +} +//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js.map b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js.map new file mode 100644 index 0000000..74c6a4c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js.map @@ -0,0 +1 @@ +{"version":3,"file":"baggage-impl.js","sourceRoot":"","sources":["../../../../src/baggage/internal/baggage-impl.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,MAAM,OAAO,WAAW;IAGtB,YAAY,OAAmC;QAC7C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;IACzD,CAAC;IAED,QAAQ,CAAC,GAAW;QAClB,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;IAClC,CAAC;IAED,aAAa;QACX,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACrE,CAAC;IAED,QAAQ,CAAC,GAAW,EAAE,KAAmB;QACvC,MAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACpC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,WAAW,CAAC,GAAW;QACrB,MAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAChC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,aAAa,CAAC,GAAG,IAAc;QAC7B,MAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;YACtB,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACjC;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,KAAK;QACH,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport type { Baggage, BaggageEntry } from '../types';\n\nexport class BaggageImpl implements Baggage {\n private _entries: Map;\n\n constructor(entries?: Map) {\n this._entries = entries ? 
new Map(entries) : new Map();\n }\n\n getEntry(key: string): BaggageEntry | undefined {\n const entry = this._entries.get(key);\n if (!entry) {\n return undefined;\n }\n\n return Object.assign({}, entry);\n }\n\n getAllEntries(): [string, BaggageEntry][] {\n return Array.from(this._entries.entries()).map(([k, v]) => [k, v]);\n }\n\n setEntry(key: string, entry: BaggageEntry): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.set(key, entry);\n return newBaggage;\n }\n\n removeEntry(key: string): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.delete(key);\n return newBaggage;\n }\n\n removeEntries(...keys: string[]): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n for (const key of keys) {\n newBaggage._entries.delete(key);\n }\n return newBaggage;\n }\n\n clear(): BaggageImpl {\n return new BaggageImpl();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.d.ts b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.d.ts new file mode 100644 index 0000000..9cd991c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.d.ts @@ -0,0 +1,5 @@ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export declare const baggageEntryMetadataSymbol: unique symbol; +//# sourceMappingURL=symbol.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js new file mode 100644 index 0000000..22f5b25 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js.map b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js.map new file mode 100644 index 0000000..0cc1abd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js.map @@ -0,0 +1 @@ +{"version":3,"file":"symbol.js","sourceRoot":"","sources":["../../../../src/baggage/internal/symbol.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH;;GAEG;AACH,MAAM,CAAC,MAAM,0BAA0B,GAAG,MAAM,CAAC,sBAAsB,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Symbol used to make BaggageEntryMetadata an opaque type\n */\nexport const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata');\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/types.d.ts b/node_modules/@opentelemetry/api/build/esnext/baggage/types.d.ts new file mode 100644 index 0000000..32fa0ec --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/types.d.ts @@ -0,0 +1,60 @@ +import { baggageEntryMetadataSymbol } from './internal/symbol'; +export interface BaggageEntry { + /** `String` value of the `BaggageEntry`. */ + value: string; + /** + * Metadata is an optional string property defined by the W3C baggage specification. + * It currently has no special meaning defined by the specification. + */ + metadata?: BaggageEntryMetadata; +} +/** + * Serializable Metadata defined by the W3C baggage specification. + * It currently has no special meaning defined by the OpenTelemetry or W3C. + */ +export declare type BaggageEntryMetadata = { + toString(): string; +} & { + __TYPE__: typeof baggageEntryMetadataSymbol; +}; +/** + * Baggage represents collection of key-value pairs with optional metadata. + * Each key of Baggage is associated with exactly one value. + * Baggage may be used to annotate and enrich telemetry data. 
+ */ +export interface Baggage { + /** + * Get an entry from Baggage if it exists + * + * @param key The key which identifies the BaggageEntry + */ + getEntry(key: string): BaggageEntry | undefined; + /** + * Get a list of all entries in the Baggage + */ + getAllEntries(): [string, BaggageEntry][]; + /** + * Returns a new baggage with the entries from the current bag and the specified entry + * + * @param key string which identifies the baggage entry + * @param entry BaggageEntry for the given key + */ + setEntry(key: string, entry: BaggageEntry): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entry + * + * @param key key identifying the entry to be removed + */ + removeEntry(key: string): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entries + * + * @param key keys identifying the entries to be removed + */ + removeEntries(...key: string[]): Baggage; + /** + * Returns a new baggage with no entries + */ + clear(): Baggage; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/types.js b/node_modules/@opentelemetry/api/build/esnext/baggage/types.js new file mode 100644 index 0000000..928faad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/types.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/types.js.map b/node_modules/@opentelemetry/api/build/esnext/baggage/types.js.map new file mode 100644 index 0000000..ae80c19 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/baggage/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\n\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface BaggageEntry {\n /** `String` value of the `BaggageEntry`. 
*/\n value: string;\n /**\n * Metadata is an optional string property defined by the W3C baggage specification.\n * It currently has no special meaning defined by the specification.\n */\n metadata?: BaggageEntryMetadata;\n}\n\n/**\n * Serializable Metadata defined by the W3C baggage specification.\n * It currently has no special meaning defined by the OpenTelemetry or W3C.\n */\nexport type BaggageEntryMetadata = { toString(): string } & {\n __TYPE__: typeof baggageEntryMetadataSymbol;\n};\n\n/**\n * Baggage represents collection of key-value pairs with optional metadata.\n * Each key of Baggage is associated with exactly one value.\n * Baggage may be used to annotate and enrich telemetry data.\n */\nexport interface Baggage {\n /**\n * Get an entry from Baggage if it exists\n *\n * @param key The key which identifies the BaggageEntry\n */\n getEntry(key: string): BaggageEntry | undefined;\n\n /**\n * Get a list of all entries in the Baggage\n */\n getAllEntries(): [string, BaggageEntry][];\n\n /**\n * Returns a new baggage with the entries from the current bag and the specified entry\n *\n * @param key string which identifies the baggage entry\n * @param entry BaggageEntry for the given key\n */\n setEntry(key: string, entry: BaggageEntry): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entry\n *\n * @param key key identifying the entry to be removed\n */\n removeEntry(key: string): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entries\n *\n * @param key keys identifying the entries to be removed\n */\n removeEntries(...key: string[]): Baggage;\n\n /**\n * Returns a new baggage with no entries\n */\n clear(): Baggage;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/utils.d.ts b/node_modules/@opentelemetry/api/build/esnext/baggage/utils.d.ts new file mode 100644 index 0000000..9955d9e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/utils.d.ts @@ -0,0 +1,15 @@ +import { Baggage, BaggageEntry, BaggageEntryMetadata } from './types'; +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export declare function createBaggage(entries?: Record): Baggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export declare function baggageEntryMetadataFromString(str: string): BaggageEntryMetadata; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js b/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js new file mode 100644 index 0000000..b444883 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js @@ -0,0 +1,46 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { DiagAPI } from '../api/diag'; +import { BaggageImpl } from './internal/baggage-impl'; +import { baggageEntryMetadataSymbol } from './internal/symbol'; +const diag = DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export function createBaggage(entries = {}) { + return new BaggageImpl(new Map(Object.entries(entries))); +} +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error(`Cannot create baggage metadata from unknown type: ${typeof str}`); + str = ''; + } + return { + __TYPE__: baggageEntryMetadataSymbol, + toString() { + return str; + }, + }; +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js.map b/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js.map new file mode 100644 index 0000000..0a0228d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/baggage/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,yBAAyB,CAAC;AACtD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAG/D,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;AAEhC;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAC3B,UAAwC,EAAE;IAE1C,OAAO,IAAI,WAAW,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC3D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,8BAA8B,CAC5C,GAAW;IAEX,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,IAAI,CAAC,KAAK,CACR,qDAAqD,OAAO,GAAG,EAAE,CAClE,CAAC;QACF,GAAG,GAAG,EAAE,CAAC;KACV;IAED,OAAO;QACL,QAAQ,EAAE,0BAA0B;QACpC,QAAQ;YACN,OAAO,GAAG,CAAC;QACb,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagAPI } from '../api/diag';\nimport { BaggageImpl } from './internal/baggage-impl';\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\nimport { Baggage, BaggageEntry, BaggageEntryMetadata } from './types';\n\nconst diag = DiagAPI.instance();\n\n/**\n * Create a new Baggage with optional entries\n *\n * @param entries An array of baggage entries the new baggage should contain\n */\nexport function createBaggage(\n entries: Record = {}\n): Baggage {\n return new BaggageImpl(new Map(Object.entries(entries)));\n}\n\n/**\n * Create a serializable BaggageEntryMetadata object from a string.\n *\n * @param str string metadata. 
Format is currently not defined by the spec and has no special meaning.\n *\n */\nexport function baggageEntryMetadataFromString(\n str: string\n): BaggageEntryMetadata {\n if (typeof str !== 'string') {\n diag.error(\n `Cannot create baggage metadata from unknown type: ${typeof str}`\n );\n str = '';\n }\n\n return {\n __TYPE__: baggageEntryMetadataSymbol,\n toString() {\n return str;\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Attributes.d.ts b/node_modules/@opentelemetry/api/build/esnext/common/Attributes.d.ts new file mode 100644 index 0000000..19994fb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Attributes.d.ts @@ -0,0 +1,15 @@ +/** + * Attributes is a map from string to attribute values. + * + * Note: only the own enumerable keys are counted as valid attribute keys. + */ +export interface Attributes { + [attributeKey: string]: AttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type AttributeValue = string | number | boolean | Array | Array | Array; +//# sourceMappingURL=Attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js b/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js new file mode 100644 index 0000000..dbb1e49 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js.map b/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js.map new file mode 100644 index 0000000..2649c94 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Attributes.js","sourceRoot":"","sources":["../../../src/common/Attributes.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Attributes is a map from string to attribute values.\n *\n * Note: only the own enumerable keys are counted as valid attribute keys.\n */\nexport interface Attributes {\n [attributeKey: string]: AttributeValue | undefined;\n}\n\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport type AttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Exception.d.ts b/node_modules/@opentelemetry/api/build/esnext/common/Exception.d.ts new file mode 100644 index 0000000..e175a7f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Exception.d.ts @@ -0,0 +1,26 @@ +interface ExceptionWithCode { + code: string | number; + name?: string; + message?: string; + stack?: string; +} +interface ExceptionWithMessage { + code?: string | number; + message: string; + name?: string; + stack?: string; +} +interface ExceptionWithName { + code?: string | number; + message?: string; + name: string; + stack?: string; +} +/** + * Defines Exception. + * + * string or an object with one of (message or name or code) and optional stack + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; +export {}; +//# sourceMappingURL=Exception.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Exception.js b/node_modules/@opentelemetry/api/build/esnext/common/Exception.js new file mode 100644 index 0000000..6522a8e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Exception.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
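/*
 * A small sketch of values that satisfy the Attributes / AttributeValue shape declared
 * above: strings, numbers, booleans, or homogeneous arrays of them. The tracer and span
 * names are illustrative assumptions, not taken from this diff.
 */
import { trace } from '@opentelemetry/api';
import type { Attributes } from '@opentelemetry/api';

const attrs: Attributes = {
  'http.method': 'GET',          // string
  'http.status_code': 200,       // number
  'cache.hit': false,            // boolean
  'retry.delays_ms': [100, 250], // homogeneous arrays are allowed
  // objects or nested maps are NOT valid AttributeValues
};

const span = trace.getTracer('example').startSpan('fetch', { attributes: attrs });
span.end();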
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Exception.js.map b/node_modules/@opentelemetry/api/build/esnext/common/Exception.js.map new file mode 100644 index 0000000..989dd3d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Exception.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Exception.js","sourceRoot":"","sources":["../../../src/common/Exception.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\ninterface ExceptionWithCode {\n code: string | number;\n name?: string;\n message?: string;\n stack?: string;\n}\n\ninterface ExceptionWithMessage {\n code?: string | number;\n message: string;\n name?: string;\n stack?: string;\n}\n\ninterface ExceptionWithName {\n code?: string | number;\n message?: string;\n name: string;\n stack?: string;\n}\n\n/**\n * Defines Exception.\n *\n * string or an object with one of (message or name or code) and optional stack\n */\nexport type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Time.d.ts b/node_modules/@opentelemetry/api/build/esnext/common/Time.d.ts new file mode 100644 index 0000000..cc3c502 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Time.d.ts @@ -0,0 +1,20 @@ +/** + * Defines High-Resolution Time. + * + * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970. + * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds. + * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150. + * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds: + * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210. + * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds: + * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000. + * This is represented in HrTime format as [1609504210, 150000000]. + */ +export declare type HrTime = [number, number]; +/** + * Defines TimeInput. 
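/*
 * A sketch of values satisfying the Exception union declared in Exception.d.ts above,
 * as they might be passed to a span's recordException method; the tracer name and error
 * details are assumptions for illustration.
 */
import { trace } from '@opentelemetry/api';
import type { Exception } from '@opentelemetry/api';

const byMessage: Exception = { message: 'connection reset' };
const byName: Exception = { name: 'FetchError' };
const byCode: Exception = { code: 'ECONNRESET' };

const span = trace.getTracer('example').startSpan('fetch');
span.recordException(byMessage);           // an Error instance or a plain string also works
span.recordException(byName);
span.recordException(byCode, Date.now()); // optional TimeInput as the second argument
span.end();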
+ * + * hrtime, epoch milliseconds, performance.now() or Date + */ +export declare type TimeInput = HrTime | number | Date; +//# sourceMappingURL=Time.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Time.js b/node_modules/@opentelemetry/api/build/esnext/common/Time.js new file mode 100644 index 0000000..2abdf58 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Time.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Time.js.map b/node_modules/@opentelemetry/api/build/esnext/common/Time.js.map new file mode 100644 index 0000000..ae124f0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/common/Time.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Time.js","sourceRoot":"","sources":["../../../src/common/Time.ts"],"names":[],"mappings":"","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Defines High-Resolution Time.\n *\n * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970.\n * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds.\n * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150.\n * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds:\n * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210.\n * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds:\n * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000.\n * This is represented in HrTime format as [1609504210, 150000000].\n */\nexport type HrTime = [number, number];\n\n/**\n * Defines TimeInput.\n *\n * hrtime, epoch milliseconds, performance.now() or Date\n */\nexport type TimeInput = HrTime | number | Date;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context-api.d.ts b/node_modules/@opentelemetry/api/build/esnext/context-api.d.ts new file mode 100644 index 0000000..650f4ee --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context-api.d.ts @@ -0,0 +1,4 @@ +import { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export declare const context: ContextAPI; +//# sourceMappingURL=context-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context-api.js b/node_modules/@opentelemetry/api/build/esnext/context-api.js new file mode 100644 index 0000000..5827043 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context-api.js @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not 
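/*
 * A worked sketch of the HrTime arithmetic described in Time.d.ts above: converting
 * epoch milliseconds into the [seconds, nanoseconds] tuple. Plain arithmetic only, no
 * API calls; the helper name is made up.
 */
import type { HrTime, TimeInput } from '@opentelemetry/api';

function millisToHrTime(epochMillis: number): HrTime {
  const seconds = Math.trunc(epochMillis / 1000);       // whole seconds since the Unix epoch
  const nanos = Math.round((epochMillis % 1000) * 1e6); // remaining millis expressed as nanoseconds
  return [seconds, nanos];
}

// Reproduces the example from the doc comment: 2021-01-01T12:30:10.150Z
console.log(millisToHrTime(1609504210150)); // [1609504210, 150000000]

// Any of these satisfy TimeInput: an HrTime tuple, epoch milliseconds, or a Date.
const inputs: TimeInput[] = [millisToHrTime(Date.now()), Date.now(), new Date()];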
use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export const context = ContextAPI.getInstance(); +//# sourceMappingURL=context-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context-api.js.map b/node_modules/@opentelemetry/api/build/esnext/context-api.js.map new file mode 100644 index 0000000..fc655d3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-api.js","sourceRoot":"","sources":["../../src/context-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAC3C,iCAAiC;AACjC,MAAM,CAAC,MAAM,OAAO,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { ContextAPI } from './api/context';\n/** Entrypoint for context API */\nexport const context = ContextAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.d.ts b/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.d.ts new file mode 100644 index 0000000..48a1659 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.d.ts @@ -0,0 +1,9 @@ +import * as types from './types'; +export declare class NoopContextManager implements types.ContextManager { + active(): types.Context; + with ReturnType>(_context: types.Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + bind(_context: types.Context, target: T): T; + enable(): this; + disable(): this; +} +//# sourceMappingURL=NoopContextManager.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js b/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js new file mode 100644 index 0000000..14824a6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
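/*
 * A brief sketch of the `context` entrypoint defined in context-api.js above. Until an
 * SDK registers a real ContextManager, calls fall through to the NoopContextManager, so
 * with() simply invokes the callback and active() returns the root context.
 */
import { context, ROOT_CONTEXT } from '@opentelemetry/api';

const result = context.with(ROOT_CONTEXT, () => {
  // With only the no-op manager installed, the active context is the root context.
  return context.active() === ROOT_CONTEXT;
});
console.log(result); // true when no context manager has been registered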
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ROOT_CONTEXT } from './context'; +export class NoopContextManager { + active() { + return ROOT_CONTEXT; + } + with(_context, fn, thisArg, ...args) { + return fn.call(thisArg, ...args); + } + bind(_context, target) { + return target; + } + enable() { + return this; + } + disable() { + return this; + } +} +//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js.map b/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js.map new file mode 100644 index 0000000..8ee9f8f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopContextManager.js","sourceRoot":"","sources":["../../../src/context/NoopContextManager.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAGzC,MAAM,OAAO,kBAAkB;IAC7B,MAAM;QACJ,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,IAAI,CACF,QAAuB,EACvB,EAAK,EACL,OAA8B,EAC9B,GAAG,IAAO;QAEV,OAAO,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;IACnC,CAAC;IAED,IAAI,CAAI,QAAuB,EAAE,MAAS;QACxC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM;QACJ,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO;QACL,OAAO,IAAI,CAAC;IACd,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ROOT_CONTEXT } from './context';\nimport * as types from './types';\n\nexport class NoopContextManager implements types.ContextManager {\n active(): types.Context {\n return ROOT_CONTEXT;\n }\n\n with ReturnType>(\n _context: types.Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return fn.call(thisArg, ...args);\n }\n\n bind(_context: types.Context, target: T): T {\n return target;\n }\n\n enable(): this {\n return this;\n }\n\n disable(): this {\n return this;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/context.d.ts b/node_modules/@opentelemetry/api/build/esnext/context/context.d.ts new file mode 100644 index 0000000..8be0259 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/context.d.ts @@ -0,0 +1,6 @@ +import { Context } from './types'; +/** Get a key to uniquely identify a context value */ +export declare function createContextKey(description: string): symbol; +/** The root context is used as the default parent context when there is no active context */ +export declare const ROOT_CONTEXT: Context; +//# sourceMappingURL=context.d.ts.map \ No newline at end of file 
diff --git a/node_modules/@opentelemetry/api/build/esnext/context/context.js b/node_modules/@opentelemetry/api/build/esnext/context/context.js new file mode 100644 index 0000000..a95ecf0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/context.js @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** Get a key to uniquely identify a context value */ +export function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +class BaseContext { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + constructor(parentContext) { + // for minification + const self = this; + self._currentContext = parentContext ? new Map(parentContext) : new Map(); + self.getValue = (key) => self._currentContext.get(key); + self.setValue = (key, value) => { + const context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = (key) => { + const context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } +} +/** The root context is used as the default parent context when there is no active context */ +export const ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/context.js.map b/node_modules/@opentelemetry/api/build/esnext/context/context.js.map new file mode 100644 index 0000000..a35b9a7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/context.js.map @@ -0,0 +1 @@ 
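/*
 * A sketch of the immutable-context pattern implemented by BaseContext above: setValue
 * and deleteValue return new contexts and never mutate the receiver. The key name is an
 * arbitrary example.
 */
import { createContextKey, ROOT_CONTEXT } from '@opentelemetry/api';

// Symbol.for-based, so the same description yields the same key across API copies.
const REQUEST_ID = createContextKey('example.request_id');

const withId = ROOT_CONTEXT.setValue(REQUEST_ID, 'req-123');
console.log(withId.getValue(REQUEST_ID));       // 'req-123'
console.log(ROOT_CONTEXT.getValue(REQUEST_ID)); // undefined: the root context is untouched

const cleared = withId.deleteValue(REQUEST_ID);
console.log(cleared.getValue(REQUEST_ID));      // undefined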
+{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/context/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,qDAAqD;AACrD,MAAM,UAAU,gBAAgB,CAAC,WAAmB;IAClD,0EAA0E;IAC1E,2EAA2E;IAC3E,wEAAwE;IACxE,mDAAmD;IACnD,EAAE;IACF,8EAA8E;IAC9E,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AACjC,CAAC;AAED,MAAM,WAAW;IAGf;;;;OAIG;IACH,YAAY,aAAoC;QAC9C,mBAAmB;QACnB,MAAM,IAAI,GAAG,IAAI,CAAC;QAElB,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;QAE1E,IAAI,CAAC,QAAQ,GAAG,CAAC,GAAW,EAAE,EAAE,CAAC,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QAE/D,IAAI,CAAC,QAAQ,GAAG,CAAC,GAAW,EAAE,KAAc,EAAW,EAAE;YACvD,MAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;YACxC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;QAEF,IAAI,CAAC,WAAW,GAAG,CAAC,GAAW,EAAW,EAAE;YAC1C,MAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YACpC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;IACJ,CAAC;CAyBF;AAED,6FAA6F;AAC7F,MAAM,CAAC,MAAM,YAAY,GAAY,IAAI,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from './types';\n\n/** Get a key to uniquely identify a context value */\nexport function createContextKey(description: string) {\n // The specification states that for the same input, multiple calls should\n // return different keys. Due to the nature of the JS dependency management\n // system, this creates problems where multiple versions of some package\n // could hold different keys for the same property.\n //\n // Therefore, we use Symbol.for which returns the same key for the same input.\n return Symbol.for(description);\n}\n\nclass BaseContext implements Context {\n private _currentContext!: Map;\n\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n constructor(parentContext?: Map) {\n // for minification\n const self = this;\n\n self._currentContext = parentContext ? 
new Map(parentContext) : new Map();\n\n self.getValue = (key: symbol) => self._currentContext.get(key);\n\n self.setValue = (key: symbol, value: unknown): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n\n self.deleteValue = (key: symbol): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n }\n\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n public getValue!: (key: symbol) => unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n public setValue!: (key: symbol, value: unknown) => Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n public deleteValue!: (key: symbol) => Context;\n}\n\n/** The root context is used as the default parent context when there is no active context */\nexport const ROOT_CONTEXT: Context = new BaseContext();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/types.d.ts b/node_modules/@opentelemetry/api/build/esnext/context/types.d.ts new file mode 100644 index 0000000..7e86632 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/types.d.ts @@ -0,0 +1,52 @@ +export interface Context { + /** + * Get a value from the context. + * + * @param key key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key context key for which to set the value + * @param value value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +export interface ContextManager { + /** + * Get the current active context + */ + active(): Context; + /** + * Run the fn callback with object set as the current active context + * @param context Any object to set as the current active context + * @param fn A callback to be immediately run within a specific context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind an object as the current context (or a specific one) + * @param [context] Optionally specify the context which you want to assign + * @param target Any object to which a context need to be set + */ + bind(context: Context, target: T): T; + /** + * Enable context management + */ + enable(): this; + /** + * Disable context management + */ + disable(): this; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/types.js b/node_modules/@opentelemetry/api/build/esnext/context/types.js new file mode 100644 index 0000000..928faad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/types.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
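/*
 * A sketch of ContextManager.bind as declared above, reached through the global `context`
 * entrypoint. Once an SDK registers a real context manager, the returned function runs
 * with the captured context active; with only the no-op manager it is returned unchanged.
 * The callback is illustrative.
 */
import { context } from '@opentelemetry/api';

const captured = context.active();
const boundListener = context.bind(captured, () => {
  // Runs with `captured` as the active context, even if invoked later from another async path.
  return context.active() === captured;
});
console.log(boundListener()); // true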
+ */ +export {}; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/types.js.map b/node_modules/@opentelemetry/api/build/esnext/context/types.js.map new file mode 100644 index 0000000..d438aa3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/context/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/context/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n getValue(key: symbol): unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n\nexport interface ContextManager {\n /**\n * Get the current active context\n */\n active(): Context;\n\n /**\n * Run the fn callback with object set as the current active context\n * @param context Any object to set as the current active context\n * @param fn A callback to be immediately run within a specific context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType;\n\n /**\n * Bind an object as the current context (or a specific one)\n * @param [context] Optionally specify the context which you want to assign\n * @param target Any object to which a context need to be set\n */\n bind(context: Context, target: T): T;\n\n /**\n * Enable context management\n */\n enable(): this;\n\n /**\n * Disable context management\n */\n disable(): this;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag-api.d.ts b/node_modules/@opentelemetry/api/build/esnext/diag-api.d.ts new file mode 100644 index 0000000..d82fdb1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag-api.d.ts @@ -0,0 +1,9 @@ +import { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. 
+ */ +export declare const diag: DiagAPI; +//# sourceMappingURL=diag-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag-api.js b/node_modules/@opentelemetry/api/build/esnext/diag-api.js new file mode 100644 index 0000000..41d2658 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag-api.js @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +export const diag = DiagAPI.instance(); +//# sourceMappingURL=diag-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag-api.js.map b/node_modules/@opentelemetry/api/build/esnext/diag-api.js.map new file mode 100644 index 0000000..d0be8cb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag-api.js","sourceRoot":"","sources":["../../src/diag-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AACrC;;;;;GAKG;AACH,MAAM,CAAC,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { DiagAPI } from './api/diag';\n/**\n * Entrypoint for Diag API.\n * Defines Diagnostic handler used for internal diagnostic logging operations.\n * The default provides a Noop DiagLogger implementation which may be changed via the\n * diag.setLogger(logger: DiagLogger) function.\n */\nexport const diag = DiagAPI.instance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.d.ts b/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.d.ts new file mode 100644 index 0000000..f060950 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.d.ts @@ -0,0 +1,20 @@ +import { ComponentLoggerOptions, DiagLogger } from './types'; +/** + * 
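/*
 * A minimal sketch of wiring the diag entrypoint declared above to a concrete logger.
 * DiagConsoleLogger and DiagLogLevel are the package's own exports; the message text is
 * made up.
 */
import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

// Register a console-backed logger and filter out everything below INFO.
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.INFO);

diag.info('exporter started'); // forwarded to console.info
diag.debug('verbose details'); // dropped: DEBUG is finer than the configured level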
Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. + * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +export declare class DiagComponentLogger implements DiagLogger { + private _namespace; + constructor(props: ComponentLoggerOptions); + debug(...args: any[]): void; + error(...args: any[]): void; + info(...args: any[]): void; + warn(...args: any[]): void; + verbose(...args: any[]): void; +} +//# sourceMappingURL=ComponentLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js new file mode 100644 index 0000000..1e21dbe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js @@ -0,0 +1,55 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal } from '../internal/global-utils'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +export class DiagComponentLogger { + constructor(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + debug(...args) { + return logProxy('debug', this._namespace, args); + } + error(...args) { + return logProxy('error', this._namespace, args); + } + info(...args) { + return logProxy('info', this._namespace, args); + } + warn(...args) { + return logProxy('warn', this._namespace, args); + } + verbose(...args) { + return logProxy('verbose', this._namespace, args); + } +} +function logProxy(funcName, namespace, args) { + const logger = getGlobal('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName](...args); +} +//# sourceMappingURL=ComponentLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js.map b/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js.map new file mode 100644 index 0000000..c65f5ff --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ComponentLogger.js","sourceRoot":"","sources":["../../../src/diag/ComponentLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAGrD;;;;;;;;GAQG;AACH,MAAM,OAAO,mBAAmB;IAG9B,YAAY,KAA6B;QACvC,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,SAAS,IAAI,qBAAqB,CAAC;IAC7D,CAAC;IAEM,KAAK,CAAC,GAAG,IAAW;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,KAAK,CAAC,GAAG,IAAW;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,IAAI,CAAC,GAAG,IAAW;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,IAAI,CAAC,GAAG,IAAW;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,OAAO,CAAC,GAAG,IAAW;QAC3B,OAAO,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;CACF;AAED,SAAS,QAAQ,CACf,QAA0B,EAC1B,SAAiB,EACjB,IAAS;IAET,MAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;IACjC,6BAA6B;IAC7B,IAAI,CAAC,MAAM,EAAE;QACX,OAAO;KACR;IAED,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxB,OAAO,MAAM,CAAC,QAAQ,CAAC,CAAC,GAAI,IAAoC,CAAC,CAAC;AACpE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getGlobal } from '../internal/global-utils';\nimport { ComponentLoggerOptions, DiagLogger, DiagLogFunction } from './types';\n\n/**\n * Component Logger which is meant to be used as part of any component which\n * will add automatically additional namespace in front of the log message.\n * It will then forward all message to global diag logger\n * @example\n * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' 
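/*
 * A sketch of the component logger shown above, obtained through the public
 * diag.createComponentLogger helper; the namespace string and message are assumptions.
 */
import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

const logger = diag.createComponentLogger({ namespace: 'example-instrumentation' });
logger.debug('handler attached');
// Forwarded to the global diag logger with the namespace prepended:
// "example-instrumentation handler attached"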
});\n * cLogger.debug('test');\n * // @opentelemetry/instrumentation-http test\n */\nexport class DiagComponentLogger implements DiagLogger {\n private _namespace: string;\n\n constructor(props: ComponentLoggerOptions) {\n this._namespace = props.namespace || 'DiagComponentLogger';\n }\n\n public debug(...args: any[]): void {\n return logProxy('debug', this._namespace, args);\n }\n\n public error(...args: any[]): void {\n return logProxy('error', this._namespace, args);\n }\n\n public info(...args: any[]): void {\n return logProxy('info', this._namespace, args);\n }\n\n public warn(...args: any[]): void {\n return logProxy('warn', this._namespace, args);\n }\n\n public verbose(...args: any[]): void {\n return logProxy('verbose', this._namespace, args);\n }\n}\n\nfunction logProxy(\n funcName: keyof DiagLogger,\n namespace: string,\n args: any\n): void {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) {\n return;\n }\n\n args.unshift(namespace);\n return logger[funcName](...(args as Parameters));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.d.ts b/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.d.ts new file mode 100644 index 0000000..fa3db1e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.d.ts @@ -0,0 +1,38 @@ +import { DiagLogger, DiagLogFunction } from './types'; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +export declare class DiagConsoleLogger implements DiagLogger { + constructor(); + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. Useful scenarios would be to log the execution + * order of async operations + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. 
+ */ + verbose: DiagLogFunction; +} +//# sourceMappingURL=consoleLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js new file mode 100644 index 0000000..d81ea30 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +export class DiagConsoleLogger { + constructor() { + function _consoleFunc(funcName) { + return function (...args) { + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + let theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (let i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } +} +//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js.map b/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js.map new file mode 100644 index 0000000..7498dd0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"consoleLogger.js","sourceRoot":"","sources":["../../../src/diag/consoleLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,MAAM,UAAU,GAAiD;IAC/D,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE;CAC7B,CAAC;AAEF;;;;GAIG;AACH,MAAM,OAAO,iBAAiB;IAC5B;QACE,SAAS,YAAY,CAAC,QAAwB;YAC5C,OAAO,UAAU,GAAG,IAAI;gBACtB,IAAI,OAAO,EAAE;oBACX,mFAAmF;oBACnF,sCAAsC;oBACtC,IAAI,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAChC,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,6CAA6C;wBAC7C,sCAAsC;wBACtC,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC;qBACvB;oBAED,uBAAuB;oBACvB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,OAAO,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACrC;iBACF;YACH,CAAC,CAAC;QACJ,CAAC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACvD;IACH,CAAC;CAkCF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger, DiagLogFunction } from './types';\n\ntype ConsoleMapKeys = 'error' | 'warn' | 'info' | 'debug' | 'trace';\nconst consoleMap: { n: keyof DiagLogger; c: ConsoleMapKeys }[] = [\n { n: 'error', c: 'error' },\n { n: 'warn', c: 'warn' },\n { n: 'info', c: 'info' },\n { n: 'debug', c: 'debug' },\n { n: 'verbose', c: 'trace' },\n];\n\n/**\n * A simple Immutable Console based diagnostic logger which will output any messages to the Console.\n * If you want to limit the amount of logging to a specific level or lower use the\n * {@link createLogLevelDiagLogger}\n */\nexport class DiagConsoleLogger implements DiagLogger {\n constructor() {\n function _consoleFunc(funcName: ConsoleMapKeys): DiagLogFunction {\n return function (...args) {\n if (console) {\n // Some environments only expose the console when the F12 developer console is open\n // eslint-disable-next-line no-console\n let theFunc = console[funcName];\n if (typeof theFunc !== 'function') {\n // Not all environments support all functions\n // eslint-disable-next-line no-console\n theFunc = console.log;\n }\n\n // One last final check\n if (typeof theFunc === 'function') {\n return theFunc.apply(console, args);\n }\n }\n };\n }\n\n for (let i = 0; i < consoleMap.length; i++) {\n this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c);\n }\n }\n\n /** Log an error scenario that was not expected and caused the requested operation to fail. 
*/\n public error!: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n public warn!: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n public info!: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario. Useful scenarios would be to log the execution\n * order of async operations\n */\n public debug!: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n public verbose!: DiagLogFunction;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.d.ts b/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.d.ts new file mode 100644 index 0000000..890b9f1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.d.ts @@ -0,0 +1,3 @@ +import { DiagLogger, DiagLogLevel } from '../types'; +export declare function createLogLevelDiagLogger(maxLevel: DiagLogLevel, logger: DiagLogger): DiagLogger; +//# sourceMappingURL=logLevelLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js new file mode 100644 index 0000000..6abf21b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { DiagLogLevel } from '../types'; +export function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < DiagLogLevel.NONE) { + maxLevel = DiagLogLevel.NONE; + } + else if (maxLevel > DiagLogLevel.ALL) { + maxLevel = DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + const theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', DiagLogLevel.ERROR), + warn: _filterFunc('warn', DiagLogLevel.WARN), + info: _filterFunc('info', DiagLogLevel.INFO), + debug: _filterFunc('debug', DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE), + }; +} +//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js.map b/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js.map new file mode 100644 index 0000000..ac34251 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logLevelLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/logLevelLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAA+B,YAAY,EAAE,MAAM,UAAU,CAAC;AAErE,MAAM,UAAU,wBAAwB,CACtC,QAAsB,EACtB,MAAkB;IAElB,IAAI,QAAQ,GAAG,YAAY,CAAC,IAAI,EAAE;QAChC,QAAQ,GAAG,YAAY,CAAC,IAAI,CAAC;KAC9B;SAAM,IAAI,QAAQ,GAAG,YAAY,CAAC,GAAG,EAAE;QACtC,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC;KAC7B;IAED,0CAA0C;IAC1C,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IAEtB,SAAS,WAAW,CAClB,QAA0B,EAC1B,QAAsB;QAEtB,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,OAAO,OAAO,KAAK,UAAU,IAAI,QAAQ,IAAI,QAAQ,EAAE;YACzD,OAAO,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;QACD,OAAO,cAAa,CAAC,CAAC;IACxB,CAAC;IAED,OAAO;QACL,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC;QAC/C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,YAAY,CAAC,IAAI,CAAC;QAC5C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,YAAY,CAAC,IAAI,CAAC;QAC5C,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC;QAC/C,OAAO,EAAE,WAAW,CAAC,SAAS,EAAE,YAAY,CAAC,OAAO,CAAC;KACtD,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogFunction, DiagLogger, DiagLogLevel } from '../types';\n\nexport function createLogLevelDiagLogger(\n maxLevel: DiagLogLevel,\n logger: DiagLogger\n): DiagLogger {\n if (maxLevel < DiagLogLevel.NONE) {\n maxLevel = DiagLogLevel.NONE;\n } else if (maxLevel > DiagLogLevel.ALL) {\n maxLevel = DiagLogLevel.ALL;\n }\n\n // In case the logger is null or undefined\n logger = logger || {};\n\n function _filterFunc(\n funcName: keyof DiagLogger,\n theLevel: DiagLogLevel\n ): DiagLogFunction {\n const theFunc = logger[funcName];\n\n if (typeof theFunc === 'function' && maxLevel >= theLevel) {\n return theFunc.bind(logger);\n }\n return function () {};\n }\n\n return {\n 
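/*
 * The level filter implemented above is applied internally when a level or options object
 * is passed to diag.setLogger; a sketch of the observable effect, with made-up messages.
 */
import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

diag.setLogger(new DiagConsoleLogger(), { logLevel: DiagLogLevel.WARN });

diag.error('boom');          // passes: ERROR (30) is at or below WARN (50)
diag.warn('slow response');  // passes: equal to the configured level
diag.info('routine detail'); // replaced by a no-op function by the wrapper above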
error: _filterFunc('error', DiagLogLevel.ERROR),\n warn: _filterFunc('warn', DiagLogLevel.WARN),\n info: _filterFunc('info', DiagLogLevel.INFO),\n debug: _filterFunc('debug', DiagLogLevel.DEBUG),\n verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.d.ts b/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.d.ts new file mode 100644 index 0000000..ac71ee3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.d.ts @@ -0,0 +1,8 @@ +import { DiagLogger } from '../types'; +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. + * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export declare function createNoopDiagLogger(): DiagLogger; +//# sourceMappingURL=noopLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js new file mode 100644 index 0000000..7d5ba63 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +function noopLogFunction() { } +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. 
+ * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export function createNoopDiagLogger() { + return { + verbose: noopLogFunction, + debug: noopLogFunction, + info: noopLogFunction, + warn: noopLogFunction, + error: noopLogFunction, + }; +} +//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js.map b/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js.map new file mode 100644 index 0000000..bf20aea --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"noopLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/noopLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,SAAS,eAAe,KAAI,CAAC;AAE7B;;;;GAIG;AACH,MAAM,UAAU,oBAAoB;IAClC,OAAO;QACL,OAAO,EAAE,eAAe;QACxB,KAAK,EAAE,eAAe;QACtB,IAAI,EAAE,eAAe;QACrB,IAAI,EAAE,eAAe;QACrB,KAAK,EAAE,eAAe;KACvB,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger } from '../types';\n\nfunction noopLogFunction() {}\n\n/**\n * Returns a No-Op Diagnostic logger where all messages do nothing.\n * @implements {@link DiagLogger}\n * @returns {DiagLogger}\n */\nexport function createNoopDiagLogger(): DiagLogger {\n return {\n verbose: noopLogFunction,\n debug: noopLogFunction,\n info: noopLogFunction,\n warn: noopLogFunction,\n error: noopLogFunction,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/types.d.ts b/node_modules/@opentelemetry/api/build/esnext/diag/types.d.ts new file mode 100644 index 0000000..e992cc5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/types.d.ts @@ -0,0 +1,100 @@ +export declare type DiagLogFunction = (message: string, ...args: unknown[]) => void; +/** + * Defines an internal diagnostic logger interface which is used to log internal diagnostic + * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function. + * API provided implementations include :- + * - a No-Op {@link createNoopDiagLogger} + * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger} + * - a general Console {@link DiagConsoleLogger} version. + */ +export interface DiagLogger { + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. 
+ */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. + * For example: Logging the order of execution of async operations. + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export declare enum DiagLogLevel { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + NONE = 0, + /** Identifies an error scenario */ + ERROR = 30, + /** Identifies a warning scenario */ + WARN = 50, + /** General informational log message */ + INFO = 60, + /** General debug log message */ + DEBUG = 70, + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + VERBOSE = 80, + /** Used to set the logging level to include all logging */ + ALL = 9999 +} +/** + * Defines options for ComponentLogger + */ +export interface ComponentLoggerOptions { + namespace: string; +} +export interface DiagLoggerOptions { + /** + * The {@link DiagLogLevel} used to filter logs sent to the logger. + * + * @defaultValue DiagLogLevel.INFO + */ + logLevel?: DiagLogLevel; + /** + * Setting this value to `true` will suppress the warning message normally emitted when registering a logger when another logger is already registered. + */ + suppressOverrideMessage?: boolean; +} +export interface DiagLoggerApi { + /** + * Set the global DiagLogger and DiagLogLevel. + * If a global diag logger is already set, this will override it. + * + * @param logger - The {@link DiagLogger} instance to set as the default logger. + * @param options - A {@link DiagLoggerOptions} object. If not provided, default values will be set. + * @returns `true` if the logger was successfully registered, else `false` + */ + setLogger(logger: DiagLogger, options?: DiagLoggerOptions): boolean; + /** + * + * @param logger - The {@link DiagLogger} instance to set as the default logger. + * @param logLevel - The {@link DiagLogLevel} used to filter logs sent to the logger. If not provided it will default to {@link DiagLogLevel.INFO}. + * @returns `true` if the logger was successfully registered, else `false` + */ + setLogger(logger: DiagLogger, logLevel?: DiagLogLevel): boolean; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/types.js b/node_modules/@opentelemetry/api/build/esnext/diag/types.js new file mode 100644 index 0000000..306585e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/types.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel || (DiagLogLevel = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/types.js.map b/node_modules/@opentelemetry/api/build/esnext/diag/types.js.map new file mode 100644 index 0000000..6578cce --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/diag/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/diag/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AA+CH;;;;GAIG;AACH,MAAM,CAAN,IAAY,YAwBX;AAxBD,WAAY,YAAY;IACtB,uFAAuF;IACvF,+CAAQ,CAAA;IAER,mCAAmC;IACnC,kDAAU,CAAA;IAEV,oCAAoC;IACpC,gDAAS,CAAA;IAET,wCAAwC;IACxC,gDAAS,CAAA;IAET,gCAAgC;IAChC,kDAAU,CAAA;IAEV;;;OAGG;IACH,sDAAY,CAAA;IAEZ,2DAA2D;IAC3D,gDAAU,CAAA;AACZ,CAAC,EAxBW,YAAY,KAAZ,YAAY,QAwBvB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport type DiagLogFunction = (message: string, ...args: unknown[]) => void;\n\n/**\n * Defines an internal diagnostic logger interface which is used to log internal diagnostic\n * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function.\n * API provided implementations include :-\n * - a No-Op {@link createNoopDiagLogger}\n * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger}\n * - a general Console {@link DiagConsoleLogger} 
version.\n */\nexport interface DiagLogger {\n /** Log an error scenario that was not expected and caused the requested operation to fail. */\n error: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n warn: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n info: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario.\n * For example: Logging the order of execution of async operations.\n */\n debug: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n verbose: DiagLogFunction;\n}\n\n/**\n * Defines the available internal logging levels for the diagnostic logger, the numeric values\n * of the levels are defined to match the original values from the initial LogLevel to avoid\n * compatibility/migration issues for any implementation that assume the numeric ordering.\n */\nexport enum DiagLogLevel {\n /** Diagnostic Logging level setting to disable all logging (except and forced logs) */\n NONE = 0,\n\n /** Identifies an error scenario */\n ERROR = 30,\n\n /** Identifies a warning scenario */\n WARN = 50,\n\n /** General informational log message */\n INFO = 60,\n\n /** General debug log message */\n DEBUG = 70,\n\n /**\n * Detailed trace level logging should only be used for development, should only be set\n * in a development environment.\n */\n VERBOSE = 80,\n\n /** Used to set the logging level to include all logging */\n ALL = 9999,\n}\n\n/**\n * Defines options for ComponentLogger\n */\nexport interface ComponentLoggerOptions {\n namespace: string;\n}\n\nexport interface DiagLoggerOptions {\n /**\n * The {@link DiagLogLevel} used to filter logs sent to the logger.\n *\n * @defaultValue DiagLogLevel.INFO\n */\n logLevel?: DiagLogLevel;\n\n /**\n * Setting this value to `true` will suppress the warning message normally emitted when registering a logger when another logger is already registered.\n */\n suppressOverrideMessage?: boolean;\n}\n\nexport interface DiagLoggerApi {\n /**\n * Set the global DiagLogger and DiagLogLevel.\n * If a global diag logger is already set, this will override it.\n *\n * @param logger - The {@link DiagLogger} instance to set as the default logger.\n * @param options - A {@link DiagLoggerOptions} object. If not provided, default values will be set.\n * @returns `true` if the logger was successfully registered, else `false`\n */\n setLogger(logger: DiagLogger, options?: DiagLoggerOptions): boolean;\n\n /**\n *\n * @param logger - The {@link DiagLogger} instance to set as the default logger.\n * @param logLevel - The {@link DiagLogLevel} used to filter logs sent to the logger. 
If not provided it will default to {@link DiagLogLevel.INFO}.\n * @returns `true` if the logger was successfully registered, else `false`\n */\n setLogger(logger: DiagLogger, logLevel?: DiagLogLevel): boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/index.d.ts b/node_modules/@opentelemetry/api/build/esnext/experimental/index.d.ts new file mode 100644 index 0000000..bec3965 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/index.d.ts @@ -0,0 +1,3 @@ +export { wrapTracer, SugaredTracer } from './trace/SugaredTracer'; +export { SugaredSpanOptions } from './trace/SugaredOptions'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/index.js b/node_modules/@opentelemetry/api/build/esnext/experimental/index.js new file mode 100644 index 0000000..8400e49 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export { wrapTracer, SugaredTracer } from './trace/SugaredTracer'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/index.js.map b/node_modules/@opentelemetry/api/build/esnext/experimental/index.js.map new file mode 100644 index 0000000..d1699d9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/experimental/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,OAAO,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport { wrapTracer, SugaredTracer } from './trace/SugaredTracer';\nexport { SugaredSpanOptions } from './trace/SugaredOptions';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.d.ts b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.d.ts new file mode 100644 index 0000000..89040af --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.d.ts @@ -0,0 +1,13 @@ +import { Span, SpanOptions } from '../../'; +/** + * Options needed for span creation + */ +export interface 
SugaredSpanOptions extends SpanOptions { + /** + * function to overwrite default exception behavior to record the exception. No exceptions should be thrown in the function. + * @param e Error which triggered this exception + * @param span current span from context + */ + onException?: (e: Error, span: Span) => void; +} +//# sourceMappingURL=SugaredOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js new file mode 100644 index 0000000..0c6a2bd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=SugaredOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js.map b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js.map new file mode 100644 index 0000000..2a18a56 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SugaredOptions.js","sourceRoot":"","sources":["../../../../src/experimental/trace/SugaredOptions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Span, SpanOptions } from '../../';\n\n/**\n * Options needed for span creation\n */\nexport interface SugaredSpanOptions extends SpanOptions {\n /**\n * function to overwrite default exception behavior to record the exception. 
No exceptions should be thrown in the function.\n * @param e Error which triggered this exception\n * @param span current span from context\n */\n onException?: (e: Error, span: Span) => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.d.ts b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.d.ts new file mode 100644 index 0000000..1ba7da9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.d.ts @@ -0,0 +1,64 @@ +import { SugaredSpanOptions } from './SugaredOptions'; +import { Context, Span, Tracer } from '../../'; +/** + * return a new SugaredTracer created from the supplied one + * @param tracer + */ +export declare function wrapTracer(tracer: Tracer): SugaredTracer; +export declare class SugaredTracer implements Tracer { + private readonly _tracer; + constructor(tracer: Tracer); + startActiveSpan: Tracer['startActiveSpan']; + startSpan: Tracer['startSpan']; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally, the new span gets set in context and this context is activated + * for the duration of the function call. + * The span will be closed after the function has executed. + * If an exception occurs, it is recorded, the status is set to ERROR and the exception is rethrown. + * + * @param name The name of the span + * @param [options] SugaredSpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.withActiveSpan('op', span => { + * // do some work + * }); + * @example + * const something = await tracer.withActiveSpan('op', span => { + * // do some async work + * }); + */ + withActiveSpan ReturnType>(name: string, fn: F): ReturnType; + withActiveSpan ReturnType>(name: string, options: SugaredSpanOptions, fn: F): ReturnType; + withActiveSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; + /** + * Starts a new {@link Span} and ends it after execution of fn without setting it on context. + * The span will be closed after the function has executed. + * If an exception occurs, it is recorded, the status is et to ERROR and rethrown. + * + * This method does NOT modify the current Context. 
+ * + * @param name The name of the span + * @param [options] SugaredSpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns Span The newly created span + * @example + * const something = tracer.withSpan('op', span => { + * // do some work + * }); + * @example + * const something = await tracer.withSpan('op', span => { + * // do some async work + * }); + */ + withSpan ReturnType>(name: string, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; +} +//# sourceMappingURL=SugaredTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js new file mode 100644 index 0000000..a1edc7f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js @@ -0,0 +1,88 @@ +import { context, SpanStatusCode } from '../../'; +const defaultOnException = (e, span) => { + span.recordException(e); + span.setStatus({ + code: SpanStatusCode.ERROR, + }); +}; +/** + * return a new SugaredTracer created from the supplied one + * @param tracer + */ +export function wrapTracer(tracer) { + return new SugaredTracer(tracer); +} +export class SugaredTracer { + constructor(tracer) { + this._tracer = tracer; + this.startSpan = tracer.startSpan.bind(this._tracer); + this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer); + } + withActiveSpan(name, arg2, arg3, arg4) { + const { opts, ctx, fn } = massageParams(arg2, arg3, arg4); + return this._tracer.startActiveSpan(name, opts, ctx, (span) => handleFn(span, opts, fn)); + } + withSpan(name, arg2, arg3, arg4) { + const { opts, ctx, fn } = massageParams(arg2, arg3, arg4); + const span = this._tracer.startSpan(name, opts, ctx); + return handleFn(span, opts, fn); + } +} +/** + * Massages parameters of withSpan and withActiveSpan to allow signature overwrites + * @param arg + * @param arg2 + * @param arg3 + */ +function massageParams(arg, arg2, arg3) { + let opts; + let ctx; + let fn; + if (!arg2 && !arg3) { + fn = arg; + } + else if (!arg3) { + opts = arg; + fn = arg2; + } + else { + opts = arg; + ctx = arg2; + fn = arg3; + } + opts = opts !== null && opts !== void 0 ? opts : {}; + ctx = ctx !== null && ctx !== void 0 ? ctx : context.active(); + return { opts, ctx, fn }; +} +/** + * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling + * @param span + * @param opts + * @param fn + */ +function handleFn(span, opts, fn) { + var _a; + const onException = (_a = opts.onException) !== null && _a !== void 0 ? _a : defaultOnException; + const errorHandler = (e) => { + onException(e, span); + span.end(); + throw e; + }; + try { + const ret = fn(span); + // if fn is an async function, attach a recordException and spanEnd callback to the promise + if (typeof (ret === null || ret === void 0 ? 
void 0 : ret.then) === 'function') { + return ret.then(val => { + span.end(); + return val; + }, errorHandler); + } + span.end(); + return ret; + } + catch (e) { + // add throw to signal the compiler that this will throw in the inner scope + throw errorHandler(e); + } +} +//# sourceMappingURL=SugaredTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js.map b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js.map new file mode 100644 index 0000000..600ea54 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SugaredTracer.js","sourceRoot":"","sources":["../../../../src/experimental/trace/SugaredTracer.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,OAAO,EAAiB,cAAc,EAAU,MAAM,QAAQ,CAAC;AAExE,MAAM,kBAAkB,GAAG,CAAC,CAAQ,EAAE,IAAU,EAAE,EAAE;IAClD,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,CAAC,SAAS,CAAC;QACb,IAAI,EAAE,cAAc,CAAC,KAAK;KAC3B,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;GAGG;AACH,MAAM,UAAU,UAAU,CAAC,MAAc;IACvC,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,CAAC;AACnC,CAAC;AAED,MAAM,OAAO,aAAa;IAGxB,YAAY,MAAc;QACxB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QACtB,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACnE,CAAC;IA0CD,cAAc,CACZ,IAAY,EACZ,IAA4B,EAC5B,IAAkB,EAClB,IAAQ;QAER,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,aAAa,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QAE1D,OAAO,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,IAAI,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,IAAU,EAAE,EAAE,CAClE,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,EAAE,CAAC,CACR,CAAC;IACrB,CAAC;IA4CD,QAAQ,CACN,IAAY,EACZ,IAA4B,EAC5B,IAAkB,EAClB,IAAQ;QAER,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,aAAa,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QAE1D,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,GAAG,CAAC,CAAC;QACrD,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,EAAE,CAAkB,CAAC;IACnD,CAAC;CACF;AAED;;;;;GAKG;AACH,SAAS,aAAa,CACpB,GAA2B,EAC3B,IAAkB,EAClB,IAAQ;IAER,IAAI,IAAoC,CAAC;IACzC,IAAI,GAAwB,CAAC;IAC7B,IAAI,EAAK,CAAC;IAEV,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,EAAE;QAClB,EAAE,GAAG,GAAQ,CAAC;KACf;SAAM,IAAI,CAAC,IAAI,EAAE;QAChB,IAAI,GAAG,GAAyB,CAAC;QACjC,EAAE,GAAG,IAAS,CAAC;KAChB;SAAM;QACL,IAAI,GAAG,GAAyB,CAAC;QACjC,GAAG,GAAG,IAAe,CAAC;QACtB,EAAE,GAAG,IAAS,CAAC;KAChB;IACD,IAAI,GAAG,IAAI,aAAJ,IAAI,cAAJ,IAAI,GAAI,EAAE,CAAC;IAClB,GAAG,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAE9B,OAAO,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,CAAC;AAC3B,CAAC;AAED;;;;;GAKG;AACH,SAAS,QAAQ,CACf,IAAU,EACV,IAAwB,EACxB,EAAK;;IAEL,MAAM,WAAW,GAAG,MAAA,IAAI,CAAC,WAAW,mCAAI,kBAAkB,CAAC;IAC3D,MAAM,YAAY,GAAG,CAAC,CAAQ,EAAE,EAAE;QAChC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QACrB,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,MAAM,CAAC,CAAC;IACV,CAAC,CAAC;IAEF,IAAI;QACF,MAAM,GAAG,GAAG,EAAE,CAAC,IAAI,CAA2B,CAAC;QAC/C,2FAA2F;QAC3F,IAAI,OAAO,CAAA,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,IAAI,CAAA,KAAK,UAAU,EAAE;YACnC,OAAO,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACpB,IAAI,CAAC,GAAG,EAAE,CAAC;gBACX,OAAO,GAAG,CAAC;YACb,CAAC,EAAE,YAAY,CAAkB,CAAC;SACnC;QACD,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,OAAO,GAAoB,CAAC;KAC7B;IAAC,OAAO,CAAC,EAAE;QACV,2EAA2E;QAC3E,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;AACH,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * 
https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { SugaredSpanOptions } from './SugaredOptions';\nimport { context, Context, Span, SpanStatusCode, Tracer } from '../../';\n\nconst defaultOnException = (e: Error, span: Span) => {\n span.recordException(e);\n span.setStatus({\n code: SpanStatusCode.ERROR,\n });\n};\n\n/**\n * return a new SugaredTracer created from the supplied one\n * @param tracer\n */\nexport function wrapTracer(tracer: Tracer): SugaredTracer {\n return new SugaredTracer(tracer);\n}\n\nexport class SugaredTracer implements Tracer {\n private readonly _tracer: Tracer;\n\n constructor(tracer: Tracer) {\n this._tracer = tracer;\n this.startSpan = tracer.startSpan.bind(this._tracer);\n this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer);\n }\n\n startActiveSpan: Tracer['startActiveSpan'];\n startSpan: Tracer['startSpan'];\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally, the new span gets set in context and this context is activated\n * for the duration of the function call.\n * The span will be closed after the function has executed.\n * If an exception occurs, it is recorded, the status is set to ERROR and the exception is rethrown.\n *\n * @param name The name of the span\n * @param [options] SugaredSpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.withActiveSpan('op', span => {\n * // do some work\n * });\n * @example\n * const something = await tracer.withActiveSpan('op', span => {\n * // do some async work\n * });\n */\n withActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n arg2: F | SugaredSpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType {\n const { opts, ctx, fn } = massageParams(arg2, arg3, arg4);\n\n return this._tracer.startActiveSpan(name, opts, ctx, (span: Span) =>\n handleFn(span, opts, fn)\n ) as ReturnType;\n }\n\n /**\n * Starts a new {@link Span} and ends it after execution of fn without setting it on context.\n * The span will be closed after the function has executed.\n * If an exception occurs, it is recorded, the status is et to ERROR and rethrown.\n *\n * This method does NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SugaredSpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns Span The newly created span\n * @example\n * const something = tracer.withSpan('op', span => {\n * // do some work\n * });\n * @example\n * const something = await tracer.withSpan('op', span => {\n * // do some async work\n * });\n */\n 
withSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n arg2: SugaredSpanOptions | F,\n arg3?: Context | F,\n arg4?: F\n ): ReturnType {\n const { opts, ctx, fn } = massageParams(arg2, arg3, arg4);\n\n const span = this._tracer.startSpan(name, opts, ctx);\n return handleFn(span, opts, fn) as ReturnType;\n }\n}\n\n/**\n * Massages parameters of withSpan and withActiveSpan to allow signature overwrites\n * @param arg\n * @param arg2\n * @param arg3\n */\nfunction massageParams ReturnType>(\n arg: F | SugaredSpanOptions,\n arg2?: F | Context,\n arg3?: F\n) {\n let opts: SugaredSpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (!arg2 && !arg3) {\n fn = arg as F;\n } else if (!arg3) {\n opts = arg as SugaredSpanOptions;\n fn = arg2 as F;\n } else {\n opts = arg as SugaredSpanOptions;\n ctx = arg2 as Context;\n fn = arg3 as F;\n }\n opts = opts ?? {};\n ctx = ctx ?? context.active();\n\n return { opts, ctx, fn };\n}\n\n/**\n * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling\n * @param span\n * @param opts\n * @param fn\n */\nfunction handleFn ReturnType>(\n span: Span,\n opts: SugaredSpanOptions,\n fn: F\n): ReturnType {\n const onException = opts.onException ?? defaultOnException;\n const errorHandler = (e: Error) => {\n onException(e, span);\n span.end();\n throw e;\n };\n\n try {\n const ret = fn(span) as Promise>;\n // if fn is an async function, attach a recordException and spanEnd callback to the promise\n if (typeof ret?.then === 'function') {\n return ret.then(val => {\n span.end();\n return val;\n }, errorHandler) as ReturnType;\n }\n span.end();\n return ret as ReturnType;\n } catch (e) {\n // add throw to signal the compiler that this will throw in the inner scope\n throw errorHandler(e);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/index.d.ts b/node_modules/@opentelemetry/api/build/esnext/index.d.ts new file mode 100644 index 0000000..89ae493 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/index.d.ts @@ -0,0 +1,54 @@ +export { BaggageEntry, BaggageEntryMetadata, Baggage } from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export { Exception } from './common/Exception'; +export { HrTime, TimeInput } from './common/Time'; +export { Attributes, AttributeValue } from './common/Attributes'; +export { createContextKey, ROOT_CONTEXT } from './context/context'; +export { Context, ContextManager } from './context/types'; +export type { ContextAPI } from './api/context'; +export { DiagConsoleLogger } from './diag/consoleLogger'; +export { DiagLogFunction, DiagLogger, DiagLogLevel, ComponentLoggerOptions, DiagLoggerOptions, } from './diag/types'; +export type { DiagAPI } from './api/diag'; +export { createNoopMeter } from './metrics/NoopMeter'; +export { MeterOptions, Meter } from './metrics/Meter'; +export { MeterProvider } from './metrics/MeterProvider'; +export { ValueType, Counter, Histogram, MetricOptions, Observable, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter, BatchObservableCallback, 
MetricAdvice, MetricAttributes, MetricAttributeValue, ObservableCallback, } from './metrics/Metric'; +export { BatchObservableResult, ObservableResult, } from './metrics/ObservableResult'; +export type { MetricsAPI } from './api/metrics'; +export { TextMapPropagator, TextMapSetter, TextMapGetter, defaultTextMapGetter, defaultTextMapSetter, } from './propagation/TextMapPropagator'; +export type { PropagationAPI } from './api/propagation'; +export { SpanAttributes, SpanAttributeValue } from './trace/attributes'; +export { Link } from './trace/link'; +export { ProxyTracer, TracerDelegator } from './trace/ProxyTracer'; +export { ProxyTracerProvider } from './trace/ProxyTracerProvider'; +export { Sampler } from './trace/Sampler'; +export { SamplingDecision, SamplingResult } from './trace/SamplingResult'; +export { SpanContext } from './trace/span_context'; +export { SpanKind } from './trace/span_kind'; +export { Span } from './trace/span'; +export { SpanOptions } from './trace/SpanOptions'; +export { SpanStatus, SpanStatusCode } from './trace/status'; +export { TraceFlags } from './trace/trace_flags'; +export { TraceState } from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export { TracerProvider } from './trace/tracer_provider'; +export { Tracer } from './trace/tracer'; +export { TracerOptions } from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export type { TraceAPI } from './api/trace'; +import { context } from './context-api'; +import { diag } from './diag-api'; +import { metrics } from './metrics-api'; +import { propagation } from './propagation-api'; +import { trace } from './trace-api'; +export { context, diag, metrics, propagation, trace }; +declare const _default: { + context: import("./api/context").ContextAPI; + diag: import("./api/diag").DiagAPI; + metrics: import("./api/metrics").MetricsAPI; + propagation: import("./api/propagation").PropagationAPI; + trace: import("./api/trace").TraceAPI; +}; +export default _default; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/index.js b/node_modules/@opentelemetry/api/build/esnext/index.js new file mode 100644 index 0000000..123dc4d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/index.js @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export { baggageEntryMetadataFromString } from './baggage/utils'; +// Context APIs +export { createContextKey, ROOT_CONTEXT } from './context/context'; +// Diag APIs +export { DiagConsoleLogger } from './diag/consoleLogger'; +export { DiagLogLevel, } from './diag/types'; +// Metrics APIs +export { createNoopMeter } from './metrics/NoopMeter'; +export { ValueType, } from './metrics/Metric'; +// Propagation APIs +export { defaultTextMapGetter, defaultTextMapSetter, } from './propagation/TextMapPropagator'; +export { ProxyTracer } from './trace/ProxyTracer'; +export { ProxyTracerProvider } from './trace/ProxyTracerProvider'; +export { SamplingDecision } from './trace/SamplingResult'; +export { SpanKind } from './trace/span_kind'; +export { SpanStatusCode } from './trace/status'; +export { TraceFlags } from './trace/trace_flags'; +export { createTraceState } from './trace/internal/utils'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { context } from './context-api'; +import { diag } from './diag-api'; +import { metrics } from './metrics-api'; +import { propagation } from './propagation-api'; +import { trace } from './trace-api'; +// Named export. +export { context, diag, metrics, propagation, trace }; +// Default export. +export default { + context, + diag, + metrics, + propagation, + trace, +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/index.js.map b/node_modules/@opentelemetry/api/build/esnext/index.js.map new file mode 100644 index 0000000..1f0fee8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,8BAA8B,EAAE,MAAM,iBAAiB,CAAC;AAKjE,eAAe;AACf,OAAO,EAAE,gBAAgB,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAInE,YAAY;AACZ,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AACzD,OAAO,EAGL,YAAY,GAGb,MAAM,cAAc,CAAC;AAGtB,eAAe;AACf,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AAGtD,OAAO,EACL,SAAS,GAcV,MAAM,kBAAkB,CAAC;AAO1B,mBAAmB;AACnB,OAAO,EAIL,oBAAoB,EACpB,oBAAoB,GACrB,MAAM,iCAAiC,CAAC;AAMzC,OAAO,EAAE,WAAW,EAAmB,MAAM,qBAAqB,CAAC;AACnE,OAAO,EAAE,mBAAmB,EAAE,MAAM,6BAA6B,CAAC;AAElE,OAAO,EAAE,gBAAgB,EAAkB,MAAM,wBAAwB,CAAC;AAE1E,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAG7C,OAAO,EAAc,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAC5D,OAAO,EAAE,UAAU,EAAE,MAAM,qBAAqB,CAAC;AAEjD,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAI1D,OAAO,EACL,kBAAkB,EAClB,cAAc,EACd,aAAa,GACd,MAAM,2BAA2B,CAAC;AACnC,OAAO,EACL,cAAc,EACd,eAAe,EACf,oBAAoB,GACrB,MAAM,gCAAgC,CAAC;AAGxC,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AACxC,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AACxC,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAChD,OAAO,EAAE,KAAK,EAAE,MAAM,aAAa,CAAC;AAEpC,gBAAgB;AAChB,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,KAAK,EAAE,CAAC;AACtD,kBAAkB;AAClB,eAAe;IACb,OAAO;IACP,IAAI;IACJ,OAAO;IACP,WAAW;IACX,KAAK;CACN,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * 
https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport { BaggageEntry, BaggageEntryMetadata, Baggage } from './baggage/types';\nexport { baggageEntryMetadataFromString } from './baggage/utils';\nexport { Exception } from './common/Exception';\nexport { HrTime, TimeInput } from './common/Time';\nexport { Attributes, AttributeValue } from './common/Attributes';\n\n// Context APIs\nexport { createContextKey, ROOT_CONTEXT } from './context/context';\nexport { Context, ContextManager } from './context/types';\nexport type { ContextAPI } from './api/context';\n\n// Diag APIs\nexport { DiagConsoleLogger } from './diag/consoleLogger';\nexport {\n DiagLogFunction,\n DiagLogger,\n DiagLogLevel,\n ComponentLoggerOptions,\n DiagLoggerOptions,\n} from './diag/types';\nexport type { DiagAPI } from './api/diag';\n\n// Metrics APIs\nexport { createNoopMeter } from './metrics/NoopMeter';\nexport { MeterOptions, Meter } from './metrics/Meter';\nexport { MeterProvider } from './metrics/MeterProvider';\nexport {\n ValueType,\n Counter,\n Histogram,\n MetricOptions,\n Observable,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n UpDownCounter,\n BatchObservableCallback,\n MetricAdvice,\n MetricAttributes,\n MetricAttributeValue,\n ObservableCallback,\n} from './metrics/Metric';\nexport {\n BatchObservableResult,\n ObservableResult,\n} from './metrics/ObservableResult';\nexport type { MetricsAPI } from './api/metrics';\n\n// Propagation APIs\nexport {\n TextMapPropagator,\n TextMapSetter,\n TextMapGetter,\n defaultTextMapGetter,\n defaultTextMapSetter,\n} from './propagation/TextMapPropagator';\nexport type { PropagationAPI } from './api/propagation';\n\n// Trace APIs\nexport { SpanAttributes, SpanAttributeValue } from './trace/attributes';\nexport { Link } from './trace/link';\nexport { ProxyTracer, TracerDelegator } from './trace/ProxyTracer';\nexport { ProxyTracerProvider } from './trace/ProxyTracerProvider';\nexport { Sampler } from './trace/Sampler';\nexport { SamplingDecision, SamplingResult } from './trace/SamplingResult';\nexport { SpanContext } from './trace/span_context';\nexport { SpanKind } from './trace/span_kind';\nexport { Span } from './trace/span';\nexport { SpanOptions } from './trace/SpanOptions';\nexport { SpanStatus, SpanStatusCode } from './trace/status';\nexport { TraceFlags } from './trace/trace_flags';\nexport { TraceState } from './trace/trace_state';\nexport { createTraceState } from './trace/internal/utils';\nexport { TracerProvider } from './trace/tracer_provider';\nexport { Tracer } from './trace/tracer';\nexport { TracerOptions } from './trace/tracer_options';\nexport {\n isSpanContextValid,\n isValidTraceId,\n isValidSpanId,\n} from './trace/spancontext-utils';\nexport {\n INVALID_SPANID,\n INVALID_TRACEID,\n INVALID_SPAN_CONTEXT,\n} from './trace/invalid-span-constants';\nexport type { TraceAPI } from './api/trace';\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { context } from './context-api';\nimport { diag } from './diag-api';\nimport { metrics } from './metrics-api';\nimport { propagation } from './propagation-api';\nimport { trace } from 
'./trace-api';\n\n// Named export.\nexport { context, diag, metrics, propagation, trace };\n// Default export.\nexport default {\n context,\n diag,\n metrics,\n propagation,\n trace,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.d.ts b/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.d.ts new file mode 100644 index 0000000..320db97 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.d.ts @@ -0,0 +1,18 @@ +import { MeterProvider } from '../metrics/MeterProvider'; +import { ContextManager } from '../context/types'; +import { DiagLogger } from '../diag/types'; +import { TextMapPropagator } from '../propagation/TextMapPropagator'; +import type { TracerProvider } from '../trace/tracer_provider'; +export declare function registerGlobal(type: Type, instance: OTelGlobalAPI[Type], diag: DiagLogger, allowOverride?: boolean): boolean; +export declare function getGlobal(type: Type): OTelGlobalAPI[Type] | undefined; +export declare function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger): void; +declare type OTelGlobalAPI = { + version: string; + diag?: DiagLogger; + trace?: TracerProvider; + context?: ContextManager; + metrics?: MeterProvider; + propagation?: TextMapPropagator; +}; +export {}; +//# sourceMappingURL=global-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js b/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js new file mode 100644 index 0000000..acdd185 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { _globalThis } from '../platform'; +import { VERSION } from '../version'; +import { isCompatible } from './semver'; +const major = VERSION.split('.')[0]; +const GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(`opentelemetry.js.api.${major}`); +const _global = _globalThis; +export function registerGlobal(type, instance, diag, allowOverride = false) { + var _a; + const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? 
_a : { + version: VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + const err = new Error(`@opentelemetry/api: Attempted duplicate registration of API: ${type}`); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== VERSION) { + // All registered APIs must be of the same version exactly + const err = new Error(`@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${VERSION}`); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug(`@opentelemetry/api: Registered a global for ${type} v${VERSION}.`); + return true; +} +export function getGlobal(type) { + var _a, _b; + const globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !isCompatible(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; +} +export function unregisterGlobal(type, diag) { + diag.debug(`@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`); + const api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js.map b/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js.map new file mode 100644 index 0000000..6c95b12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"global-utils.js","sourceRoot":"","sources":["../../../src/internal/global-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAG1C,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAExC,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,MAAM,4BAA4B,GAAG,MAAM,CAAC,GAAG,CAC7C,wBAAwB,KAAK,EAAE,CAChC,CAAC;AAEF,MAAM,OAAO,GAAG,WAAyB,CAAC;AAE1C,MAAM,UAAU,cAAc,CAC5B,IAAU,EACV,QAA6B,EAC7B,IAAgB,EAChB,aAAa,GAAG,KAAK;;IAErB,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC,4BAA4B,CAAC,GAAG,MAAA,OAAO,CAC1D,4BAA4B,CAC7B,mCAAI;QACH,OAAO,EAAE,OAAO;KACjB,CAAC,CAAC;IAEH,IAAI,CAAC,aAAa,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE;QAC/B,yCAAyC;QACzC,MAAM,GAAG,GAAG,IAAI,KAAK,CACnB,gEAAgE,IAAI,EAAE,CACvE,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,IAAI,GAAG,CAAC,OAAO,KAAK,OAAO,EAAE;QAC3B,0DAA0D;QAC1D,MAAM,GAAG,GAAG,IAAI,KAAK,CACnB,gDAAgD,GAAG,CAAC,OAAO,QAAQ,IAAI,8CAA8C,OAAO,EAAE,CAC/H,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,GAAG,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;IACrB,IAAI,CAAC,KAAK,CACR,+CAA+C,IAAI,KAAK,OAAO,GAAG,CACnE,CAAC;IAEF,OAAO,IAAI,CAAC;AACd,CAAC;AAED,MAAM,UAAU,SAAS,CACvB,IAAU;;IAEV,MAAM,aAAa,GAAG,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAE,OAAO,CAAC;IACrE,IAAI,CAAC,aAAa,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,EAAE;QAClD,OAAO;KACR;IACD,OAAO,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAG,IAAI,CAAC,CAAC;AACvD,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,IAAyB,EAAE,IAAgB;IAC1E,IAAI,CAAC,KAAK,CACR,kDAAkD,IAAI,KAAK,OAAO,GAAG,CACtE,CAAC;IACF,MAAM,GAAG,GAAG,OAAO,CAAC,4BAA4B,CAAC,CAAC;IAElD,IAAI,GAAG,EAAE;QACP,OAAO,GAAG,CAAC,IAAI,CAAC,CAAC;KAClB;AACH,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in 
compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { MeterProvider } from '../metrics/MeterProvider';\nimport { ContextManager } from '../context/types';\nimport { DiagLogger } from '../diag/types';\nimport { _globalThis } from '../platform';\nimport { TextMapPropagator } from '../propagation/TextMapPropagator';\nimport type { TracerProvider } from '../trace/tracer_provider';\nimport { VERSION } from '../version';\nimport { isCompatible } from './semver';\n\nconst major = VERSION.split('.')[0];\nconst GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(\n `opentelemetry.js.api.${major}`\n);\n\nconst _global = _globalThis as OTelGlobal;\n\nexport function registerGlobal(\n type: Type,\n instance: OTelGlobalAPI[Type],\n diag: DiagLogger,\n allowOverride = false\n): boolean {\n const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = _global[\n GLOBAL_OPENTELEMETRY_API_KEY\n ] ?? {\n version: VERSION,\n });\n\n if (!allowOverride && api[type]) {\n // already registered an API of this type\n const err = new Error(\n `@opentelemetry/api: Attempted duplicate registration of API: ${type}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n if (api.version !== VERSION) {\n // All registered APIs must be of the same version exactly\n const err = new Error(\n `@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${VERSION}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n api[type] = instance;\n diag.debug(\n `@opentelemetry/api: Registered a global for ${type} v${VERSION}.`\n );\n\n return true;\n}\n\nexport function getGlobal(\n type: Type\n): OTelGlobalAPI[Type] | undefined {\n const globalVersion = _global[GLOBAL_OPENTELEMETRY_API_KEY]?.version;\n if (!globalVersion || !isCompatible(globalVersion)) {\n return;\n }\n return _global[GLOBAL_OPENTELEMETRY_API_KEY]?.[type];\n}\n\nexport function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger) {\n diag.debug(\n `@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`\n );\n const api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n\n if (api) {\n delete api[type];\n }\n}\n\ntype OTelGlobal = {\n [GLOBAL_OPENTELEMETRY_API_KEY]?: OTelGlobalAPI;\n};\n\ntype OTelGlobalAPI = {\n version: string;\n\n diag?: DiagLogger;\n trace?: TracerProvider;\n context?: ContextManager;\n metrics?: MeterProvider;\n propagation?: TextMapPropagator;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/internal/semver.d.ts b/node_modules/@opentelemetry/api/build/esnext/internal/semver.d.ts new file mode 100644 index 0000000..d9f4259 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/internal/semver.d.ts @@ -0,0 +1,34 @@ +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export declare function _makeCompatibilityCheck(ownVersion: string): (globalVersion: string) => boolean; +/** + * Test an API version to see if it is compatible with this API. + * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export declare const isCompatible: (globalVersion: string) => boolean; +//# sourceMappingURL=semver.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/internal/semver.js b/node_modules/@opentelemetry/api/build/esnext/internal/semver.js new file mode 100644 index 0000000..85e5980 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/internal/semver.js @@ -0,0 +1,118 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { VERSION } from '../version'; +const re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export function _makeCompatibilityCheck(ownVersion) { + const acceptedVersions = new Set([ownVersion]); + const rejectedVersions = new Set(); + const myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return () => false; + } + const ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + const globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + const globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export const isCompatible = _makeCompatibilityCheck(VERSION); +//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/internal/semver.js.map b/node_modules/@opentelemetry/api/build/esnext/internal/semver.js.map new file mode 100644 index 0000000..45cc4a3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/internal/semver.js.map @@ -0,0 +1 @@ +{"version":3,"file":"semver.js","sourceRoot":"","sources":["../../../src/internal/semver.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAErC,MAAM,EAAE,GAAG,+BAA+B,CAAC;AAE3C;;;;;;;;;;;;;;;GAeG;AACH,MAAM,UAAU,uBAAuB,CACrC,UAAkB;IAElB,MAAM,gBAAgB,GAAG,IAAI,GAAG,CAAS,CAAC,UAAU,CAAC,CAAC,CAAC;IACvD,MAAM,gBAAgB,GAAG,IAAI,GAAG,EAAU,CAAC;IAE3C,MAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC5C,IAAI,CAAC,cAAc,EAAE;QACnB,6DAA6D;QAC7D,OAAO,GAAG,EAAE,CAAC,KAAK,CAAC;KACpB;IAED,MAAM,gBAAgB,GAAG;QACvB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,UAAU,EAAE,cAAc,CAAC,CAAC,CAAC;KAC9B,CAAC;IAEF,kEAAkE;IAClE,IAAI,gBAAgB,CAAC,UAAU,IAAI,IAAI,EAAE;QACvC,OAAO,SAAS,YAAY,CAAC,aAAqB;YAChD,OAAO,aAAa,KAAK,UAAU,CAAC;QACtC,CAAC,CAAC;KACH;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,SAAS,YAAY,CAAC,aAAqB;QAChD,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,IAAI,CAAC;SACb;QAED,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;QAED,MAAM,kBAAkB,GAAG,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QACnD,IAAI,CAAC,kBAAkB,EAAE;YACvB,6BAA6B;YAC7B,sDAAsD;YACtD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,MAAM,mBAAmB,GAAG;YAC1B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,UAAU,EAAE,kBAAkB,CAAC,CAAC,CAAC;SAClC,CAAC;QAEF,qEAAqE;QACrE,IAAI,mBAAmB,CAAC,UAAU,IAAI,IAAI,EAAE;YAC1C,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,4BAA4B;QAC5B,IAAI,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK,EAAE;YACxD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,KAAK,CAAC,EAAE;YAChC,IACE,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK;gBACpD,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EACnD;gBACA,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EAAE;YACvD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,MAAM,YAAY,GAAG,uBAAuB,CAAC,OAAO,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache 
License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { VERSION } from '../version';\n\nconst re = /^(\\d+)\\.(\\d+)\\.(\\d+)(-(.+))?$/;\n\n/**\n * Create a function to test an API version to see if it is compatible with the provided ownVersion.\n *\n * The returned function has the following semantics:\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param ownVersion version which should be checked against\n */\nexport function _makeCompatibilityCheck(\n ownVersion: string\n): (globalVersion: string) => boolean {\n const acceptedVersions = new Set([ownVersion]);\n const rejectedVersions = new Set();\n\n const myVersionMatch = ownVersion.match(re);\n if (!myVersionMatch) {\n // we cannot guarantee compatibility so we always return noop\n return () => false;\n }\n\n const ownVersionParsed = {\n major: +myVersionMatch[1],\n minor: +myVersionMatch[2],\n patch: +myVersionMatch[3],\n prerelease: myVersionMatch[4],\n };\n\n // if ownVersion has a prerelease tag, versions must match exactly\n if (ownVersionParsed.prerelease != null) {\n return function isExactmatch(globalVersion: string): boolean {\n return globalVersion === ownVersion;\n };\n }\n\n function _reject(v: string) {\n rejectedVersions.add(v);\n return false;\n }\n\n function _accept(v: string) {\n acceptedVersions.add(v);\n return true;\n }\n\n return function isCompatible(globalVersion: string): boolean {\n if (acceptedVersions.has(globalVersion)) {\n return true;\n }\n\n if (rejectedVersions.has(globalVersion)) {\n return false;\n }\n\n const globalVersionMatch = globalVersion.match(re);\n if (!globalVersionMatch) {\n // cannot parse other version\n // we cannot guarantee compatibility so we always noop\n return _reject(globalVersion);\n }\n\n const globalVersionParsed = {\n major: +globalVersionMatch[1],\n minor: +globalVersionMatch[2],\n patch: +globalVersionMatch[3],\n prerelease: globalVersionMatch[4],\n };\n\n // if globalVersion has a prerelease tag, versions must match exactly\n if (globalVersionParsed.prerelease != null) {\n return _reject(globalVersion);\n }\n\n // major versions must match\n if (ownVersionParsed.major !== globalVersionParsed.major) {\n return _reject(globalVersion);\n }\n\n if (ownVersionParsed.major === 0) {\n if (\n ownVersionParsed.minor === globalVersionParsed.minor &&\n ownVersionParsed.patch <= globalVersionParsed.patch\n ) {\n return _accept(globalVersion);\n }\n\n 
return _reject(globalVersion);\n }\n\n if (ownVersionParsed.minor <= globalVersionParsed.minor) {\n return _accept(globalVersion);\n }\n\n return _reject(globalVersion);\n };\n}\n\n/**\n * Test an API version to see if it is compatible with this API.\n *\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param version version of the API requesting an instance of the global API\n */\nexport const isCompatible = _makeCompatibilityCheck(VERSION);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics-api.d.ts b/node_modules/@opentelemetry/api/build/esnext/metrics-api.d.ts new file mode 100644 index 0000000..26d539c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics-api.d.ts @@ -0,0 +1,4 @@ +import { MetricsAPI } from './api/metrics'; +/** Entrypoint for metrics API */ +export declare const metrics: MetricsAPI; +//# sourceMappingURL=metrics-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics-api.js b/node_modules/@opentelemetry/api/build/esnext/metrics-api.js new file mode 100644 index 0000000..624c886 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics-api.js @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. 
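Illustration only, not part of the vendored diff: the compatibility rules documented in internal/semver above can be made concrete with a small sketch. The deep import path mirrors the esnext build layout added by this diff and is assumed importable here purely for demonstration; applications never call this internal helper directly, it runs when the API registers its globals.
// --- illustrative sketch, not part of the diff ---
// Exercises the compatibility rules documented in internal/semver above.
import { _makeCompatibilityCheck } from '@opentelemetry/api/build/esnext/internal/semver';

// Suppose the locally bundled copy of the API is 1.4.0.
const compatibleWith14 = _makeCompatibilityCheck('1.4.0');
compatibleWith14('1.4.0'); // true  (exact match is always compatible)
compatibleWith14('1.7.2'); // true  (same major, global minor >= own minor)
compatibleWith14('1.3.0'); // false (a 1.4 caller may need functions a 1.3 global lacks)
compatibleWith14('2.0.0'); // false (major versions must match exactly)

// For 0.x versions the minor acts as the major and the patch as the minor.
const compatibleWith0201 = _makeCompatibilityCheck('0.20.1');
compatibleWith0201('0.20.5'); // true
compatibleWith0201('0.21.0'); // false
// --- end sketch ---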
+import { MetricsAPI } from './api/metrics'; +/** Entrypoint for metrics API */ +export const metrics = MetricsAPI.getInstance(); +//# sourceMappingURL=metrics-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics-api.js.map b/node_modules/@opentelemetry/api/build/esnext/metrics-api.js.map new file mode 100644 index 0000000..eb011b6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"metrics-api.js","sourceRoot":"","sources":["../../src/metrics-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAC3C,iCAAiC;AACjC,MAAM,CAAC,MAAM,OAAO,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { MetricsAPI } from './api/metrics';\n/** Entrypoint for metrics API */\nexport const metrics = MetricsAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.d.ts b/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.d.ts new file mode 100644 index 0000000..fb26fc8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.d.ts @@ -0,0 +1,104 @@ +import { BatchObservableCallback, Counter, Histogram, MetricAttributes, MetricOptions, Observable, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter } from './Metric'; +/** + * An interface describes additional metadata of a meter. + */ +export interface MeterOptions { + /** + * The schemaUrl of the meter or instrumentation library + */ + schemaUrl?: string; +} +/** + * An interface to allow the recording metrics. + * + * {@link Metric}s are used for recording pre-defined aggregation (`Counter`), + * or raw values (`Histogram`) in which the aggregation and attributes + * for the exported metric are deferred. + */ +export interface Meter { + /** + * Creates and returns a new `Histogram`. + * @param name the name of the metric. + * @param [options] the metric options. + */ + createHistogram(name: string, options?: MetricOptions): Histogram; + /** + * Creates a new `Counter` metric. Generally, this kind of metric when the + * value is a quantity, the sum is of primary interest, and the event count + * and value distribution are not of primary interest. + * @param name the name of the metric. + * @param [options] the metric options. + */ + createCounter(name: string, options?: MetricOptions): Counter; + /** + * Creates a new `UpDownCounter` metric. UpDownCounter is a synchronous + * instrument and very similar to Counter except that Add(increment) + * supports negative increments. It is generally useful for capturing changes + * in an amount of resources used, or any quantity that rises and falls + * during a request. 
+ * Example uses for UpDownCounter:
+ * <ol>
+ *   <li> count the number of active requests. </li>
+ *   <li> count memory in use by instrumenting new and delete. </li>
+ *   <li> count queue size by instrumenting enqueue and dequeue. </li>
+ *   <li> count semaphore up and down operations. </li>
+ * </ol>
          + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createUpDownCounter(name: string, options?: MetricOptions): UpDownCounter; + /** + * Creates a new `ObservableGauge` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableGauge(name: string, options?: MetricOptions): ObservableGauge; + /** + * Creates a new `ObservableCounter` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableCounter(name: string, options?: MetricOptions): ObservableCounter; + /** + * Creates a new `ObservableUpDownCounter` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableUpDownCounter(name: string, options?: MetricOptions): ObservableUpDownCounter; + /** + * Sets up a function that will be called whenever a metric collection is + * initiated. + * + * If the function is already in the list of callbacks for this Observable, + * the function is not added a second time. + * + * Only the associated observables can be observed in the callback. + * Measurements of observables that are not associated observed in the + * callback are dropped. + * + * @param callback the batch observable callback + * @param observables the observables associated with this batch observable callback + */ + addBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]): void; + /** + * Removes a callback previously registered with {@link Meter.addBatchObservableCallback}. + * + * The callback to be removed is identified using a combination of the callback itself, + * and the set of the observables associated with it. + * + * @param callback the batch observable callback + * @param observables the observables associated with this batch observable callback + */ + removeBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]): void; +} +//# sourceMappingURL=Meter.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js b/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js new file mode 100644 index 0000000..f1d0754 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Meter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js.map b/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js.map new file mode 100644 index 0000000..8c6841d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Meter.js","sourceRoot":"","sources":["../../../src/metrics/Meter.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n BatchObservableCallback,\n Counter,\n Histogram,\n MetricAttributes,\n MetricOptions,\n Observable,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n UpDownCounter,\n} from './Metric';\n\n/**\n * An interface describes additional metadata of a meter.\n */\nexport interface MeterOptions {\n /**\n * The schemaUrl of the meter or instrumentation library\n */\n schemaUrl?: string;\n}\n\n/**\n * An interface to allow the recording metrics.\n *\n * {@link Metric}s are used for recording pre-defined aggregation (`Counter`),\n * or raw values (`Histogram`) in which the aggregation and attributes\n * for the exported metric are deferred.\n */\nexport interface Meter {\n /**\n * Creates and returns a new `Histogram`.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createHistogram(\n name: string,\n options?: MetricOptions\n ): Histogram;\n\n /**\n * Creates a new `Counter` metric. Generally, this kind of metric when the\n * value is a quantity, the sum is of primary interest, and the event count\n * and value distribution are not of primary interest.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createCounter(\n name: string,\n options?: MetricOptions\n ): Counter;\n\n /**\n * Creates a new `UpDownCounter` metric. UpDownCounter is a synchronous\n * instrument and very similar to Counter except that Add(increment)\n * supports negative increments. It is generally useful for capturing changes\n * in an amount of resources used, or any quantity that rises and falls\n * during a request.\n * Example uses for UpDownCounter:\n *
<ol>\n *   <li> count the number of active requests. </li>\n *   <li> count memory in use by instrumenting new and delete. </li>\n *   <li> count queue size by instrumenting enqueue and dequeue. </li>\n *   <li> count semaphore up and down operations. </li>\n * </ol>
          \n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): UpDownCounter;\n\n /**\n * Creates a new `ObservableGauge` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableGauge<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableGauge;\n\n /**\n * Creates a new `ObservableCounter` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableCounter;\n\n /**\n * Creates a new `ObservableUpDownCounter` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableUpDownCounter;\n\n /**\n * Sets up a function that will be called whenever a metric collection is\n * initiated.\n *\n * If the function is already in the list of callbacks for this Observable,\n * the function is not added a second time.\n *\n * Only the associated observables can be observed in the callback.\n * Measurements of observables that are not associated observed in the\n * callback are dropped.\n *\n * @param callback the batch observable callback\n * @param observables the observables associated with this batch observable callback\n */\n addBatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n callback: BatchObservableCallback,\n observables: Observable[]\n ): void;\n\n /**\n * Removes a callback previously registered with {@link Meter.addBatchObservableCallback}.\n *\n * The callback to be removed is identified using a combination of the callback itself,\n * and the set of the observables associated with it.\n *\n * @param callback the batch observable callback\n * @param observables the observables associated with this batch observable callback\n */\n removeBatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n callback: BatchObservableCallback,\n observables: Observable[]\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.d.ts b/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.d.ts new file mode 100644 index 0000000..6c08cc3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.d.ts @@ -0,0 +1,17 @@ +import { Meter, MeterOptions } from './Meter'; +/** + * A registry for creating named {@link Meter}s. + */ +export interface MeterProvider { + /** + * Returns a Meter, creating one if one with the given name, version, and + * schemaUrl pair is not already created. + * + * @param name The name of the meter or instrumentation library. + * @param version The version of the meter or instrumentation library. + * @param options The options of the meter or instrumentation library. 
+ * @returns Meter A Meter with the given name and version + */ + getMeter(name: string, version?: string, options?: MeterOptions): Meter; +} +//# sourceMappingURL=MeterProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js b/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js new file mode 100644 index 0000000..3051712 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=MeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js.map b/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js.map new file mode 100644 index 0000000..8f96d90 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"MeterProvider.js","sourceRoot":"","sources":["../../../src/metrics/MeterProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, MeterOptions } from './Meter';\n\n/**\n * A registry for creating named {@link Meter}s.\n */\nexport interface MeterProvider {\n /**\n * Returns a Meter, creating one if one with the given name, version, and\n * schemaUrl pair is not already created.\n *\n * @param name The name of the meter or instrumentation library.\n * @param version The version of the meter or instrumentation library.\n * @param options The options of the meter or instrumentation library.\n * @returns Meter A Meter with the given name and version\n */\n getMeter(name: string, version?: string, options?: MeterOptions): Meter;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.d.ts b/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.d.ts new file mode 100644 index 0000000..87a41a8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.d.ts @@ -0,0 +1,109 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +import { Context } from '../context/types'; +import { BatchObservableResult, ObservableResult } from './ObservableResult'; +/** + * Advisory options influencing aggregation configuration parameters. 
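Illustration only, not part of the vendored diff: the MeterProvider.getMeter contract above is normally reached through the metrics entrypoint added earlier in this diff, and the resulting Meter creates the synchronous instruments declared in Metric.d.ts further down. A minimal sketch, assuming an SDK has registered a global MeterProvider (otherwise the no-op implementations later in this diff are handed out):
// --- illustrative sketch, not part of the diff ---
import { metrics, ValueType } from '@opentelemetry/api';

// getMeter(name, version?, options?) as declared on MeterProvider above.
const meter = metrics.getMeter('example-instrumentation', '1.0.0');

// Synchronous instruments from the Meter interface.
const requestCounter = meter.createCounter('http.server.requests', {
  description: 'Total number of handled requests',
  valueType: ValueType.INT,
});
const latencyHistogram = meter.createHistogram('http.server.duration', { unit: 'ms' });

// Counters accept only non-negative increments; attributes are optional.
requestCounter.add(1, { 'http.route': '/users' });
latencyHistogram.record(42, { 'http.route': '/users' });
// --- end sketch ---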
+ * @experimental + */ +export interface MetricAdvice { + /** + * Hint the explicit bucket boundaries for SDK if the metric is been + * aggregated with a HistogramAggregator. + */ + explicitBucketBoundaries?: number[]; +} +/** + * Options needed for metric creation + */ +export interface MetricOptions { + /** + * The description of the Metric. + * @default '' + */ + description?: string; + /** + * The unit of the Metric values. + * @default '' + */ + unit?: string; + /** + * Indicates the type of the recorded value. + * @default {@link ValueType.DOUBLE} + */ + valueType?: ValueType; + /** + * The advice influencing aggregation configuration parameters. + * @experimental + */ + advice?: MetricAdvice; +} +/** The Type of value. It describes how the data is reported. */ +export declare enum ValueType { + INT = 0, + DOUBLE = 1 +} +/** + * Counter is the most common synchronous instrument. This instrument supports + * an `Add(increment)` function for reporting a sum, and is restricted to + * non-negative increments. The default aggregation is Sum, as for any additive + * instrument. + * + * Example uses for Counter: + *
<ol>
+ *   <li> count the number of bytes received. </li>
+ *   <li> count the number of requests completed. </li>
+ *   <li> count the number of accounts created. </li>
+ *   <li> count the number of checkpoints run. </li>
+ *   <li> count the number of 5xx errors. </li>
+ * </ol>
              + */ +export interface Counter { + /** + * Increment value of counter by the input. Inputs must not be negative. + */ + add(value: number, attributes?: AttributesTypes, context?: Context): void; +} +export interface UpDownCounter { + /** + * Increment value of counter by the input. Inputs may be negative. + */ + add(value: number, attributes?: AttributesTypes, context?: Context): void; +} +export interface Histogram { + /** + * Records a measurement. Value of the measurement must not be negative. + */ + record(value: number, attributes?: AttributesTypes, context?: Context): void; +} +/** + * @deprecated please use {@link Attributes} + */ +export declare type MetricAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type MetricAttributeValue = AttributeValue; +/** + * The observable callback for Observable instruments. + */ +export declare type ObservableCallback = (observableResult: ObservableResult) => void | Promise; +/** + * The observable callback for a batch of Observable instruments. + */ +export declare type BatchObservableCallback = (observableResult: BatchObservableResult) => void | Promise; +export interface Observable { + /** + * Sets up a function that will be called whenever a metric collection is initiated. + * + * If the function is already in the list of callbacks for this Observable, the function is not added a second time. + */ + addCallback(callback: ObservableCallback): void; + /** + * Removes a callback previously registered with {@link Observable.addCallback}. + */ + removeCallback(callback: ObservableCallback): void; +} +export declare type ObservableCounter = Observable; +export declare type ObservableUpDownCounter = Observable; +export declare type ObservableGauge = Observable; +//# sourceMappingURL=Metric.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js b/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js new file mode 100644 index 0000000..6df1374 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** The Type of value. It describes how the data is reported. 
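Illustration only, not part of the vendored diff: the Observable and ObservableCallback types above back the asynchronous instruments; a callback registered with addCallback is invoked on each collection cycle and reports values through the ObservableResult it receives (declared later in this diff). A minimal sketch:
// --- illustrative sketch, not part of the diff ---
import { metrics } from '@opentelemetry/api';

const meter = metrics.getMeter('example-instrumentation');

// Application-owned state that the gauge samples on every collection cycle.
let activeConnections = 0;

const connectionsGauge = meter.createObservableGauge('example.active_connections');
connectionsGauge.addCallback(observableResult => {
  observableResult.observe(activeConnections, { 'pool.name': 'default' });
});

activeConnections += 1; // the next collection cycle reports 1
// --- end sketch ---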
*/ +export var ValueType; +(function (ValueType) { + ValueType[ValueType["INT"] = 0] = "INT"; + ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; +})(ValueType || (ValueType = {})); +//# sourceMappingURL=Metric.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js.map b/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js.map new file mode 100644 index 0000000..c8aef63 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Metric.js","sourceRoot":"","sources":["../../../src/metrics/Metric.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AA+CH,gEAAgE;AAChE,MAAM,CAAN,IAAY,SAGX;AAHD,WAAY,SAAS;IACnB,uCAAG,CAAA;IACH,6CAAM,CAAA;AACR,CAAC,EAHW,SAAS,KAAT,SAAS,QAGpB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\nimport { Context } from '../context/types';\nimport { BatchObservableResult, ObservableResult } from './ObservableResult';\n\n/**\n * Advisory options influencing aggregation configuration parameters.\n * @experimental\n */\nexport interface MetricAdvice {\n /**\n * Hint the explicit bucket boundaries for SDK if the metric is been\n * aggregated with a HistogramAggregator.\n */\n explicitBucketBoundaries?: number[];\n}\n\n/**\n * Options needed for metric creation\n */\nexport interface MetricOptions {\n /**\n * The description of the Metric.\n * @default ''\n */\n description?: string;\n\n /**\n * The unit of the Metric values.\n * @default ''\n */\n unit?: string;\n\n /**\n * Indicates the type of the recorded value.\n * @default {@link ValueType.DOUBLE}\n */\n valueType?: ValueType;\n\n /**\n * The advice influencing aggregation configuration parameters.\n * @experimental\n */\n advice?: MetricAdvice;\n}\n\n/** The Type of value. It describes how the data is reported. */\nexport enum ValueType {\n INT,\n DOUBLE,\n}\n\n/**\n * Counter is the most common synchronous instrument. This instrument supports\n * an `Add(increment)` function for reporting a sum, and is restricted to\n * non-negative increments. The default aggregation is Sum, as for any additive\n * instrument.\n *\n * Example uses for Counter:\n *
<ol>\n *   <li> count the number of bytes received. </li>\n *   <li> count the number of requests completed. </li>\n *   <li> count the number of accounts created. </li>\n *   <li> count the number of checkpoints run. </li>\n *   <li> count the number of 5xx errors. </li>\n * </ol>
                  \n */\nexport interface Counter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Increment value of counter by the input. Inputs must not be negative.\n */\n add(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\nexport interface UpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Increment value of counter by the input. Inputs may be negative.\n */\n add(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\nexport interface Histogram<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Records a measurement. Value of the measurement must not be negative.\n */\n record(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type MetricAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type MetricAttributeValue = AttributeValue;\n\n/**\n * The observable callback for Observable instruments.\n */\nexport type ObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = (\n observableResult: ObservableResult\n) => void | Promise;\n\n/**\n * The observable callback for a batch of Observable instruments.\n */\nexport type BatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = (\n observableResult: BatchObservableResult\n) => void | Promise;\n\nexport interface Observable<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Sets up a function that will be called whenever a metric collection is initiated.\n *\n * If the function is already in the list of callbacks for this Observable, the function is not added a second time.\n */\n addCallback(callback: ObservableCallback): void;\n\n /**\n * Removes a callback previously registered with {@link Observable.addCallback}.\n */\n removeCallback(callback: ObservableCallback): void;\n}\n\nexport type ObservableCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\nexport type ObservableUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\nexport type ObservableGauge<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.d.ts b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.d.ts new file mode 100644 index 0000000..0edbd2d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.d.ts @@ -0,0 +1,74 @@ +import { Meter } from './Meter'; +import { BatchObservableCallback, Counter, Histogram, MetricOptions, ObservableCallback, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter, MetricAttributes, Observable } from './Metric'; +/** + * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses + * constant NoopMetrics for all of its methods. 
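Illustration only, not part of the vendored diff: the no-op Meter described above (together with the createNoopMeter helper this diff adds) gives instrumented code a safe default, so measurements can be recorded unconditionally and are simply discarded when no SDK-backed Meter is supplied. A minimal sketch:
// --- illustrative sketch, not part of the diff ---
import { createNoopMeter } from '@opentelemetry/api';
import type { Counter, Meter } from '@opentelemetry/api';

// A library accepts an optional Meter and falls back to the no-op one,
// so its instrumentation calls never need to be guarded.
class ExampleClient {
  private readonly calls: Counter;

  constructor(meter: Meter = createNoopMeter()) {
    this.calls = meter.createCounter('example.client.calls');
  }

  doWork(): void {
    this.calls.add(1); // silently discarded when the meter is a NoopMeter
  }
}

new ExampleClient().doWork();
// --- end sketch ---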
+ */ +export declare class NoopMeter implements Meter { + constructor(); + /** + * @see {@link Meter.createHistogram} + */ + createHistogram(_name: string, _options?: MetricOptions): Histogram; + /** + * @see {@link Meter.createCounter} + */ + createCounter(_name: string, _options?: MetricOptions): Counter; + /** + * @see {@link Meter.createUpDownCounter} + */ + createUpDownCounter(_name: string, _options?: MetricOptions): UpDownCounter; + /** + * @see {@link Meter.createObservableGauge} + */ + createObservableGauge(_name: string, _options?: MetricOptions): ObservableGauge; + /** + * @see {@link Meter.createObservableCounter} + */ + createObservableCounter(_name: string, _options?: MetricOptions): ObservableCounter; + /** + * @see {@link Meter.createObservableUpDownCounter} + */ + createObservableUpDownCounter(_name: string, _options?: MetricOptions): ObservableUpDownCounter; + /** + * @see {@link Meter.addBatchObservableCallback} + */ + addBatchObservableCallback(_callback: BatchObservableCallback, _observables: Observable[]): void; + /** + * @see {@link Meter.removeBatchObservableCallback} + */ + removeBatchObservableCallback(_callback: BatchObservableCallback): void; +} +export declare class NoopMetric { +} +export declare class NoopCounterMetric extends NoopMetric implements Counter { + add(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopUpDownCounterMetric extends NoopMetric implements UpDownCounter { + add(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopHistogramMetric extends NoopMetric implements Histogram { + record(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopObservableMetric { + addCallback(_callback: ObservableCallback): void; + removeCallback(_callback: ObservableCallback): void; +} +export declare class NoopObservableCounterMetric extends NoopObservableMetric implements ObservableCounter { +} +export declare class NoopObservableGaugeMetric extends NoopObservableMetric implements ObservableGauge { +} +export declare class NoopObservableUpDownCounterMetric extends NoopObservableMetric implements ObservableUpDownCounter { +} +export declare const NOOP_METER: NoopMeter; +export declare const NOOP_COUNTER_METRIC: NoopCounterMetric; +export declare const NOOP_HISTOGRAM_METRIC: NoopHistogramMetric; +export declare const NOOP_UP_DOWN_COUNTER_METRIC: NoopUpDownCounterMetric; +export declare const NOOP_OBSERVABLE_COUNTER_METRIC: NoopObservableCounterMetric; +export declare const NOOP_OBSERVABLE_GAUGE_METRIC: NoopObservableGaugeMetric; +export declare const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC: NoopObservableUpDownCounterMetric; +/** + * Create a no-op Meter + */ +export declare function createNoopMeter(): Meter; +//# sourceMappingURL=NoopMeter.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js new file mode 100644 index 0000000..65659c0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js @@ -0,0 +1,103 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses + * constant NoopMetrics for all of its methods. + */ +export class NoopMeter { + constructor() { } + /** + * @see {@link Meter.createHistogram} + */ + createHistogram(_name, _options) { + return NOOP_HISTOGRAM_METRIC; + } + /** + * @see {@link Meter.createCounter} + */ + createCounter(_name, _options) { + return NOOP_COUNTER_METRIC; + } + /** + * @see {@link Meter.createUpDownCounter} + */ + createUpDownCounter(_name, _options) { + return NOOP_UP_DOWN_COUNTER_METRIC; + } + /** + * @see {@link Meter.createObservableGauge} + */ + createObservableGauge(_name, _options) { + return NOOP_OBSERVABLE_GAUGE_METRIC; + } + /** + * @see {@link Meter.createObservableCounter} + */ + createObservableCounter(_name, _options) { + return NOOP_OBSERVABLE_COUNTER_METRIC; + } + /** + * @see {@link Meter.createObservableUpDownCounter} + */ + createObservableUpDownCounter(_name, _options) { + return NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; + } + /** + * @see {@link Meter.addBatchObservableCallback} + */ + addBatchObservableCallback(_callback, _observables) { } + /** + * @see {@link Meter.removeBatchObservableCallback} + */ + removeBatchObservableCallback(_callback) { } +} +export class NoopMetric { +} +export class NoopCounterMetric extends NoopMetric { + add(_value, _attributes) { } +} +export class NoopUpDownCounterMetric extends NoopMetric { + add(_value, _attributes) { } +} +export class NoopHistogramMetric extends NoopMetric { + record(_value, _attributes) { } +} +export class NoopObservableMetric { + addCallback(_callback) { } + removeCallback(_callback) { } +} +export class NoopObservableCounterMetric extends NoopObservableMetric { +} +export class NoopObservableGaugeMetric extends NoopObservableMetric { +} +export class NoopObservableUpDownCounterMetric extends NoopObservableMetric { +} +export const NOOP_METER = new NoopMeter(); +// Synchronous instruments +export const NOOP_COUNTER_METRIC = new NoopCounterMetric(); +export const NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); +export const NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); +// Asynchronous instruments +export const NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); +export const NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); +export const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); +/** + * Create a no-op Meter + */ +export function createNoopMeter() { + return NOOP_METER; +} +//# sourceMappingURL=NoopMeter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js.map b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js.map new file mode 100644 index 0000000..4c6ca84 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"NoopMeter.js","sourceRoot":"","sources":["../../../src/metrics/NoopMeter.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAiBH;;;GAGG;AACH,MAAM,OAAO,SAAS;IACpB,gBAAe,CAAC;IAEhB;;OAEG;IACH,eAAe,CAAC,KAAa,EAAE,QAAwB;QACrD,OAAO,qBAAqB,CAAC;IAC/B,CAAC;IAED;;OAEG;IACH,aAAa,CAAC,KAAa,EAAE,QAAwB;QACnD,OAAO,mBAAmB,CAAC;IAC7B,CAAC;IAED;;OAEG;IACH,mBAAmB,CAAC,KAAa,EAAE,QAAwB;QACzD,OAAO,2BAA2B,CAAC;IACrC,CAAC;IAED;;OAEG;IACH,qBAAqB,CACnB,KAAa,EACb,QAAwB;QAExB,OAAO,4BAA4B,CAAC;IACtC,CAAC;IAED;;OAEG;IACH,uBAAuB,CACrB,KAAa,EACb,QAAwB;QAExB,OAAO,8BAA8B,CAAC;IACxC,CAAC;IAED;;OAEG;IACH,6BAA6B,CAC3B,KAAa,EACb,QAAwB;QAExB,OAAO,sCAAsC,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,0BAA0B,CACxB,SAAkC,EAClC,YAA0B,IACnB,CAAC;IAEV;;OAEG;IACH,6BAA6B,CAAC,SAAkC,IAAS,CAAC;CAC3E;AAED,MAAM,OAAO,UAAU;CAAG;AAE1B,MAAM,OAAO,iBAAkB,SAAQ,UAAU;IAC/C,GAAG,CAAC,MAAc,EAAE,WAA6B,IAAS,CAAC;CAC5D;AAED,MAAM,OAAO,uBACX,SAAQ,UAAU;IAGlB,GAAG,CAAC,MAAc,EAAE,WAA6B,IAAS,CAAC;CAC5D;AAED,MAAM,OAAO,mBAAoB,SAAQ,UAAU;IACjD,MAAM,CAAC,MAAc,EAAE,WAA6B,IAAS,CAAC;CAC/D;AAED,MAAM,OAAO,oBAAoB;IAC/B,WAAW,CAAC,SAA6B,IAAG,CAAC;IAE7C,cAAc,CAAC,SAA6B,IAAG,CAAC;CACjD;AAED,MAAM,OAAO,2BACX,SAAQ,oBAAoB;CACG;AAEjC,MAAM,OAAO,yBACX,SAAQ,oBAAoB;CACC;AAE/B,MAAM,OAAO,iCACX,SAAQ,oBAAoB;CACS;AAEvC,MAAM,CAAC,MAAM,UAAU,GAAG,IAAI,SAAS,EAAE,CAAC;AAE1C,0BAA0B;AAC1B,MAAM,CAAC,MAAM,mBAAmB,GAAG,IAAI,iBAAiB,EAAE,CAAC;AAC3D,MAAM,CAAC,MAAM,qBAAqB,GAAG,IAAI,mBAAmB,EAAE,CAAC;AAC/D,MAAM,CAAC,MAAM,2BAA2B,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAEzE,2BAA2B;AAC3B,MAAM,CAAC,MAAM,8BAA8B,GAAG,IAAI,2BAA2B,EAAE,CAAC;AAChF,MAAM,CAAC,MAAM,4BAA4B,GAAG,IAAI,yBAAyB,EAAE,CAAC;AAC5E,MAAM,CAAC,MAAM,sCAAsC,GACjD,IAAI,iCAAiC,EAAE,CAAC;AAE1C;;GAEG;AACH,MAAM,UAAU,eAAe;IAC7B,OAAO,UAAU,CAAC;AACpB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter } from './Meter';\nimport {\n BatchObservableCallback,\n Counter,\n Histogram,\n MetricOptions,\n ObservableCallback,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n UpDownCounter,\n MetricAttributes,\n Observable,\n} from './Metric';\n\n/**\n * NoopMeter is a noop implementation of the {@link Meter} interface. 
It reuses\n * constant NoopMetrics for all of its methods.\n */\nexport class NoopMeter implements Meter {\n constructor() {}\n\n /**\n * @see {@link Meter.createHistogram}\n */\n createHistogram(_name: string, _options?: MetricOptions): Histogram {\n return NOOP_HISTOGRAM_METRIC;\n }\n\n /**\n * @see {@link Meter.createCounter}\n */\n createCounter(_name: string, _options?: MetricOptions): Counter {\n return NOOP_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createUpDownCounter}\n */\n createUpDownCounter(_name: string, _options?: MetricOptions): UpDownCounter {\n return NOOP_UP_DOWN_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableGauge}\n */\n createObservableGauge(\n _name: string,\n _options?: MetricOptions\n ): ObservableGauge {\n return NOOP_OBSERVABLE_GAUGE_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableCounter}\n */\n createObservableCounter(\n _name: string,\n _options?: MetricOptions\n ): ObservableCounter {\n return NOOP_OBSERVABLE_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableUpDownCounter}\n */\n createObservableUpDownCounter(\n _name: string,\n _options?: MetricOptions\n ): ObservableUpDownCounter {\n return NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.addBatchObservableCallback}\n */\n addBatchObservableCallback(\n _callback: BatchObservableCallback,\n _observables: Observable[]\n ): void {}\n\n /**\n * @see {@link Meter.removeBatchObservableCallback}\n */\n removeBatchObservableCallback(_callback: BatchObservableCallback): void {}\n}\n\nexport class NoopMetric {}\n\nexport class NoopCounterMetric extends NoopMetric implements Counter {\n add(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopUpDownCounterMetric\n extends NoopMetric\n implements UpDownCounter\n{\n add(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopHistogramMetric extends NoopMetric implements Histogram {\n record(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopObservableMetric {\n addCallback(_callback: ObservableCallback) {}\n\n removeCallback(_callback: ObservableCallback) {}\n}\n\nexport class NoopObservableCounterMetric\n extends NoopObservableMetric\n implements ObservableCounter {}\n\nexport class NoopObservableGaugeMetric\n extends NoopObservableMetric\n implements ObservableGauge {}\n\nexport class NoopObservableUpDownCounterMetric\n extends NoopObservableMetric\n implements ObservableUpDownCounter {}\n\nexport const NOOP_METER = new NoopMeter();\n\n// Synchronous instruments\nexport const NOOP_COUNTER_METRIC = new NoopCounterMetric();\nexport const NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric();\nexport const NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric();\n\n// Asynchronous instruments\nexport const NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric();\nexport const NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric();\nexport const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC =\n new NoopObservableUpDownCounterMetric();\n\n/**\n * Create a no-op Meter\n */\nexport function createNoopMeter(): Meter {\n return NOOP_METER;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.d.ts b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.d.ts new file mode 100644 index 0000000..8b51bc5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.d.ts @@ -0,0 +1,11 @@ +import { Meter, 
MeterOptions } from './Meter'; +import { MeterProvider } from './MeterProvider'; +/** + * An implementation of the {@link MeterProvider} which returns an impotent Meter + * for all calls to `getMeter` + */ +export declare class NoopMeterProvider implements MeterProvider { + getMeter(_name: string, _version?: string, _options?: MeterOptions): Meter; +} +export declare const NOOP_METER_PROVIDER: NoopMeterProvider; +//# sourceMappingURL=NoopMeterProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js new file mode 100644 index 0000000..075f5c6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js @@ -0,0 +1,27 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NOOP_METER } from './NoopMeter'; +/** + * An implementation of the {@link MeterProvider} which returns an impotent Meter + * for all calls to `getMeter` + */ +export class NoopMeterProvider { + getMeter(_name, _version, _options) { + return NOOP_METER; + } +} +export const NOOP_METER_PROVIDER = new NoopMeterProvider(); +//# sourceMappingURL=NoopMeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js.map b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js.map new file mode 100644 index 0000000..c68783a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopMeterProvider.js","sourceRoot":"","sources":["../../../src/metrics/NoopMeterProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAEzC;;;GAGG;AACH,MAAM,OAAO,iBAAiB;IAC5B,QAAQ,CAAC,KAAa,EAAE,QAAiB,EAAE,QAAuB;QAChE,OAAO,UAAU,CAAC;IACpB,CAAC;CACF;AAED,MAAM,CAAC,MAAM,mBAAmB,GAAG,IAAI,iBAAiB,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, MeterOptions } from './Meter';\nimport { MeterProvider } from './MeterProvider';\nimport { NOOP_METER } from './NoopMeter';\n\n/**\n * An implementation of the {@link MeterProvider} which returns an impotent Meter\n * for all calls to `getMeter`\n */\nexport class NoopMeterProvider implements MeterProvider {\n getMeter(_name: string, _version?: string, _options?: 
MeterOptions): Meter {\n return NOOP_METER;\n }\n}\n\nexport const NOOP_METER_PROVIDER = new NoopMeterProvider();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.d.ts b/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.d.ts new file mode 100644 index 0000000..26563f9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.d.ts @@ -0,0 +1,31 @@ +import { MetricAttributes, Observable } from './Metric'; +/** + * Interface that is being used in callback function for Observable Metric. + */ +export interface ObservableResult { + /** + * Observe a measurement of the value associated with the given attributes. + * + * @param value The value to be observed. + * @param attributes The attributes associated with the value. If more than + * one values associated with the same attributes values, SDK may pick the + * last one or simply drop the entire observable result. + */ + observe(this: ObservableResult, value: number, attributes?: AttributesTypes): void; +} +/** + * Interface that is being used in batch observable callback function. + */ +export interface BatchObservableResult { + /** + * Observe a measurement of the value associated with the given attributes. + * + * @param metric The observable metric to be observed. + * @param value The value to be observed. + * @param attributes The attributes associated with the value. If more than + * one values associated with the same attributes values, SDK may pick the + * last one or simply drop the entire observable result. + */ + observe(this: BatchObservableResult, metric: Observable, value: number, attributes?: AttributesTypes): void; +} +//# sourceMappingURL=ObservableResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js b/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js new file mode 100644 index 0000000..7985d26 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=ObservableResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js.map b/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js.map new file mode 100644 index 0000000..b6b0c7c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ObservableResult.js","sourceRoot":"","sources":["../../../src/metrics/ObservableResult.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { MetricAttributes, Observable } from './Metric';\n\n/**\n * Interface that is being used in callback function for Observable Metric.\n */\nexport interface ObservableResult<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Observe a measurement of the value associated with the given attributes.\n *\n * @param value The value to be observed.\n * @param attributes The attributes associated with the value. If more than\n * one values associated with the same attributes values, SDK may pick the\n * last one or simply drop the entire observable result.\n */\n observe(\n this: ObservableResult,\n value: number,\n attributes?: AttributesTypes\n ): void;\n}\n\n/**\n * Interface that is being used in batch observable callback function.\n */\nexport interface BatchObservableResult<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Observe a measurement of the value associated with the given attributes.\n *\n * @param metric The observable metric to be observed.\n * @param value The value to be observed.\n * @param attributes The attributes associated with the value. 
If more than\n * one values associated with the same attributes values, SDK may pick the\n * last one or simply drop the entire observable result.\n */\n observe(\n this: BatchObservableResult,\n metric: Observable,\n value: number,\n attributes?: AttributesTypes\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.d.ts b/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.d.ts new file mode 100644 index 0000000..e73fd73 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.d.ts @@ -0,0 +1,10 @@ +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js new file mode 100644 index 0000000..e9d58b3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Updates to this file should also be replicated to @opentelemetry/core too. +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef +export const _globalThis = typeof globalThis === 'object' + ? globalThis + : typeof self === 'object' + ? self + : typeof window === 'object' + ? window + : typeof global === 'object' + ? 
global + : {}; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js.map b/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js.map new file mode 100644 index 0000000..c01e2ab --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/browser/globalThis.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,6EAA6E;AAE7E;;;;;;GAMG;AAEH,gEAAgE;AAChE,8EAA8E;AAC9E,MAAM,CAAC,MAAM,WAAW,GACtB,OAAO,UAAU,KAAK,QAAQ;IAC5B,CAAC,CAAC,UAAU;IACZ,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ;QAC1B,CAAC,CAAC,IAAI;QACN,CAAC,CAAC,OAAO,MAAM,KAAK,QAAQ;YAC5B,CAAC,CAAC,MAAM;YACR,CAAC,CAAC,OAAO,MAAM,KAAK,QAAQ;gBAC5B,CAAC,CAAC,MAAM;gBACR,CAAC,CAAE,EAAwB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Updates to this file should also be replicated to @opentelemetry/core too.\n\n/**\n * - globalThis (New standard)\n * - self (Will return the current window instance for supported browsers)\n * - window (fallback for older browser implementations)\n * - global (NodeJS implementation)\n * - (When all else fails)\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef\nexport const _globalThis: typeof globalThis =\n typeof globalThis === 'object'\n ? globalThis\n : typeof self === 'object'\n ? self\n : typeof window === 'object'\n ? window\n : typeof global === 'object'\n ? global\n : ({} as typeof globalThis);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.d.ts b/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.d.ts new file mode 100644 index 0000000..ba20e12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js b/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js new file mode 100644 index 0000000..efcad2e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +export * from './globalThis'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js.map b/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js.map new file mode 100644 index 0000000..07adcd9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/browser/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,cAAc,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/index.d.ts b/node_modules/@opentelemetry/api/build/esnext/platform/index.d.ts new file mode 100644 index 0000000..90595da --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/index.d.ts @@ -0,0 +1,2 @@ +export * from './node'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/index.js b/node_modules/@opentelemetry/api/build/esnext/platform/index.js new file mode 100644 index 0000000..c0df125 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './node'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/index.js.map b/node_modules/@opentelemetry/api/build/esnext/platform/index.js.map new file mode 100644 index 0000000..9494c53 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/platform/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,QAAQ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './node';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.d.ts b/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.d.ts new file mode 100644 index 0000000..e3c83e5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.d.ts @@ -0,0 +1,3 @@ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js new file mode 100644 index 0000000..7daa45d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js @@ -0,0 +1,19 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +export const _globalThis = typeof globalThis === 'object' ? 
globalThis : global; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js.map b/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js.map new file mode 100644 index 0000000..7d8366c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/node/globalThis.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,gEAAgE;AAChE,oEAAoE;AACpE,MAAM,CAAC,MAAM,WAAW,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexport const _globalThis = typeof globalThis === 'object' ? globalThis : global;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/node/index.d.ts b/node_modules/@opentelemetry/api/build/esnext/platform/node/index.d.ts new file mode 100644 index 0000000..ba20e12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/node/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js b/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js new file mode 100644 index 0000000..efcad2e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './globalThis'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js.map b/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js.map new file mode 100644 index 0000000..f279718 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/node/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,cAAc,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation-api.d.ts b/node_modules/@opentelemetry/api/build/esnext/propagation-api.d.ts new file mode 100644 index 0000000..e12b51b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation-api.d.ts @@ -0,0 +1,4 @@ +import { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export declare const propagation: PropagationAPI; +//# sourceMappingURL=propagation-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation-api.js b/node_modules/@opentelemetry/api/build/esnext/propagation-api.js new file mode 100644 index 0000000..7964ed9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation-api.js @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. 
+import { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export const propagation = PropagationAPI.getInstance(); +//# sourceMappingURL=propagation-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation-api.js.map b/node_modules/@opentelemetry/api/build/esnext/propagation-api.js.map new file mode 100644 index 0000000..401c0bc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation-api.js","sourceRoot":"","sources":["../../src/propagation-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,qCAAqC;AACrC,MAAM,CAAC,MAAM,WAAW,GAAG,cAAc,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { PropagationAPI } from './api/propagation';\n/** Entrypoint for propagation API */\nexport const propagation = PropagationAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.d.ts new file mode 100644 index 0000000..398021f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.d.ts @@ -0,0 +1,13 @@ +import { Context } from '../context/types'; +import { TextMapPropagator } from './TextMapPropagator'; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +export declare class NoopTextMapPropagator implements TextMapPropagator { + /** Noop inject function does nothing */ + inject(_context: Context, _carrier: unknown): void; + /** Noop extract function does nothing and returns the input context */ + extract(context: Context, _carrier: unknown): Context; + fields(): string[]; +} +//# sourceMappingURL=NoopTextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js new file mode 100644 index 0000000..642b84e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js @@ -0,0 +1,30 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * No-op implementations of {@link TextMapPropagator}. + */ +export class NoopTextMapPropagator { + /** Noop inject function does nothing */ + inject(_context, _carrier) { } + /** Noop extract function does nothing and returns the input context */ + extract(context, _carrier) { + return context; + } + fields() { + return []; + } +} +//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js.map new file mode 100644 index 0000000..29ce493 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/NoopTextMapPropagator.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH;;GAEG;AACH,MAAM,OAAO,qBAAqB;IAChC,wCAAwC;IACxC,MAAM,CAAC,QAAiB,EAAE,QAAiB,IAAS,CAAC;IACrD,uEAAuE;IACvE,OAAO,CAAC,OAAgB,EAAE,QAAiB;QACzC,OAAO,OAAO,CAAC;IACjB,CAAC;IACD,MAAM;QACJ,OAAO,EAAE,CAAC;IACZ,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { TextMapPropagator } from './TextMapPropagator';\n\n/**\n * No-op implementations of {@link TextMapPropagator}.\n */\nexport class NoopTextMapPropagator implements TextMapPropagator {\n /** Noop inject function does nothing */\n inject(_context: Context, _carrier: unknown): void {}\n /** Noop extract function does nothing and returns the input context */\n extract(context: Context, _carrier: unknown): Context {\n return context;\n }\n fields(): string[] {\n return [];\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.d.ts new file mode 100644 index 0000000..dc39367 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.d.ts @@ -0,0 +1,84 @@ +import { Context } from '../context/types'; +/** + * Injects `Context` into and extracts it from carriers that travel + * in-band across process boundaries. Encoding is expected to conform to the + * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request + * headers. + * + * The carrier of propagated data on both the client (injector) and server + * (extractor) side is usually an object such as http headers. Propagation is + * usually implemented via library-specific request interceptors, where the + * client-side injects values and the server-side extracts them. + */ +export interface TextMapPropagator { + /** + * Injects values from a given `Context` into a carrier. 
+ * + * OpenTelemetry defines a common set of format values (TextMapPropagator), + * and each has an expected `carrier` type. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param setter an optional {@link TextMapSetter}. If undefined, values will be + * set by direct object assignment. + */ + inject(context: Context, carrier: Carrier, setter: TextMapSetter): void; + /** + * Given a `Context` and a carrier, extract context values from a + * carrier and return a new context, created from the old context, with the + * extracted values. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all + * own properties, and keys will be accessed by direct object access. + */ + extract(context: Context, carrier: Carrier, getter: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; +} +/** + * A setter is specified by the caller to define a specific method + * to set key/value pairs on the carrier within a propagator. + */ +export interface TextMapSetter { + /** + * Callback used to set a key/value pair on an object. + * + * Should be called by the propagator each time a key/value pair + * should be set, and should set that key/value pair on the propagator. + * + * @param carrier object or class which carries key/value pairs + * @param key string key to modify + * @param value value to be set to the key on the carrier + */ + set(carrier: Carrier, key: string, value: string): void; +} +/** + * A getter is specified by the caller to define a specific method + * to get the value of a key from a carrier. + */ +export interface TextMapGetter { + /** + * Get a list of all keys available on the carrier. + * + * @param carrier + */ + keys(carrier: Carrier): string[]; + /** + * Get the value of a specific key from the carrier. + * + * @param carrier + * @param key + */ + get(carrier: Carrier, key: string): undefined | string | string[]; +} +export declare const defaultTextMapGetter: TextMapGetter; +export declare const defaultTextMapSetter: TextMapSetter; +//# sourceMappingURL=TextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js new file mode 100644 index 0000000..4c36995 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export const defaultTextMapGetter = { + get(carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys(carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +export const defaultTextMapSetter = { + set(carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js.map new file mode 100644 index 0000000..d46019b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/TextMapPropagator.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAkGH,MAAM,CAAC,MAAM,oBAAoB,GAAkB;IACjD,GAAG,CAAC,OAAO,EAAE,GAAG;QACd,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,OAAO;QACV,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,EAAE,CAAC;SACX;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAkB;IACjD,GAAG,CAAC,OAAO,EAAE,GAAG,EAAE,KAAK;QACrB,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO;SACR;QAED,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\n\n/**\n * Injects `Context` into and extracts it from carriers that travel\n * in-band across process boundaries. Encoding is expected to conform to the\n * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request\n * headers.\n *\n * The carrier of propagated data on both the client (injector) and server\n * (extractor) side is usually an object such as http headers. Propagation is\n * usually implemented via library-specific request interceptors, where the\n * client-side injects values and the server-side extracts them.\n */\nexport interface TextMapPropagator {\n /**\n * Injects values from a given `Context` into a carrier.\n *\n * OpenTelemetry defines a common set of format values (TextMapPropagator),\n * and each has an expected `carrier` type.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param setter an optional {@link TextMapSetter}. 
If undefined, values will be\n * set by direct object assignment.\n */\n inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter\n ): void;\n\n /**\n * Given a `Context` and a carrier, extract context values from a\n * carrier and return a new context, created from the old context, with the\n * extracted values.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all\n * own properties, and keys will be accessed by direct object access.\n */\n extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter\n ): Context;\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n fields(): string[];\n}\n\n/**\n * A setter is specified by the caller to define a specific method\n * to set key/value pairs on the carrier within a propagator.\n */\nexport interface TextMapSetter {\n /**\n * Callback used to set a key/value pair on an object.\n *\n * Should be called by the propagator each time a key/value pair\n * should be set, and should set that key/value pair on the propagator.\n *\n * @param carrier object or class which carries key/value pairs\n * @param key string key to modify\n * @param value value to be set to the key on the carrier\n */\n set(carrier: Carrier, key: string, value: string): void;\n}\n\n/**\n * A getter is specified by the caller to define a specific method\n * to get the value of a key from a carrier.\n */\nexport interface TextMapGetter {\n /**\n * Get a list of all keys available on the carrier.\n *\n * @param carrier\n */\n keys(carrier: Carrier): string[];\n\n /**\n * Get the value of a specific key from the carrier.\n *\n * @param carrier\n * @param key\n */\n get(carrier: Carrier, key: string): undefined | string | string[];\n}\n\nexport const defaultTextMapGetter: TextMapGetter = {\n get(carrier, key) {\n if (carrier == null) {\n return undefined;\n }\n return carrier[key];\n },\n\n keys(carrier) {\n if (carrier == null) {\n return [];\n }\n return Object.keys(carrier);\n },\n};\n\nexport const defaultTextMapSetter: TextMapSetter = {\n set(carrier, key, value) {\n if (carrier == null) {\n return;\n }\n\n carrier[key] = value;\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace-api.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace-api.d.ts new file mode 100644 index 0000000..b4751a7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace-api.d.ts @@ -0,0 +1,4 @@ +import { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export declare const trace: TraceAPI; +//# sourceMappingURL=trace-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace-api.js b/node_modules/@opentelemetry/api/build/esnext/trace-api.js new file mode 100644 index 0000000..3a0b9b0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace-api.js @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export const trace = TraceAPI.getInstance(); +//# sourceMappingURL=trace-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace-api.js.map b/node_modules/@opentelemetry/api/build/esnext/trace-api.js.map new file mode 100644 index 0000000..3982312 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace-api.js","sourceRoot":"","sources":["../../src/trace-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,+BAA+B;AAC/B,MAAM,CAAC,MAAM,KAAK,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { TraceAPI } from './api/trace';\n/** Entrypoint for trace API */\nexport const trace = TraceAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.d.ts new file mode 100644 index 0000000..883cf91 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.d.ts @@ -0,0 +1,25 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. 
+ */ +export declare class NonRecordingSpan implements Span { + private readonly _spanContext; + constructor(_spanContext?: SpanContext); + spanContext(): SpanContext; + setAttribute(_key: string, _value: unknown): this; + setAttributes(_attributes: SpanAttributes): this; + addEvent(_name: string, _attributes?: SpanAttributes): this; + setStatus(_status: SpanStatus): this; + updateName(_name: string): this; + end(_endTime?: TimeInput): void; + isRecording(): boolean; + recordException(_exception: Exception, _time?: TimeInput): void; +} +//# sourceMappingURL=NonRecordingSpan.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js new file mode 100644 index 0000000..b008a6b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { INVALID_SPAN_CONTEXT } from './invalid-span-constants'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +export class NonRecordingSpan { + constructor(_spanContext = INVALID_SPAN_CONTEXT) { + this._spanContext = _spanContext; + } + // Returns a SpanContext. + spanContext() { + return this._spanContext; + } + // By default does nothing + setAttribute(_key, _value) { + return this; + } + // By default does nothing + setAttributes(_attributes) { + return this; + } + // By default does nothing + addEvent(_name, _attributes) { + return this; + } + // By default does nothing + setStatus(_status) { + return this; + } + // By default does nothing + updateName(_name) { + return this; + } + // By default does nothing + end(_endTime) { } + // isRecording always returns false for NonRecordingSpan. 
+ isRecording() { + return false; + } + // By default does nothing + recordException(_exception, _time) { } +} +//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js.map new file mode 100644 index 0000000..38c484c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NonRecordingSpan.js","sourceRoot":"","sources":["../../../src/trace/NonRecordingSpan.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAKhE;;;;GAIG;AACH,MAAM,OAAO,gBAAgB;IAC3B,YACmB,eAA4B,oBAAoB;QAAhD,iBAAY,GAAZ,YAAY,CAAoC;IAChE,CAAC;IAEJ,yBAAyB;IACzB,WAAW;QACT,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,0BAA0B;IAC1B,YAAY,CAAC,IAAY,EAAE,MAAe;QACxC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,aAAa,CAAC,WAA2B;QACvC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,QAAQ,CAAC,KAAa,EAAE,WAA4B;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,SAAS,CAAC,OAAmB;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,UAAU,CAAC,KAAa;QACtB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,GAAG,CAAC,QAAoB,IAAS,CAAC;IAElC,yDAAyD;IACzD,WAAW;QACT,OAAO,KAAK,CAAC;IACf,CAAC;IAED,0BAA0B;IAC1B,eAAe,CAAC,UAAqB,EAAE,KAAiB,IAAS,CAAC;CACnE","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { INVALID_SPAN_CONTEXT } from './invalid-span-constants';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * The NonRecordingSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nexport class NonRecordingSpan implements Span {\n constructor(\n private readonly _spanContext: SpanContext = INVALID_SPAN_CONTEXT\n ) {}\n\n // Returns a SpanContext.\n spanContext(): SpanContext {\n return this._spanContext;\n }\n\n // By default does nothing\n setAttribute(_key: string, _value: unknown): this {\n return this;\n }\n\n // By default does nothing\n setAttributes(_attributes: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n addEvent(_name: string, _attributes?: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n setStatus(_status: SpanStatus): this {\n return this;\n }\n\n // By default does nothing\n updateName(_name: string): this {\n return this;\n }\n\n // By default does nothing\n end(_endTime?: TimeInput): void {}\n\n // isRecording always returns false for NonRecordingSpan.\n isRecording(): boolean {\n return false;\n }\n\n // By default does nothing\n recordException(_exception: Exception, _time?: TimeInput): void {}\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.d.ts new file mode 100644 index 0000000..0e059c9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.d.ts @@ -0,0 +1,14 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +/** + * No-op implementations of {@link Tracer}. + */ +export declare class NoopTracer implements Tracer { + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan ReturnType>(name: string, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, ctx: Context | undefined, fn: F): ReturnType; +} +//# sourceMappingURL=NoopTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js new file mode 100644 index 0000000..ddc7760 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js @@ -0,0 +1,71 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ContextAPI } from '../api/context'; +import { getSpanContext, setSpan } from '../trace/context-utils'; +import { NonRecordingSpan } from './NonRecordingSpan'; +import { isSpanContextValid } from './spancontext-utils'; +const contextApi = ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +export class NoopTracer { + // startSpan starts a noop span. + startSpan(name, options, context = contextApi.active()) { + const root = Boolean(options === null || options === void 0 ? 
void 0 : options.root); + if (root) { + return new NonRecordingSpan(); + } + const parentFromContext = context && getSpanContext(context); + if (isSpanContext(parentFromContext) && + isSpanContextValid(parentFromContext)) { + return new NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan(); + } + } + startActiveSpan(name, arg2, arg3, arg4) { + let opts; + let ctx; + let fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + const parentContext = ctx !== null && ctx !== void 0 ? ctx : contextApi.active(); + const span = this.startSpan(name, opts, parentContext); + const contextWithSpanSet = setSpan(parentContext, span); + return contextApi.with(contextWithSpanSet, fn, undefined, span); + } +} +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js.map new file mode 100644 index 0000000..d2f96c7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracer.js","sourceRoot":"","sources":["../../../src/trace/NoopTracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAE5C,OAAO,EAAE,cAAc,EAAE,OAAO,EAAE,MAAM,wBAAwB,CAAC;AACjE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AAKzD,MAAM,UAAU,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC;AAE5C;;GAEG;AACH,MAAM,OAAO,UAAU;IACrB,gCAAgC;IAChC,SAAS,CACP,IAAY,EACZ,OAAqB,EACrB,OAAO,GAAG,UAAU,CAAC,MAAM,EAAE;QAE7B,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,CAAC,CAAC;QACpC,IAAI,IAAI,EAAE;YACR,OAAO,IAAI,gBAAgB,EAAE,CAAC;SAC/B;QAED,MAAM,iBAAiB,GAAG,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,CAAC;QAE7D,IACE,aAAa,CAAC,iBAAiB,CAAC;YAChC,kBAAkB,CAAC,iBAAiB,CAAC,EACrC;YACA,OAAO,IAAI,gBAAgB,CAAC,iBAAiB,CAAC,CAAC;SAChD;aAAM;YACL,OAAO,IAAI,gBAAgB,EAAE,CAAC;SAC/B;IACH,CAAC;IAiBD,eAAe,CACb,IAAY,EACZ,IAAsB,EACtB,IAAkB,EAClB,IAAQ;QAER,IAAI,IAA6B,CAAC;QAClC,IAAI,GAAwB,CAAC;QAC7B,IAAI,EAAK,CAAC;QAEV,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YACxB,OAAO;SACR;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,IAAI,GAAG,IAA+B,CAAC;YACvC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM;YACL,IAAI,GAAG,IAA+B,CAAC;YACvC,GAAG,GAAG,IAA2B,CAAC;YAClC,EAAE,GAAG,IAAS,CAAC;SAChB;QAED,MAAM,aAAa,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,UAAU,CAAC,MAAM,EAAE,CAAC;QACjD,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,aAAa,CAAC,CAAC;QACvD,MAAM,kBAAkB,GAAG,OAAO,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QAExD,OAAO,UAAU,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;IAClE,CAAC;CACF;AAED,SAAS,aAAa,CAAC,WAAgB;IACrC,OAAO,CACL,OAAO,WAAW,KAAK,QAAQ;QAC/B,OAAO,WAAW,CAAC,QAAQ,CAAC,KAAK,QAAQ;QACzC,OAAO,WAAW,CAAC,SAAS,CAAC,KAAK,QAAQ;QAC1C,OAAO,WAAW,CAAC,YAAY,CAAC,KAAK,QAAQ,CAC9C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n 
*\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { Context } from '../context/types';\nimport { getSpanContext, setSpan } from '../trace/context-utils';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { isSpanContextValid } from './spancontext-utils';\nimport { SpanOptions } from './SpanOptions';\nimport { SpanContext } from './span_context';\nimport { Tracer } from './tracer';\n\nconst contextApi = ContextAPI.getInstance();\n\n/**\n * No-op implementations of {@link Tracer}.\n */\nexport class NoopTracer implements Tracer {\n // startSpan starts a noop span.\n startSpan(\n name: string,\n options?: SpanOptions,\n context = contextApi.active()\n ): Span {\n const root = Boolean(options?.root);\n if (root) {\n return new NonRecordingSpan();\n }\n\n const parentFromContext = context && getSpanContext(context);\n\n if (\n isSpanContext(parentFromContext) &&\n isSpanContextValid(parentFromContext)\n ) {\n return new NonRecordingSpan(parentFromContext);\n } else {\n return new NonRecordingSpan();\n }\n }\n\n startActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n ctx: Context | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n arg2?: F | SpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType | undefined {\n let opts: SpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (arguments.length < 2) {\n return;\n } else if (arguments.length === 2) {\n fn = arg2 as F;\n } else if (arguments.length === 3) {\n opts = arg2 as SpanOptions | undefined;\n fn = arg3 as F;\n } else {\n opts = arg2 as SpanOptions | undefined;\n ctx = arg3 as Context | undefined;\n fn = arg4 as F;\n }\n\n const parentContext = ctx ?? contextApi.active();\n const span = this.startSpan(name, opts, parentContext);\n const contextWithSpanSet = setSpan(parentContext, span);\n\n return contextApi.with(contextWithSpanSet, fn, undefined, span);\n }\n}\n\nfunction isSpanContext(spanContext: any): spanContext is SpanContext {\n return (\n typeof spanContext === 'object' &&\n typeof spanContext['spanId'] === 'string' &&\n typeof spanContext['traceId'] === 'string' &&\n typeof spanContext['traceFlags'] === 'number'\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.d.ts new file mode 100644 index 0000000..ec0fe79 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.d.ts @@ -0,0 +1,13 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +import { TracerProvider } from './tracer_provider'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. 
+ */ +export declare class NoopTracerProvider implements TracerProvider { + getTracer(_name?: string, _version?: string, _options?: TracerOptions): Tracer; +} +//# sourceMappingURL=NoopTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js new file mode 100644 index 0000000..b542b7d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js @@ -0,0 +1,28 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopTracer } from './NoopTracer'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +export class NoopTracerProvider { + getTracer(_name, _version, _options) { + return new NoopTracer(); + } +} +//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js.map new file mode 100644 index 0000000..6380d2c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/NoopTracerProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAK1C;;;;;GAKG;AACH,MAAM,OAAO,kBAAkB;IAC7B,SAAS,CACP,KAAc,EACd,QAAiB,EACjB,QAAwB;QAExB,OAAO,IAAI,UAAU,EAAE,CAAC;IAC1B,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopTracer } from './NoopTracer';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\nimport { TracerProvider } from './tracer_provider';\n\n/**\n * An implementation of the {@link TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nexport class NoopTracerProvider implements TracerProvider {\n getTracer(\n _name?: string,\n _version?: string,\n _options?: TracerOptions\n ): Tracer {\n return new NoopTracer();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.d.ts 
b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.d.ts new file mode 100644 index 0000000..116cc5c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.d.ts @@ -0,0 +1,27 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * Proxy tracer provided by the proxy tracer provider + */ +export declare class ProxyTracer implements Tracer { + private _provider; + readonly name: string; + readonly version?: string | undefined; + readonly options?: TracerOptions | undefined; + private _delegate?; + constructor(_provider: TracerDelegator, name: string, version?: string | undefined, options?: TracerOptions | undefined); + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan unknown>(_name: string, _options: F | SpanOptions, _context?: F | Context, _fn?: F): ReturnType; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. + */ + private _getTracer; +} +export interface TracerDelegator { + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js new file mode 100644 index 0000000..94dcb02 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopTracer } from './NoopTracer'; +const NOOP_TRACER = new NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +export class ProxyTracer { + constructor(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + startSpan(name, options, context) { + return this._getTracer().startSpan(name, options, context); + } + startActiveSpan(_name, _options, _context, _fn) { + const tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + } + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + _getTracer() { + if (this._delegate) { + return this._delegate; + } + const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + } +} +//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js.map new file mode 100644 index 0000000..e946938 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracer.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAM1C,MAAM,WAAW,GAAG,IAAI,UAAU,EAAE,CAAC;AAErC;;GAEG;AACH,MAAM,OAAO,WAAW;IAItB,YACU,SAA0B,EAClB,IAAY,EACZ,OAAgB,EAChB,OAAuB;QAH/B,cAAS,GAAT,SAAS,CAAiB;QAClB,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAS;QAChB,YAAO,GAAP,OAAO,CAAgB;IACtC,CAAC;IAEJ,SAAS,CAAC,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;IAED,eAAe,CACb,KAAa,EACb,QAAyB,EACzB,QAAsB,EACtB,GAAO;QAEP,MAAM,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,eAAe,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;IAClE,CAAC;IAED;;;OAGG;IACK,UAAU;QAChB,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,OAAO,IAAI,CAAC,SAAS,CAAC;SACvB;QAED,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,iBAAiB,CAC7C,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,OAAO,CACb,CAAC;QAEF,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,WAAW,CAAC;SACpB;QAED,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC;QACxB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { NoopTracer } from './NoopTracer';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER = new NoopTracer();\n\n/**\n * Proxy tracer provided by the proxy tracer provider\n */\nexport class ProxyTracer implements Tracer {\n // When a real implementation is provided, this will be it\n private _delegate?: Tracer;\n\n constructor(\n private _provider: TracerDelegator,\n public readonly name: string,\n public readonly version?: string,\n public readonly options?: TracerOptions\n ) {}\n\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n return this._getTracer().startSpan(name, options, context);\n }\n\n startActiveSpan unknown>(\n _name: string,\n _options: F | SpanOptions,\n _context?: F | Context,\n _fn?: F\n ): ReturnType {\n const tracer = this._getTracer();\n return Reflect.apply(tracer.startActiveSpan, tracer, arguments);\n }\n\n /**\n * Try to get a tracer from the proxy tracer provider.\n * If the proxy tracer provider has no delegate, return a noop 
tracer.\n */\n private _getTracer() {\n if (this._delegate) {\n return this._delegate;\n }\n\n const tracer = this._provider.getDelegateTracer(\n this.name,\n this.version,\n this.options\n );\n\n if (!tracer) {\n return NOOP_TRACER;\n }\n\n this._delegate = tracer;\n return this._delegate;\n }\n}\n\nexport interface TracerDelegator {\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.d.ts new file mode 100644 index 0000000..ee7eafa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.d.ts @@ -0,0 +1,25 @@ +import { Tracer } from './tracer'; +import { TracerProvider } from './tracer_provider'; +import { TracerOptions } from './tracer_options'; +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +export declare class ProxyTracerProvider implements TracerProvider { + private _delegate?; + /** + * Get a {@link ProxyTracer} + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; + getDelegate(): TracerProvider; + /** + * Set the delegate tracer provider + */ + setDelegate(delegate: TracerProvider): void; + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js new file mode 100644 index 0000000..b62fb81 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js @@ -0,0 +1,50 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ProxyTracer } from './ProxyTracer'; +import { NoopTracerProvider } from './NoopTracerProvider'; +const NOOP_TRACER_PROVIDER = new NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +export class ProxyTracerProvider { + /** + * Get a {@link ProxyTracer} + */ + getTracer(name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? 
_a : new ProxyTracer(this, name, version, options)); + } + getDelegate() { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + } + /** + * Set the delegate tracer provider + */ + setDelegate(delegate) { + this._delegate = delegate; + } + getDelegateTracer(name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); + } +} +//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js.map new file mode 100644 index 0000000..fc11723 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracerProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAG1D,MAAM,oBAAoB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEtD;;;;;;;GAOG;AACH,MAAM,OAAO,mBAAmB;IAG9B;;OAEG;IACH,SAAS,CAAC,IAAY,EAAE,OAAgB,EAAE,OAAuB;;QAC/D,OAAO,CACL,MAAA,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,mCAC9C,IAAI,WAAW,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAC9C,CAAC;IACJ,CAAC;IAED,WAAW;;QACT,OAAO,MAAA,IAAI,CAAC,SAAS,mCAAI,oBAAoB,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,WAAW,CAAC,QAAwB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED,iBAAiB,CACf,IAAY,EACZ,OAAgB,EAChB,OAAuB;;QAEvB,OAAO,MAAA,IAAI,CAAC,SAAS,0CAAE,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerProvider } from './tracer_provider';\nimport { ProxyTracer } from './ProxyTracer';\nimport { NoopTracerProvider } from './NoopTracerProvider';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n\n/**\n * Tracer provider which provides {@link ProxyTracer}s.\n *\n * Before a delegate is set, tracers provided are NoOp.\n * When a delegate is set, traces are provided from the delegate.\n * When a delegate is set after tracers have already been provided,\n * all tracers already provided will use the provided delegate implementation.\n */\nexport class ProxyTracerProvider implements TracerProvider {\n private _delegate?: TracerProvider;\n\n /**\n * Get a {@link ProxyTracer}\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n return (\n this.getDelegateTracer(name, version, options) ??\n new ProxyTracer(this, name, version, options)\n );\n }\n\n getDelegate(): TracerProvider {\n return this._delegate ?? 
NOOP_TRACER_PROVIDER;\n }\n\n /**\n * Set the delegate tracer provider\n */\n setDelegate(delegate: TracerProvider) {\n this._delegate = delegate;\n }\n\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined {\n return this._delegate?.getTracer(name, version, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.d.ts new file mode 100644 index 0000000..c847eaf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.d.ts @@ -0,0 +1,31 @@ +import { Context } from '../context/types'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SamplingResult } from './SamplingResult'; +import { SpanKind } from './span_kind'; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * This interface represent a sampler. Sampling is a mechanism to control the + * noise and overhead introduced by OpenTelemetry by reducing the number of + * samples of traces collected and sent to the backend. + */ +export interface Sampler { + /** + * Checks whether span needs to be created and tracked. + * + * @param context Parent Context which may contain a span. + * @param traceId of the span to be created. It can be different from the + * traceId in the {@link SpanContext}. Typically in situations when the + * span to be created starts a new trace. + * @param spanName of the span to be created. + * @param spanKind of the span to be created. + * @param attributes Initial set of SpanAttributes for the Span being constructed. + * @param links Collection of links that will be associated with the Span to + * be created. Typically useful for batch operations. + * @returns a {@link SamplingResult}. + */ + shouldSample(context: Context, traceId: string, spanName: string, spanKind: SpanKind, attributes: SpanAttributes, links: Link[]): SamplingResult; + /** Returns the sampler name or short description with the configuration. */ + toString(): string; +} +//# sourceMappingURL=Sampler.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js b/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js new file mode 100644 index 0000000..22a60a1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js.map new file mode 100644 index 0000000..66719b1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Sampler.js","sourceRoot":"","sources":["../../../src/trace/Sampler.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SamplingResult } from './SamplingResult';\nimport { SpanKind } from './span_kind';\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * This interface represent a sampler. Sampling is a mechanism to control the\n * noise and overhead introduced by OpenTelemetry by reducing the number of\n * samples of traces collected and sent to the backend.\n */\nexport interface Sampler {\n /**\n * Checks whether span needs to be created and tracked.\n *\n * @param context Parent Context which may contain a span.\n * @param traceId of the span to be created. It can be different from the\n * traceId in the {@link SpanContext}. Typically in situations when the\n * span to be created starts a new trace.\n * @param spanName of the span to be created.\n * @param spanKind of the span to be created.\n * @param attributes Initial set of SpanAttributes for the Span being constructed.\n * @param links Collection of links that will be associated with the Span to\n * be created. Typically useful for batch operations.\n * @returns a {@link SamplingResult}.\n */\n shouldSample(\n context: Context,\n traceId: string,\n spanName: string,\n spanKind: SpanKind,\n attributes: SpanAttributes,\n links: Link[]\n ): SamplingResult;\n\n /** Returns the sampler name or short description with the configuration. */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.d.ts new file mode 100644 index 0000000..f2bb495 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.d.ts @@ -0,0 +1,49 @@ +import { SpanAttributes } from './attributes'; +import { TraceState } from './trace_state'; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export declare enum SamplingDecision { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + NOT_RECORD = 0, + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. 
+ */ + RECORD = 1, + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. + */ + RECORD_AND_SAMPLED = 2 +} +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling result contains a decision for a {@link Span} and additional + * attributes the sampler would like to added to the Span. + */ +export interface SamplingResult { + /** + * A sampling decision, refer to {@link SamplingDecision} for details. + */ + decision: SamplingDecision; + /** + * The list of attributes returned by SamplingResult MUST be immutable. + * Caller may call {@link Sampler}.shouldSample any number of times and + * can safely cache the returned value. + */ + attributes?: Readonly; + /** + * A {@link TraceState} that will be associated with the {@link Span} through + * the new {@link SpanContext}. Samplers SHOULD return the TraceState from + * the passed-in {@link Context} if they do not intend to change it. Leaving + * the value undefined will also leave the TraceState unchanged. + */ + traceState?: TraceState; +} +//# sourceMappingURL=SamplingResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js new file mode 100644 index 0000000..be65741 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js @@ -0,0 +1,39 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision || (SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js.map new file mode 100644 index 0000000..fd549c8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SamplingResult.js","sourceRoot":"","sources":["../../../src/trace/SamplingResult.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH;;;;GAIG;AACH,MAAM,CAAN,IAAY,gBAgBX;AAhBD,WAAY,gBAAgB;IAC1B;;;OAGG;IACH,mEAAU,CAAA;IACV;;;OAGG;IACH,2DAAM,CAAA;IACN;;;OAGG;IACH,mFAAkB,CAAA;AACpB,CAAC,EAhBW,gBAAgB,KAAhB,gBAAgB,QAgB3B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { TraceState } from './trace_state';\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nexport enum SamplingDecision {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n NOT_RECORD,\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n RECORD,\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n RECORD_AND_SAMPLED,\n}\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * A sampling result contains a decision for a {@link Span} and additional\n * attributes the sampler would like to added to the Span.\n */\nexport interface SamplingResult {\n /**\n * A sampling decision, refer to {@link SamplingDecision} for details.\n */\n decision: SamplingDecision;\n /**\n * The list of attributes returned by SamplingResult MUST be immutable.\n * Caller may call {@link Sampler}.shouldSample any number of times and\n * can safely cache the returned value.\n */\n attributes?: Readonly;\n /**\n * A {@link TraceState} that will be associated with the {@link Span} through\n * the new {@link SpanContext}. Samplers SHOULD return the TraceState from\n * the passed-in {@link Context} if they do not intend to change it. 
Leaving\n * the value undefined will also leave the TraceState unchanged.\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.d.ts new file mode 100644 index 0000000..c804568 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.d.ts @@ -0,0 +1,23 @@ +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SpanKind } from './span_kind'; +/** + * Options needed for span creation + */ +export interface SpanOptions { + /** + * The SpanKind of a span + * @default {@link SpanKind.INTERNAL} + */ + kind?: SpanKind; + /** A span's attributes */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** A manually specified start time for the created `Span` object. */ + startTime?: TimeInput; + /** The new span should be a root span. (Ignore parent from context). */ + root?: boolean; +} +//# sourceMappingURL=SpanOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js new file mode 100644 index 0000000..06b42b1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js.map new file mode 100644 index 0000000..9132a33 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SpanOptions.js","sourceRoot":"","sources":["../../../src/trace/SpanOptions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SpanKind } from './span_kind';\n\n/**\n * Options needed for span creation\n */\nexport interface SpanOptions {\n /**\n * The SpanKind of a span\n * @default {@link SpanKind.INTERNAL}\n */\n kind?: SpanKind;\n\n /** A span's attributes */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /** A manually specified start time for the created `Span` object. */\n startTime?: TimeInput;\n\n /** The new span should be a root span. (Ignore parent from context). */\n root?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/attributes.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/attributes.d.ts new file mode 100644 index 0000000..a2a5d2a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/attributes.d.ts @@ -0,0 +1,10 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +/** + * @deprecated please use {@link Attributes} + */ +export declare type SpanAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type SpanAttributeValue = AttributeValue; +//# sourceMappingURL=attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js b/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js new file mode 100644 index 0000000..6f1b9a3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js.map new file mode 100644 index 0000000..2b02be7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"attributes.js","sourceRoot":"","sources":["../../../src/trace/attributes.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type SpanAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type SpanAttributeValue = AttributeValue;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.d.ts new file mode 100644 index 0000000..f35f794 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.d.ts @@ -0,0 +1,41 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; +/** + * Gets the span from the current context, if one exists. + */ +export declare function getActiveSpan(): Span | undefined; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export declare function deleteSpan(context: Context): Context; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; +/** + * Get the span context of the span if it exists. 
+ * + * @param context context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; +//# sourceMappingURL=context-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js b/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js new file mode 100644 index 0000000..5d113f1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js @@ -0,0 +1,73 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { createContextKey } from '../context/context'; +import { NonRecordingSpan } from './NonRecordingSpan'; +import { ContextAPI } from '../api/context'; +/** + * span key + */ +const SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +/** + * Gets the span from the current context, if one exists. + */ +export function getActiveSpan() { + return getSpan(ContextAPI.getInstance().active()); +} +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan(spanContext)); +} +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +export function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? 
void 0 : _a.spanContext(); +} +//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js.map new file mode 100644 index 0000000..a8917a4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-utils.js","sourceRoot":"","sources":["../../../src/trace/context-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAE5C;;GAEG;AACH,MAAM,QAAQ,GAAG,gBAAgB,CAAC,gCAAgC,CAAC,CAAC;AAEpE;;;;GAIG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB;IACtC,OAAQ,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAU,IAAI,SAAS,CAAC;AAC3D,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,aAAa;IAC3B,OAAO,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC;AACpD,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC1C,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB;IACzC,OAAO,OAAO,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;AACvC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,cAAc,CAC5B,OAAgB,EAChB,WAAwB;IAExB,OAAO,OAAO,CAAC,OAAO,EAAE,IAAI,gBAAgB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB;;IAC7C,OAAO,MAAA,OAAO,CAAC,OAAO,CAAC,0CAAE,WAAW,EAAE,CAAC;AACzC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { ContextAPI } from '../api/context';\n\n/**\n * span key\n */\nconst SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN');\n\n/**\n * Return the span if one exists\n *\n * @param context context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return (context.getValue(SPAN_KEY) as Span) || undefined;\n}\n\n/**\n * Gets the span from the current context, if one exists.\n */\nexport function getActiveSpan(): Span | undefined {\n return getSpan(ContextAPI.getInstance().active());\n}\n\n/**\n * Set the span on a context\n *\n * @param context context to use as parent\n * @param span span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return context.setValue(SPAN_KEY, span);\n}\n\n/**\n * Remove current span stored in the context\n *\n * @param context context to delete span from\n */\nexport function deleteSpan(context: Context): Context {\n return context.deleteValue(SPAN_KEY);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context context to set active span on\n * @param spanContext span context to be wrapped\n */\nexport function setSpanContext(\n context: 
Context,\n spanContext: SpanContext\n): Context {\n return setSpan(context, new NonRecordingSpan(spanContext));\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return getSpan(context)?.spanContext();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.d.ts new file mode 100644 index 0000000..9ed5ecb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.d.ts @@ -0,0 +1,22 @@ +import { TraceState } from '../trace_state'; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +export declare class TraceStateImpl implements TraceState { + private _internalState; + constructor(rawTraceState?: string); + set(key: string, value: string): TraceStateImpl; + unset(key: string): TraceStateImpl; + get(key: string): string | undefined; + serialize(): string; + private _parse; + private _keys; + private _clone; +} +//# sourceMappingURL=tracestate-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js new file mode 100644 index 0000000..3be2ea3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js @@ -0,0 +1,99 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { validateKey, validateValue } from './tracestate-validators'; +const MAX_TRACE_STATE_ITEMS = 32; +const MAX_TRACE_STATE_LEN = 512; +const LIST_MEMBERS_SEPARATOR = ','; +const LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +export class TraceStateImpl { + constructor(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + set(key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. 
+ const traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + } + unset(key) { + const traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + } + get(key) { + return this._internalState.get(key); + } + serialize() { + return this._keys() + .reduce((agg, key) => { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + } + _parse(rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce((agg, part) => { + const listMember = part.trim(); // Optional Whitespace (OWS) handling + const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + const key = listMember.slice(0, i); + const value = listMember.slice(i + 1, part.length); + if (validateKey(key) && validateValue(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + } + _keys() { + return Array.from(this._internalState.keys()).reverse(); + } + _clone() { + const traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + } +} +//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js.map new file mode 100644 index 0000000..90f35e6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracestate-impl.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-impl.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAErE,MAAM,qBAAqB,GAAG,EAAE,CAAC;AACjC,MAAM,mBAAmB,GAAG,GAAG,CAAC;AAChC,MAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,MAAM,8BAA8B,GAAG,GAAG,CAAC;AAE3C;;;;;;;;GAQG;AACH,MAAM,OAAO,cAAc;IAGzB,YAAY,aAAsB;QAF1B,mBAAc,GAAwB,IAAI,GAAG,EAAE,CAAC;QAGtD,IAAI,aAAa;YAAE,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;IAChD,CAAC;IAED,GAAG,CAAC,GAAW,EAAE,KAAa;QAC5B,4DAA4D;QAC5D,sBAAsB;QACtB,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,IAAI,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YACtC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACvC;QACD,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,KAAK,CAAC,GAAW;QACf,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,GAAG,CAAC,GAAW;QACb,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;IAED,SAAS;QACP,OAAO,IAAI,CAAC,KAAK,EAAE;aAChB,MAAM,CAAC,CAAC,GAAa,EAAE,GAAG,EAAE,EAAE;YAC7B,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,8BAA8B,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC/D,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;aACL,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAClC,CAAC;IAEO,MAAM,CAAC,aAAqB;QAClC,IAAI,aAAa,CAAC,MAAM,GAAG,mBAAmB;YAAE,OAAO;QACvD,IAAI,CAAC,cAAc,GAAG,aAAa;aAChC,KAAK,CAAC,sBAAsB,CAAC;aAC7B,OAAO,EAAE,CAAC,2EAA2E;aACrF,MAAM,CAAC,CAAC,GAAwB,EAAE,IAAY,EAAE,EAAE;YACjD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,qCAAqC;YACrE,MAAM,CAAC,GAAG,UAAU,CAAC,OAAO,CAAC,8BAA8B,CAAC,CAAC;YAC7D,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;gBACZ,MAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACnC,MAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;gBACnD,IAAI,WAAW,CAAC,GAAG,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,EAAE;oBAC5C,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBACrB;qBAAM;oBACL,oCAAoC;iBACrC;aACF;YACD,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;QAEhB,gFAAgF;QAChF,IAAI,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,qBAAqB,EAAE;YACpD,IAAI,CAAC,cAAc,GAAG,IAAI,GAAG,CAC3B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,CAAC;iBACtC,OAAO,EAAE,CAAC,sDAAsD;iBAChE,KAAK,CAAC,CAAC,EAAE,qBAAqB,CAAC,CACnC,CAAC;SACH;IACH,CAAC;IAEO,KAAK;QACX,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;IAC1D,CAAC;IAEO,MAAM;QACZ,MAAM,UAAU,GAAG,IAAI,cAAc,EAAE,CAAC;QACxC,UAAU,CAAC,cAAc,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QACzD,OAAO,UAAU,CAAC;IACpB,CAAC;CACF","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { validateKey, validateValue } from './tracestate-validators';\n\nconst MAX_TRACE_STATE_ITEMS = 32;\nconst MAX_TRACE_STATE_LEN = 512;\nconst LIST_MEMBERS_SEPARATOR = ',';\nconst LIST_MEMBER_KEY_VALUE_SPLITTER = '=';\n\n/**\n * TraceState must be a class and not a simple object type because of the spec\n * 
requirement (https://www.w3.org/TR/trace-context/#tracestate-field).\n *\n * Here is the list of allowed mutations:\n * - New key-value pair should be added into the beginning of the list\n * - The value of any key can be updated. Modified keys MUST be moved to the\n * beginning of the list.\n */\nexport class TraceStateImpl implements TraceState {\n private _internalState: Map = new Map();\n\n constructor(rawTraceState?: string) {\n if (rawTraceState) this._parse(rawTraceState);\n }\n\n set(key: string, value: string): TraceStateImpl {\n // TODO: Benchmark the different approaches(map vs list) and\n // use the faster one.\n const traceState = this._clone();\n if (traceState._internalState.has(key)) {\n traceState._internalState.delete(key);\n }\n traceState._internalState.set(key, value);\n return traceState;\n }\n\n unset(key: string): TraceStateImpl {\n const traceState = this._clone();\n traceState._internalState.delete(key);\n return traceState;\n }\n\n get(key: string): string | undefined {\n return this._internalState.get(key);\n }\n\n serialize(): string {\n return this._keys()\n .reduce((agg: string[], key) => {\n agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key));\n return agg;\n }, [])\n .join(LIST_MEMBERS_SEPARATOR);\n }\n\n private _parse(rawTraceState: string) {\n if (rawTraceState.length > MAX_TRACE_STATE_LEN) return;\n this._internalState = rawTraceState\n .split(LIST_MEMBERS_SEPARATOR)\n .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning\n .reduce((agg: Map, part: string) => {\n const listMember = part.trim(); // Optional Whitespace (OWS) handling\n const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER);\n if (i !== -1) {\n const key = listMember.slice(0, i);\n const value = listMember.slice(i + 1, part.length);\n if (validateKey(key) && validateValue(value)) {\n agg.set(key, value);\n } else {\n // TODO: Consider to add warning log\n }\n }\n return agg;\n }, new Map());\n\n // Because of the reverse() requirement, trunc must be done after map is created\n if (this._internalState.size > MAX_TRACE_STATE_ITEMS) {\n this._internalState = new Map(\n Array.from(this._internalState.entries())\n .reverse() // Use reverse same as original tracestate parse chain\n .slice(0, MAX_TRACE_STATE_ITEMS)\n );\n }\n }\n\n private _keys(): string[] {\n return Array.from(this._internalState.keys()).reverse();\n }\n\n private _clone(): TraceStateImpl {\n const traceState = new TraceStateImpl();\n traceState._internalState = new Map(this._internalState);\n return traceState;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.d.ts new file mode 100644 index 0000000..4917f99 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.d.ts @@ -0,0 +1,15 @@ +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
+ * see https://www.w3.org/TR/trace-context/#key + */ +export declare function validateKey(key: string): boolean; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +export declare function validateValue(value: string): boolean; +//# sourceMappingURL=tracestate-validators.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js new file mode 100644 index 0000000..3a4f95f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +const VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`; +const VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`; +const VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`); +const VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +const INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. + * see https://www.w3.org/TR/trace-context/#key + */ +export function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. 
+ */ +export function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js.map new file mode 100644 index 0000000..20d02b8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracestate-validators.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-validators.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,MAAM,oBAAoB,GAAG,cAAc,CAAC;AAC5C,MAAM,SAAS,GAAG,QAAQ,oBAAoB,SAAS,CAAC;AACxD,MAAM,gBAAgB,GAAG,WAAW,oBAAoB,gBAAgB,oBAAoB,QAAQ,CAAC;AACrG,MAAM,eAAe,GAAG,IAAI,MAAM,CAAC,OAAO,SAAS,IAAI,gBAAgB,IAAI,CAAC,CAAC;AAC7E,MAAM,sBAAsB,GAAG,qBAAqB,CAAC;AACrD,MAAM,+BAA+B,GAAG,KAAK,CAAC;AAE9C;;;;;;;GAOG;AACH,MAAM,UAAU,WAAW,CAAC,GAAW;IACrC,OAAO,eAAe,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,aAAa,CAAC,KAAa;IACzC,OAAO,CACL,sBAAsB,CAAC,IAAI,CAAC,KAAK,CAAC;QAClC,CAAC,+BAA+B,CAAC,IAAI,CAAC,KAAK,CAAC,CAC7C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]';\nconst VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`;\nconst VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`;\nconst VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`);\nconst VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/;\nconst INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/;\n\n/**\n * Key is opaque string up to 256 characters printable. It MUST begin with a\n * lowercase letter, and can only contain lowercase letters a-z, digits 0-9,\n * underscores _, dashes -, asterisks *, and forward slashes /.\n * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the\n * vendor name. 
Vendors SHOULD set the tenant ID at the beginning of the key.\n * see https://www.w3.org/TR/trace-context/#key\n */\nexport function validateKey(key: string): boolean {\n return VALID_KEY_REGEX.test(key);\n}\n\n/**\n * Value is opaque string up to 256 characters printable ASCII RFC0020\n * characters (i.e., the range 0x20 to 0x7E) except comma , and =.\n */\nexport function validateValue(value: string): boolean {\n return (\n VALID_VALUE_BASE_REGEX.test(value) &&\n !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.d.ts new file mode 100644 index 0000000..e3b51fe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.d.ts @@ -0,0 +1,3 @@ +import { TraceState } from '../trace_state'; +export declare function createTraceState(rawTraceState?: string): TraceState; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js new file mode 100644 index 0000000..feea469 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { TraceStateImpl } from './tracestate-impl'; +export function createTraceState(rawTraceState) { + return new TraceStateImpl(rawTraceState); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js.map new file mode 100644 index 0000000..91ba3d1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../../src/trace/internal/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAEnD,MAAM,UAAU,gBAAgB,CAAC,aAAsB;IACrD,OAAO,IAAI,cAAc,CAAC,aAAa,CAAC,CAAC;AAC3C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { TraceStateImpl } from './tracestate-impl';\n\nexport function createTraceState(rawTraceState?: string): TraceState {\n return new TraceStateImpl(rawTraceState);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.d.ts new file mode 100644 index 0000000..e32dab9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.d.ts @@ -0,0 +1,5 @@ +import { SpanContext } from './span_context'; +export declare const INVALID_SPANID = "0000000000000000"; +export declare const INVALID_TRACEID = "00000000000000000000000000000000"; +export declare const INVALID_SPAN_CONTEXT: SpanContext; +//# sourceMappingURL=invalid-span-constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js new file mode 100644 index 0000000..bd912f4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js @@ -0,0 +1,24 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
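A minimal usage sketch of the TraceState pieces added above (the validators, TraceStateImpl, and createTraceState), assuming createTraceState is re-exported from the package's public entry point as in @opentelemetry/api 1.1+; the header values are illustrative:

import { createTraceState } from '@opentelemetry/api';

// Members whose key or value fail the validators above are dropped on parse.
const incoming = createTraceState('rojo=00f067aa0ba902b7,INVALID=x');
console.log(incoming.serialize()); // 'rojo=00f067aa0ba902b7'

// TraceState is immutable: set() returns a new instance and moves the updated
// key to the front of the list, per the mutation rules quoted above.
const updated = incoming.set('congo', 't61rcWkgMzE');
console.log(updated.serialize());  // 'congo=t61rcWkgMzE,rojo=00f067aa0ba902b7'
console.log(incoming.serialize()); // unchanged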
+ */ +import { TraceFlags } from './trace_flags'; +export const INVALID_SPANID = '0000000000000000'; +export const INVALID_TRACEID = '00000000000000000000000000000000'; +export const INVALID_SPAN_CONTEXT = { + traceId: INVALID_TRACEID, + spanId: INVALID_SPANID, + traceFlags: TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js.map new file mode 100644 index 0000000..9c337c4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"invalid-span-constants.js","sourceRoot":"","sources":["../../../src/trace/invalid-span-constants.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,MAAM,CAAC,MAAM,cAAc,GAAG,kBAAkB,CAAC;AACjD,MAAM,CAAC,MAAM,eAAe,GAAG,kCAAkC,CAAC;AAClE,MAAM,CAAC,MAAM,oBAAoB,GAAgB;IAC/C,OAAO,EAAE,eAAe;IACxB,MAAM,EAAE,cAAc;IACtB,UAAU,EAAE,UAAU,CAAC,IAAI;CAC5B,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanContext } from './span_context';\nimport { TraceFlags } from './trace_flags';\n\nexport const INVALID_SPANID = '0000000000000000';\nexport const INVALID_TRACEID = '00000000000000000000000000000000';\nexport const INVALID_SPAN_CONTEXT: SpanContext = {\n traceId: INVALID_TRACEID,\n spanId: INVALID_SPANID,\n traceFlags: TraceFlags.NONE,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/link.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/link.d.ts new file mode 100644 index 0000000..8fc0106 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/link.d.ts @@ -0,0 +1,26 @@ +import { SpanAttributes } from './attributes'; +import { SpanContext } from './span_context'; +/** + * A pointer from the current {@link Span} to another span in the same trace or + * in a different trace. + * Few examples of Link usage. + * 1. Batch Processing: A batch of elements may contain elements associated + * with one or more traces/spans. Since there can only be one parent + * SpanContext, Link is used to keep reference to SpanContext of all + * elements in the batch. + * 2. Public Endpoint: A SpanContext in incoming client request on a public + * endpoint is untrusted from service provider perspective. In such case it + * is advisable to start a new trace with appropriate sampling decision. + * However, it is desirable to associate incoming SpanContext to new trace + * initiated on service provider side so two traces (from Client and from + * Service Provider) can be correlated. + */ +export interface Link { + /** The {@link SpanContext} of a linked span. */ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. 
*/ + attributes?: SpanAttributes; + /** Count of attributes of the link that were dropped due to collection limits */ + droppedAttributesCount?: number; +} +//# sourceMappingURL=link.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/link.js b/node_modules/@opentelemetry/api/build/esnext/trace/link.js new file mode 100644 index 0000000..7c8accb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/link.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/link.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/link.js.map new file mode 100644 index 0000000..c10b714 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/link.js.map @@ -0,0 +1 @@ +{"version":3,"file":"link.js","sourceRoot":"","sources":["../../../src/trace/link.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { SpanContext } from './span_context';\n\n/**\n * A pointer from the current {@link Span} to another span in the same trace or\n * in a different trace.\n * Few examples of Link usage.\n * 1. Batch Processing: A batch of elements may contain elements associated\n * with one or more traces/spans. Since there can only be one parent\n * SpanContext, Link is used to keep reference to SpanContext of all\n * elements in the batch.\n * 2. Public Endpoint: A SpanContext in incoming client request on a public\n * endpoint is untrusted from service provider perspective. In such case it\n * is advisable to start a new trace with appropriate sampling decision.\n * However, it is desirable to associate incoming SpanContext to new trace\n * initiated on service provider side so two traces (from Client and from\n * Service Provider) can be correlated.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n /** A set of {@link SpanAttributes} on the link. 
*/\n attributes?: SpanAttributes;\n /** Count of attributes of the link that were dropped due to collection limits */\n droppedAttributesCount?: number;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/span.d.ts new file mode 100644 index 0000000..c5f2814 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span.d.ts @@ -0,0 +1,101 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes, SpanAttributeValue } from './attributes'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. + * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key the key for this attribute. + * @param value the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name the name of the event. + * @param [attributesOrStartTime] the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is {@type TimeInput} and 3rd param is undefined + * @param [startTime] start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name the Span name. + */ + updateName(name: string): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param [endTime] the time to set as Span's end time. 
If not provided, + * use the current time as the span's end time. + */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception the exception the only accepted values are string or Error + * @param [time] the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; +} +//# sourceMappingURL=span.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span.js b/node_modules/@opentelemetry/api/build/esnext/trace/span.js new file mode 100644 index 0000000..f41c7f6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/span.js.map new file mode 100644 index 0000000..0a4f102 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span.js","sourceRoot":"","sources":["../../../src/trace/span.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes, SpanAttributeValue } from './attributes';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. 
A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key the key for this attribute.\n * @param value the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n\n /**\n * Sets attributes to the span.\n *\n * @param attributes the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n\n /**\n * Adds an event to the Span.\n *\n * @param name the name of the event.\n * @param [attributesOrStartTime] the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is {@type TimeInput} and 3rd param is undefined\n * @param [startTime] start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name the Span name.\n */\n updateName(name: string): this;\n\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param [endTime] the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception the exception the only accepted values are string or Error\n * @param [time] the time to set as Span's event time. 
If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span_context.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/span_context.d.ts new file mode 100644 index 0000000..f30933a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span_context.d.ts @@ -0,0 +1,53 @@ +import { TraceState } from './trace_state'; +/** + * A SpanContext represents the portion of a {@link Span} which must be + * serialized and propagated along side of a {@link Baggage}. + */ +export interface SpanContext { + /** + * The ID of the trace that this span belongs to. It is worldwide unique + * with practically sufficient probability by being made as 16 randomly + * generated bytes, encoded as a 32 lowercase hex characters corresponding to + * 128 bits. + */ + traceId: string; + /** + * The ID of the Span. It is globally unique with practically sufficient + * probability by being made as 8 randomly generated bytes, encoded as a 16 + * lowercase hex characters corresponding to 64 bits. + */ + spanId: string; + /** + * Only true if the SpanContext was propagated from a remote parent. + */ + isRemote?: boolean; + /** + * Trace flags to propagate. + * + * It is represented as 1 byte (bitmap). Bit to represent whether trace is + * sampled or not. When set, the least significant bit documents that the + * caller may have recorded trace data. A caller who does not record trace + * data out-of-band leaves this flag unset. + * + * see {@link TraceFlags} for valid flag values. + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} +//# sourceMappingURL=span_context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js b/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js new file mode 100644 index 0000000..1bb88b0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
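For reference, a SpanContext as declared above is a plain object; a sketch using the W3C trace-context example identifiers (32 and 16 lowercase hex characters, illustrative only):

import { SpanContext, TraceFlags } from '@opentelemetry/api';

const remoteContext: SpanContext = {
  traceId: '4bf92f3577b34da6a3ce929d0e0e4736', // 16 random bytes, hex encoded
  spanId: '00f067aa0ba902b7',                  // 8 random bytes, hex encoded
  traceFlags: TraceFlags.SAMPLED,              // bitmap; sampled bit set
  isRemote: true,                              // propagated from a remote parent
};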
+ */ +export {}; +//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js.map new file mode 100644 index 0000000..dbf0bfe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_context.js","sourceRoot":"","sources":["../../../src/trace/span_context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from './trace_state';\n\n/**\n * A SpanContext represents the portion of a {@link Span} which must be\n * serialized and propagated along side of a {@link Baggage}.\n */\nexport interface SpanContext {\n /**\n * The ID of the trace that this span belongs to. It is worldwide unique\n * with practically sufficient probability by being made as 16 randomly\n * generated bytes, encoded as a 32 lowercase hex characters corresponding to\n * 128 bits.\n */\n traceId: string;\n /**\n * The ID of the Span. It is globally unique with practically sufficient\n * probability by being made as 8 randomly generated bytes, encoded as a 16\n * lowercase hex characters corresponding to 64 bits.\n */\n spanId: string;\n /**\n * Only true if the SpanContext was propagated from a remote parent.\n */\n isRemote?: boolean;\n /**\n * Trace flags to propagate.\n *\n * It is represented as 1 byte (bitmap). Bit to represent whether trace is\n * sampled or not. When set, the least significant bit documents that the\n * caller may have recorded trace data. A caller who does not record trace\n * data out-of-band leaves this flag unset.\n *\n * see {@link TraceFlags} for valid flag values.\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.d.ts new file mode 100644 index 0000000..a89846f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.d.ts @@ -0,0 +1,27 @@ +export declare enum SpanKind { + /** Default value. Indicates that the span is used internally. 
*/ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} +//# sourceMappingURL=span_kind.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js b/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js new file mode 100644 index 0000000..1119df9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js @@ -0,0 +1,43 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind || (SpanKind = {})); +//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js.map new file mode 100644 index 0000000..deb6be7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_kind.js","sourceRoot":"","sources":["../../../src/trace/span_kind.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAN,IAAY,QA6BX;AA7BD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IAEZ;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;;OAIG;IACH,+CAAY,CAAA;IAEZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EA7BW,QAAQ,KAAR,QAAQ,QA6BnB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.d.ts new file mode 100644 index 0000000..f191111 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.d.ts @@ -0,0 +1,17 @@ +import { Span } from './span'; +import { SpanContext } from './span_context'; +export declare function isValidTraceId(traceId: string): boolean; +export declare function isValidSpanId(spanId: string): boolean; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export declare function isSpanContextValid(spanContext: SpanContext): boolean; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export declare function wrapSpanContext(spanContext: SpanContext): Span; +//# sourceMappingURL=spancontext-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js new file mode 100644 index 0000000..a51187a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants'; +import { NonRecordingSpan } from './NonRecordingSpan'; +const VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +const VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +export function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID; +} +export function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID; +} +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export function wrapSpanContext(spanContext) { + return new NonRecordingSpan(spanContext); +} +//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js.map new file mode 100644 index 0000000..9730fc7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"spancontext-utils.js","sourceRoot":"","sources":["../../../src/trace/spancontext-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC3E,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD,MAAM,mBAAmB,GAAG,mBAAmB,CAAC;AAChD,MAAM,kBAAkB,GAAG,iBAAiB,CAAC;AAE7C,MAAM,UAAU,cAAc,CAAC,OAAe;IAC5C,OAAO,mBAAmB,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,OAAO,KAAK,eAAe,CAAC;AAC1E,CAAC;AAED,MAAM,UAAU,aAAa,CAAC,MAAc;IAC1C,OAAO,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,MAAM,KAAK,cAAc,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,kBAAkB,CAAC,WAAwB;IACzD,OAAO,CACL,cAAc,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CACzE,CAAC;AACJ,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,WAAwB;IACtD,OAAO,IAAI,gBAAgB,CAAC,WAAW,CAAC,CAAC;AAC3C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\n\nconst VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i;\nconst VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i;\n\nexport function isValidTraceId(traceId: string): boolean {\n return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID;\n}\n\nexport function isValidSpanId(spanId: string): boolean {\n return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID;\n}\n\n/**\n * Returns true if this {@link SpanContext} is valid.\n * @return true if this {@link SpanContext} is valid.\n */\nexport function isSpanContextValid(spanContext: SpanContext): boolean {\n return (\n isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)\n );\n}\n\n/**\n * Wrap the given {@link SpanContext} in a new non-recording {@link Span}\n *\n * @param spanContext span context to be wrapped\n * @returns a new non-recording {@link Span} with the provided context\n */\nexport function wrapSpanContext(spanContext: SpanContext): Span {\n return new NonRecordingSpan(spanContext);\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@opentelemetry/api/build/esnext/trace/status.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/status.d.ts new file mode 100644 index 0000000..ab19a68 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/status.d.ts @@ -0,0 +1,25 @@ +export interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} +/** + * An enumeration of status codes. + */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} +//# sourceMappingURL=status.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/status.js b/node_modules/@opentelemetry/api/build/esnext/trace/status.js new file mode 100644 index 0000000..5ee55e4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/status.js @@ -0,0 +1,20 @@ +/** + * An enumeration of status codes. + */ +export var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode || (SpanStatusCode = {})); +//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/status.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/status.js.map new file mode 100644 index 0000000..af7e7d7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/status.js.map @@ -0,0 +1 @@ +{"version":3,"file":"status.js","sourceRoot":"","sources":["../../../src/trace/status.ts"],"names":[],"mappings":"AAsBA;;GAEG;AACH,MAAM,CAAN,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,KAAd,cAAc,QAczB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. 
*/\n message?: string;\n}\n\n/**\n * An enumeration of status codes.\n */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.d.ts new file mode 100644 index 0000000..11288ba --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.d.ts @@ -0,0 +1,7 @@ +export declare enum TraceFlags { + /** Represents no flag set. */ + NONE = 0, + /** Bit to represent whether trace is sampled in trace flags. */ + SAMPLED = 1 +} +//# sourceMappingURL=trace_flags.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js new file mode 100644 index 0000000..8a7b000 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. */ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags || (TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js.map new file mode 100644 index 0000000..2ea8680 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_flags.js","sourceRoot":"","sources":["../../../src/trace/trace_flags.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAN,IAAY,UAKX;AALD,WAAY,UAAU;IACpB,8BAA8B;IAC9B,2CAAU,CAAA;IACV,gEAAgE;IAChE,iDAAkB,CAAA;AACpB,CAAC,EALW,UAAU,KAAV,UAAU,QAKrB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum TraceFlags {\n /** Represents no flag set. */\n NONE = 0x0,\n /** Bit to represent whether trace is sampled in trace flags. 
*/\n SAMPLED = 0x1 << 0,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.d.ts new file mode 100644 index 0000000..f275b8b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.d.ts @@ -0,0 +1,38 @@ +export interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key key of the TraceState entry. + * @param value value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. + */ + serialize(): string; +} +//# sourceMappingURL=trace_state.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js b/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js new file mode 100644 index 0000000..a6c368f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
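A short sketch combining the spancontext-utils helpers added earlier in this diff with the TraceFlags bitmap above: validate a propagated context, test the sampled bit, and wrap it as a non-recording parent span (the local function and variable names are illustrative):

import {
  isSpanContextValid,
  wrapSpanContext,
  Span,
  SpanContext,
  TraceFlags,
} from '@opentelemetry/api';

function toParentSpan(ctx: SpanContext): Span | undefined {
  if (!isSpanContextValid(ctx)) {
    return undefined; // all-zero or malformed IDs are rejected
  }
  // TraceFlags is a bitmap; SAMPLED is the least-significant bit.
  const sampled = (ctx.traceFlags & TraceFlags.SAMPLED) === TraceFlags.SAMPLED;
  console.log(`propagated context sampled: ${sampled}`);
  return wrapSpanContext(ctx); // non-recording Span carrying ctx
}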
+ */ +export {}; +//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js.map new file mode 100644 index 0000000..64a3d7a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_state.js","sourceRoot":"","sources":["../../../src/trace/trace_state.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key key of the TraceState entry.\n * @param value value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n\n // TODO: Consider to add support for merging an object as well by also\n // accepting a single internalTraceState argument similar to the constructor.\n\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/tracer.d.ts new file mode 100644 index 0000000..2509089 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer.d.ts @@ -0,0 +1,71 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +/** + * Tracer provides an interface for creating {@link Span}s. + */ +export interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method do NOT modify the current Context. 
+ * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @returns Span The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally the new span gets set in context and this context is activated + * for the duration of the function call. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.startActiveSpan('op', span => { + * try { + * do some work + * span.setStatus({code: SpanStatusCode.OK}); + * return something; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } finally { + * span.end(); + * } + * }); + * + * @example + * const span = tracer.startActiveSpan('op', span => { + * try { + * do some work + * return span; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } + * }); + * do some more work + * span.end(); + */ + startActiveSpan unknown>(name: string, fn: F): ReturnType; + startActiveSpan unknown>(name: string, options: SpanOptions, fn: F): ReturnType; + startActiveSpan unknown>(name: string, options: SpanOptions, context: Context, fn: F): ReturnType; +} +//# sourceMappingURL=tracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js b/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js new file mode 100644 index 0000000..ad066dc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
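A usage sketch of the Tracer interface declared above, mirroring its own @example blocks; the tracer name and the work inside the span are placeholders:

import { trace, SpanStatusCode } from '@opentelemetry/api';

const tracer = trace.getTracer('example-tracer');

function doWork(): number {
  // startActiveSpan sets the new span on the active context for the duration
  // of the callback and returns whatever the callback returns.
  return tracer.startActiveSpan('do-work', span => {
    try {
      const result = 42; // placeholder for real work
      span.setStatus({ code: SpanStatusCode.OK });
      return result;
    } catch (err) {
      span.setStatus({
        code: SpanStatusCode.ERROR,
        message: (err as Error).message,
      });
      throw err;
    } finally {
      span.end(); // the span is not ended automatically
    }
  });
}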
+ */ +export {}; +//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js.map new file mode 100644 index 0000000..77f6ae9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer.js","sourceRoot":"","sources":["../../../src/trace/tracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\n\n/**\n * Tracer provides an interface for creating {@link Span}s.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. Start the span without setting it on context.\n *\n * This method do NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @returns Span The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally the new span gets set in context and this context is activated\n * for the duration of the function call.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * span.setStatus({code: SpanStatusCode.OK});\n * return something;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * } finally {\n * span.end();\n * }\n * });\n *\n * @example\n * const span = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * return span;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * }\n * });\n * do some more work\n * span.end();\n */\n startActiveSpan unknown>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.d.ts new file mode 100644 index 
0000000..f3bbccf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.d.ts @@ -0,0 +1,10 @@ +/** + * An interface describes additional metadata of a tracer. + */ +export interface TracerOptions { + /** + * The schemaUrl of the tracer or instrumentation library + */ + schemaUrl?: string; +} +//# sourceMappingURL=tracer_options.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js new file mode 100644 index 0000000..470a3a7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js.map new file mode 100644 index 0000000..70365af --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_options.js","sourceRoot":"","sources":["../../../src/trace/tracer_options.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * An interface describes additional metadata of a tracer.\n */\nexport interface TracerOptions {\n /**\n * The schemaUrl of the tracer or instrumentation library\n */\n schemaUrl?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.d.ts b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.d.ts new file mode 100644 index 0000000..9b2f7a9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.d.ts @@ -0,0 +1,21 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * A registry for creating named {@link Tracer}s. + */ +export interface TracerProvider { + /** + * Returns a Tracer, creating one if one with the given name and version is + * not already created. + * + * This function may return different Tracer types (e.g. + * {@link NoopTracerProvider} vs. a functional tracer). + * + * @param name The name of the tracer or instrumentation library. 
+ * @param version The version of the tracer or instrumentation library. + * @param options The options of the tracer or instrumentation library. + * @returns Tracer A Tracer with the given name and version + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; +} +//# sourceMappingURL=tracer_provider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js new file mode 100644 index 0000000..adf432a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js.map b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js.map new file mode 100644 index 0000000..bfc1cbd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_provider.js","sourceRoot":"","sources":["../../../src/trace/tracer_provider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\n/**\n * A registry for creating named {@link Tracer}s.\n */\nexport interface TracerProvider {\n /**\n * Returns a Tracer, creating one if one with the given name and version is\n * not already created.\n *\n * This function may return different Tracer types (e.g.\n * {@link NoopTracerProvider} vs. 
a functional tracer).\n *\n * @param name The name of the tracer or instrumentation library.\n * @param version The version of the tracer or instrumentation library.\n * @param options The options of the tracer or instrumentation library.\n * @returns Tracer A Tracer with the given name and version\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/version.d.ts b/node_modules/@opentelemetry/api/build/esnext/version.d.ts new file mode 100644 index 0000000..5088f1b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/version.d.ts @@ -0,0 +1,2 @@ +export declare const VERSION = "1.8.0"; +//# sourceMappingURL=version.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/version.js b/node_modules/@opentelemetry/api/build/esnext/version.js new file mode 100644 index 0000000..77a98c4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/version.js @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// this is autogenerated file, see scripts/version-update.js +export const VERSION = '1.8.0'; +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/version.js.map b/node_modules/@opentelemetry/api/build/esnext/version.js.map new file mode 100644 index 0000000..63cc330 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esnext/version.js.map @@ -0,0 +1 @@ +{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,4DAA4D;AAC5D,MAAM,CAAC,MAAM,OAAO,GAAG,OAAO,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// this is autogenerated file, see scripts/version-update.js\nexport const VERSION = '1.8.0';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.d.ts b/node_modules/@opentelemetry/api/build/src/api/context.d.ts new file mode 100644 index 0000000..61caee8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.d.ts @@ -0,0 +1,41 @@ +import { Context, ContextManager } from '../context/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare class ContextAPI { + private static 
_instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Context API */ + static getInstance(): ContextAPI; + /** + * Set the current context manager. + * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager: ContextManager): boolean; + /** + * Get the currently active context + */ + active(): Context; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with<A extends unknown[], F extends (...args: A) => ReturnType<F>>(context: Context, fn: F, thisArg?: ThisParameterType<F>, ...args: A): ReturnType<F>; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind<T>(context: Context, target: T): T; + private _getContextManager; + /** Disable and remove the global context manager */ + disable(): void; +} +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.js b/node_modules/@opentelemetry/api/build/src/api/context.js new file mode 100644 index 0000000..8af551f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.js @@ -0,0 +1,81 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ContextAPI = void 0; +const NoopContextManager_1 = require("../context/NoopContextManager"); +const global_utils_1 = require("../internal/global-utils"); +const diag_1 = require("./diag"); +const API_NAME = 'context'; +const NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +class ContextAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { } + /** Get the singleton instance of the Context API */ + static getInstance() { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + } + /** + * Set the current context manager.
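The ContextAPI declared in context.d.ts above is most often used through the package's top-level `context` export. A minimal sketch, assuming that export and a hypothetical tenant-id context key:

    import { context, createContextKey } from '@opentelemetry/api';

    // Context entries are keyed by opaque keys created with createContextKey.
    const TENANT_KEY = createContextKey('example.tenant'); // hypothetical key

    // with() activates a derived context only for the duration of the callback.
    function withTenant<T>(tenant: string, fn: () => T): T {
      return context.with(context.active().setValue(TENANT_KEY, tenant), fn);
    }

    // active() returns whatever context the registered ContextManager considers current.
    function currentTenant(): string | undefined {
      return context.active().getValue(TENANT_KEY) as string | undefined;
    }

    // bind() ties a context to a callback so it is restored whenever the callback runs later.
    const logTenant = context.bind(context.active(), () => console.log(currentTenant()));
    withTenant('acme', logTenant); // prints 'acme'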
+ * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager) { + return (0, global_utils_1.registerGlobal)(API_NAME, contextManager, diag_1.DiagAPI.instance()); + } + /** + * Get the currently active context + */ + active() { + return this._getContextManager().active(); + } + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with(context, fn, thisArg, ...args) { + return this._getContextManager().with(context, fn, thisArg, ...args); + } + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind(context, target) { + return this._getContextManager().bind(context, target); + } + _getContextManager() { + return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_CONTEXT_MANAGER; + } + /** Disable and remove the global context manager */ + disable() { + this._getContextManager().disable(); + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } +} +exports.ContextAPI = ContextAPI; +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.js.map b/node_modules/@opentelemetry/api/build/src/api/context.js.map new file mode 100644 index 0000000..78c56c1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/api/context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sEAAmE;AAEnE,2DAIkC;AAClC,iCAAiC;AAEjC,MAAM,QAAQ,GAAG,SAAS,CAAC;AAC3B,MAAM,oBAAoB,GAAG,IAAI,uCAAkB,EAAE,CAAC;AAEtD;;GAEG;AACH,MAAa,UAAU;IAGrB,+FAA+F;IAC/F,gBAAuB,CAAC;IAExB,oDAAoD;IAC7C,MAAM,CAAC,WAAW;QACvB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,uBAAuB,CAAC,cAA8B;QAC3D,OAAO,IAAA,6BAAc,EAAC,QAAQ,EAAE,cAAc,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACI,MAAM;QACX,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC5C,CAAC;IAED;;;;;;;OAOG;IACI,IAAI,CACT,OAAgB,EAChB,EAAK,EACL,OAA8B,EAC9B,GAAG,IAAO;QAEV,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;IACvE,CAAC;IAED;;;;;OAKG;IACI,IAAI,CAAI,OAAgB,EAAE,MAAS;QACxC,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,CAAC;IAEO,kBAAkB;QACxB,OAAO,IAAA,wBAAS,EAAC,QAAQ,CAAC,IAAI,oBAAoB,CAAC;IACrD,CAAC;IAED,oDAAoD;IAC7C,OAAO;QACZ,IAAI,CAAC,kBAAkB,EAAE,CAAC,OAAO,EAAE,CAAC;QACpC,IAAA,+BAAgB,EAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;CACF;AAnED,gCAmEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language 
governing permissions and\n * limitations under the License.\n */\n\nimport { NoopContextManager } from '../context/NoopContextManager';\nimport { Context, ContextManager } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'context';\nconst NOOP_CONTEXT_MANAGER = new NoopContextManager();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport class ContextAPI {\n private static _instance?: ContextAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Context API */\n public static getInstance(): ContextAPI {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current context manager.\n *\n * @returns true if the context manager was successfully registered, else false\n */\n public setGlobalContextManager(contextManager: ContextManager): boolean {\n return registerGlobal(API_NAME, contextManager, DiagAPI.instance());\n }\n\n /**\n * Get the currently active context\n */\n public active(): Context {\n return this._getContextManager().active();\n }\n\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n public with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return this._getContextManager().with(context, fn, thisArg, ...args);\n }\n\n /**\n * Bind a context to a target function or event emitter\n *\n * @param context context to bind to the event emitter or function. 
Defaults to the currently active context\n * @param target function or event emitter to bind\n */\n public bind(context: Context, target: T): T {\n return this._getContextManager().bind(context, target);\n }\n\n private _getContextManager(): ContextManager {\n return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER;\n }\n\n /** Disable and remove the global context manager */\n public disable() {\n this._getContextManager().disable();\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.d.ts b/node_modules/@opentelemetry/api/build/src/api/diag.d.ts new file mode 100644 index 0000000..131db17 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.d.ts @@ -0,0 +1,30 @@ +import { ComponentLoggerOptions, DiagLogFunction, DiagLogger, DiagLoggerApi } from '../diag/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +export declare class DiagAPI implements DiagLogger, DiagLoggerApi { + private static _instance?; + /** Get the singleton instance of the DiagAPI API */ + static instance(): DiagAPI; + /** + * Private internal constructor + * @private + */ + private constructor(); + setLogger: DiagLoggerApi['setLogger']; + /** + * + */ + createComponentLogger: (options: ComponentLoggerOptions) => DiagLogger; + verbose: DiagLogFunction; + debug: DiagLogFunction; + info: DiagLogFunction; + warn: DiagLogFunction; + error: DiagLogFunction; + /** + * Unregister the global logger and return to Noop + */ + disable: () => void; +} +//# sourceMappingURL=diag.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.js b/node_modules/@opentelemetry/api/build/src/api/diag.js new file mode 100644 index 0000000..9456923 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.js @@ -0,0 +1,93 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
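A short sketch of how the DiagAPI declared in diag.d.ts above is typically wired up, assuming the package's top-level `diag`, `DiagConsoleLogger`, and `DiagLogLevel` exports:

    import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

    // Register a logger for the API's internal diagnostics. setLogger returns
    // false when registration is rejected (for example when diag itself is passed in).
    const registered = diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

    if (registered) {
      diag.debug('diagnostics enabled'); // routed through the registered logger
    }

    // Component loggers prefix every message with their namespace.
    const logger = diag.createComponentLogger({ namespace: 'example-component' });
    logger.warn('something looks off');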
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagAPI = void 0; +const ComponentLogger_1 = require("../diag/ComponentLogger"); +const logLevelLogger_1 = require("../diag/internal/logLevelLogger"); +const types_1 = require("../diag/types"); +const global_utils_1 = require("../internal/global-utils"); +const API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +class DiagAPI { + /** + * Private internal constructor + * @private + */ + constructor() { + function _logProxy(funcName) { + return function (...args) { + const logger = (0, global_utils_1.getGlobal)('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName](...args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + const self = this; + // DiagAPI specific functions + const setLogger = (logger, optionsOrLogLevel = { logLevel: types_1.DiagLogLevel.INFO }) => { + var _a, _b, _c; + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + const err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + if (typeof optionsOrLogLevel === 'number') { + optionsOrLogLevel = { + logLevel: optionsOrLogLevel, + }; + } + const oldLogger = (0, global_utils_1.getGlobal)('diag'); + const newLogger = (0, logLevelLogger_1.createLogLevelDiagLogger)((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : types_1.DiagLogLevel.INFO, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { + const stack = (_c = new Error().stack) !== null && _c !== void 0 ? 
_c : ''; + oldLogger.warn(`Current logger will be overwritten from ${stack}`); + newLogger.warn(`Current logger will overwrite one already registered from ${stack}`); + } + return (0, global_utils_1.registerGlobal)('diag', newLogger, self, true); + }; + self.setLogger = setLogger; + self.disable = () => { + (0, global_utils_1.unregisterGlobal)(API_NAME, self); + }; + self.createComponentLogger = (options) => { + return new ComponentLogger_1.DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + static instance() { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + } +} +exports.DiagAPI = DiagAPI; +//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.js.map b/node_modules/@opentelemetry/api/build/src/api/diag.js.map new file mode 100644 index 0000000..868e19c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag.js","sourceRoot":"","sources":["../../../src/api/diag.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,6DAA8D;AAC9D,oEAA2E;AAC3E,yCAMuB;AACvB,2DAIkC;AAElC,MAAM,QAAQ,GAAG,MAAM,CAAC;AAExB;;;GAGG;AACH,MAAa,OAAO;IAYlB;;;OAGG;IACH;QACE,SAAS,SAAS,CAAC,QAA0B;YAC3C,OAAO,UAAU,GAAG,IAAI;gBACtB,MAAM,MAAM,GAAG,IAAA,wBAAS,EAAC,MAAM,CAAC,CAAC;gBACjC,6BAA6B;gBAC7B,IAAI,CAAC,MAAM;oBAAE,OAAO;gBACpB,OAAO,MAAM,CAAC,QAAQ,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;YACnC,CAAC,CAAC;QACJ,CAAC;QAED,mFAAmF;QACnF,MAAM,IAAI,GAAG,IAAI,CAAC;QAElB,6BAA6B;QAE7B,MAAM,SAAS,GAA+B,CAC5C,MAAM,EACN,iBAAiB,GAAG,EAAE,QAAQ,EAAE,oBAAY,CAAC,IAAI,EAAE,EACnD,EAAE;;YACF,IAAI,MAAM,KAAK,IAAI,EAAE;gBACnB,mCAAmC;gBACnC,2DAA2D;gBAC3D,qFAAqF;gBACrF,MAAM,GAAG,GAAG,IAAI,KAAK,CACnB,oIAAoI,CACrI,CAAC;gBACF,IAAI,CAAC,KAAK,CAAC,MAAA,GAAG,CAAC,KAAK,mCAAI,GAAG,CAAC,OAAO,CAAC,CAAC;gBACrC,OAAO,KAAK,CAAC;aACd;YAED,IAAI,OAAO,iBAAiB,KAAK,QAAQ,EAAE;gBACzC,iBAAiB,GAAG;oBAClB,QAAQ,EAAE,iBAAiB;iBAC5B,CAAC;aACH;YAED,MAAM,SAAS,GAAG,IAAA,wBAAS,EAAC,MAAM,CAAC,CAAC;YACpC,MAAM,SAAS,GAAG,IAAA,yCAAwB,EACxC,MAAA,iBAAiB,CAAC,QAAQ,mCAAI,oBAAY,CAAC,IAAI,EAC/C,MAAM,CACP,CAAC;YACF,kFAAkF;YAClF,IAAI,SAAS,IAAI,CAAC,iBAAiB,CAAC,uBAAuB,EAAE;gBAC3D,MAAM,KAAK,GAAG,MAAA,IAAI,KAAK,EAAE,CAAC,KAAK,mCAAI,iCAAiC,CAAC;gBACrE,SAAS,CAAC,IAAI,CAAC,2CAA2C,KAAK,EAAE,CAAC,CAAC;gBACnE,SAAS,CAAC,IAAI,CACZ,6DAA6D,KAAK,EAAE,CACrE,CAAC;aACH;YAED,OAAO,IAAA,6BAAc,EAAC,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QACvD,CAAC,CAAC;QAEF,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,CAAC,OAAO,GAAG,GAAG,EAAE;YAClB,IAAA,+BAAgB,EAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QACnC,CAAC,CAAC;QAEF,IAAI,CAAC,qBAAqB,GAAG,CAAC,OAA+B,EAAE,EAAE;YAC/D,OAAO,IAAI,qCAAmB,CAAC,OAAO,CAAC,CAAC;QAC1C,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;QACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;QAChC,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAjFD,oDAAoD;IAC7C,MAAM,CAAC,QAAQ;QACpB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,OAAO,EAAE,CAAC;SAChC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;CA+FF;AAzGD,0BAyGC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n 
* You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagComponentLogger } from '../diag/ComponentLogger';\nimport { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger';\nimport {\n ComponentLoggerOptions,\n DiagLogFunction,\n DiagLogger,\n DiagLoggerApi,\n DiagLogLevel,\n} from '../diag/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\n\nconst API_NAME = 'diag';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry internal\n * diagnostic API\n */\nexport class DiagAPI implements DiagLogger, DiagLoggerApi {\n private static _instance?: DiagAPI;\n\n /** Get the singleton instance of the DiagAPI API */\n public static instance(): DiagAPI {\n if (!this._instance) {\n this._instance = new DiagAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Private internal constructor\n * @private\n */\n private constructor() {\n function _logProxy(funcName: keyof DiagLogger): DiagLogFunction {\n return function (...args) {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) return;\n return logger[funcName](...args);\n };\n }\n\n // Using self local variable for minification purposes as 'this' cannot be minified\n const self = this;\n\n // DiagAPI specific functions\n\n const setLogger: DiagLoggerApi['setLogger'] = (\n logger,\n optionsOrLogLevel = { logLevel: DiagLogLevel.INFO }\n ) => {\n if (logger === self) {\n // There isn't much we can do here.\n // Logging to the console might break the user application.\n // Try to log to self. If a logger was previously registered it will receive the log.\n const err = new Error(\n 'Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'\n );\n self.error(err.stack ?? err.message);\n return false;\n }\n\n if (typeof optionsOrLogLevel === 'number') {\n optionsOrLogLevel = {\n logLevel: optionsOrLogLevel,\n };\n }\n\n const oldLogger = getGlobal('diag');\n const newLogger = createLogLevelDiagLogger(\n optionsOrLogLevel.logLevel ?? DiagLogLevel.INFO,\n logger\n );\n // There already is an logger registered. We'll let it know before overwriting it.\n if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) {\n const stack = new Error().stack ?? 
'';\n oldLogger.warn(`Current logger will be overwritten from ${stack}`);\n newLogger.warn(\n `Current logger will overwrite one already registered from ${stack}`\n );\n }\n\n return registerGlobal('diag', newLogger, self, true);\n };\n\n self.setLogger = setLogger;\n\n self.disable = () => {\n unregisterGlobal(API_NAME, self);\n };\n\n self.createComponentLogger = (options: ComponentLoggerOptions) => {\n return new DiagComponentLogger(options);\n };\n\n self.verbose = _logProxy('verbose');\n self.debug = _logProxy('debug');\n self.info = _logProxy('info');\n self.warn = _logProxy('warn');\n self.error = _logProxy('error');\n }\n\n public setLogger!: DiagLoggerApi['setLogger'];\n /**\n *\n */\n public createComponentLogger!: (\n options: ComponentLoggerOptions\n ) => DiagLogger;\n\n // DiagLogger implementation\n public verbose!: DiagLogFunction;\n public debug!: DiagLogFunction;\n public info!: DiagLogFunction;\n public warn!: DiagLogFunction;\n public error!: DiagLogFunction;\n\n /**\n * Unregister the global logger and return to Noop\n */\n public disable!: () => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/metrics.d.ts b/node_modules/@opentelemetry/api/build/src/api/metrics.d.ts new file mode 100644 index 0000000..5adc145 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/metrics.d.ts @@ -0,0 +1,28 @@ +import { Meter, MeterOptions } from '../metrics/Meter'; +import { MeterProvider } from '../metrics/MeterProvider'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Metrics API + */ +export declare class MetricsAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Metrics API */ + static getInstance(): MetricsAPI; + /** + * Set the current global meter provider. + * Returns true if the meter provider was successfully registered, else false. + */ + setGlobalMeterProvider(provider: MeterProvider): boolean; + /** + * Returns the global meter provider. + */ + getMeterProvider(): MeterProvider; + /** + * Returns a meter from the global meter provider. + */ + getMeter(name: string, version?: string, options?: MeterOptions): Meter; + /** Remove the global meter provider */ + disable(): void; +} +//# sourceMappingURL=metrics.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/metrics.js b/node_modules/@opentelemetry/api/build/src/api/metrics.js new file mode 100644 index 0000000..4bbc433 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/metrics.js @@ -0,0 +1,61 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
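For reference, a minimal sketch of the MetricsAPI declared in metrics.d.ts above, assuming the package's top-level `metrics` export; without a MeterProvider registered by an SDK these calls fall back to no-op instruments.

    import { metrics } from '@opentelemetry/api';

    // getMeter goes through the global MeterProvider registered by an SDK.
    const meter = metrics.getMeter('example-instrumentation', '1.0.0');

    const requestCounter = meter.createCounter('app.requests', {
      description: 'Number of handled requests',
    });

    export function recordRequest(route: string): void {
      requestCounter.add(1, { route }); // attributes distinguish the time series
    }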
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MetricsAPI = void 0; +const NoopMeterProvider_1 = require("../metrics/NoopMeterProvider"); +const global_utils_1 = require("../internal/global-utils"); +const diag_1 = require("./diag"); +const API_NAME = 'metrics'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Metrics API + */ +class MetricsAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { } + /** Get the singleton instance of the Metrics API */ + static getInstance() { + if (!this._instance) { + this._instance = new MetricsAPI(); + } + return this._instance; + } + /** + * Set the current global meter provider. + * Returns true if the meter provider was successfully registered, else false. + */ + setGlobalMeterProvider(provider) { + return (0, global_utils_1.registerGlobal)(API_NAME, provider, diag_1.DiagAPI.instance()); + } + /** + * Returns the global meter provider. + */ + getMeterProvider() { + return (0, global_utils_1.getGlobal)(API_NAME) || NoopMeterProvider_1.NOOP_METER_PROVIDER; + } + /** + * Returns a meter from the global meter provider. + */ + getMeter(name, version, options) { + return this.getMeterProvider().getMeter(name, version, options); + } + /** Remove the global meter provider */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } +} +exports.MetricsAPI = MetricsAPI; +//# sourceMappingURL=metrics.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/metrics.js.map b/node_modules/@opentelemetry/api/build/src/api/metrics.js.map new file mode 100644 index 0000000..3431094 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/metrics.js.map @@ -0,0 +1 @@ +{"version":3,"file":"metrics.js","sourceRoot":"","sources":["../../../src/api/metrics.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,oEAAmE;AACnE,2DAIkC;AAClC,iCAAiC;AAEjC,MAAM,QAAQ,GAAG,SAAS,CAAC;AAE3B;;GAEG;AACH,MAAa,UAAU;IAGrB,+FAA+F;IAC/F,gBAAuB,CAAC;IAExB,oDAAoD;IAC7C,MAAM,CAAC,WAAW;QACvB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAAC,QAAuB;QACnD,OAAO,IAAA,6BAAc,EAAC,QAAQ,EAAE,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAChE,CAAC;IAED;;OAEG;IACI,gBAAgB;QACrB,OAAO,IAAA,wBAAS,EAAC,QAAQ,CAAC,IAAI,uCAAmB,CAAC;IACpD,CAAC;IAED;;OAEG;IACI,QAAQ,CACb,IAAY,EACZ,OAAgB,EAChB,OAAsB;QAEtB,OAAO,IAAI,CAAC,gBAAgB,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAClE,CAAC;IAED,uCAAuC;IAChC,OAAO;QACZ,IAAA,+BAAgB,EAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;CACF;AA7CD,gCA6CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, MeterOptions } from '../metrics/Meter';\nimport { MeterProvider } from '../metrics/MeterProvider';\nimport { NOOP_METER_PROVIDER } from 
'../metrics/NoopMeterProvider';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'metrics';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Metrics API\n */\nexport class MetricsAPI {\n private static _instance?: MetricsAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Metrics API */\n public static getInstance(): MetricsAPI {\n if (!this._instance) {\n this._instance = new MetricsAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global meter provider.\n * Returns true if the meter provider was successfully registered, else false.\n */\n public setGlobalMeterProvider(provider: MeterProvider): boolean {\n return registerGlobal(API_NAME, provider, DiagAPI.instance());\n }\n\n /**\n * Returns the global meter provider.\n */\n public getMeterProvider(): MeterProvider {\n return getGlobal(API_NAME) || NOOP_METER_PROVIDER;\n }\n\n /**\n * Returns a meter from the global meter provider.\n */\n public getMeter(\n name: string,\n version?: string,\n options?: MeterOptions\n ): Meter {\n return this.getMeterProvider().getMeter(name, version, options);\n }\n\n /** Remove the global meter provider */\n public disable(): void {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts b/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts new file mode 100644 index 0000000..a22d24d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts @@ -0,0 +1,49 @@ +import { Context } from '../context/types'; +import { TextMapGetter, TextMapPropagator, TextMapSetter } from '../propagation/TextMapPropagator'; +import { getBaggage, getActiveBaggage, setBaggage, deleteBaggage } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +export declare class PropagationAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Propagator API */ + static getInstance(): PropagationAPI; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator: TextMapPropagator): boolean; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context: Context, carrier: Carrier, setter?: TextMapSetter): void; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context: Context, carrier: Carrier, getter?: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. 
+ */ + fields(): string[]; + /** Remove the global propagator */ + disable(): void; + createBaggage: typeof createBaggage; + getBaggage: typeof getBaggage; + getActiveBaggage: typeof getActiveBaggage; + setBaggage: typeof setBaggage; + deleteBaggage: typeof deleteBaggage; + private _getGlobalPropagator; +} +//# sourceMappingURL=propagation.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.js b/node_modules/@opentelemetry/api/build/src/api/propagation.js new file mode 100644 index 0000000..7f03df8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.js @@ -0,0 +1,89 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PropagationAPI = void 0; +const global_utils_1 = require("../internal/global-utils"); +const NoopTextMapPropagator_1 = require("../propagation/NoopTextMapPropagator"); +const TextMapPropagator_1 = require("../propagation/TextMapPropagator"); +const context_helpers_1 = require("../baggage/context-helpers"); +const utils_1 = require("../baggage/utils"); +const diag_1 = require("./diag"); +const API_NAME = 'propagation'; +const NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +class PropagationAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { + this.createBaggage = utils_1.createBaggage; + this.getBaggage = context_helpers_1.getBaggage; + this.getActiveBaggage = context_helpers_1.getActiveBaggage; + this.setBaggage = context_helpers_1.setBaggage; + this.deleteBaggage = context_helpers_1.deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + static getInstance() { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + } + /** + * Set the current propagator. 
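A minimal sketch of the PropagationAPI declared in propagation.d.ts above, assuming the package's top-level `propagation` and `context` exports and plain HTTP-header objects as carriers; with no propagator registered, inject and extract are no-ops.

    import { context, propagation } from '@opentelemetry/api';

    // Inject the active context into an outgoing carrier using the default setter,
    // which writes carrier[key] = value.
    export function outgoingHeaders(): Record<string, string> {
      const headers: Record<string, string> = {};
      propagation.inject(context.active(), headers);
      return headers;
    }

    // Extract a context from incoming headers and run a handler inside it.
    export function handleIncoming(headers: Record<string, string>, handler: () => void): void {
      const extracted = propagation.extract(context.active(), headers);
      context.with(extracted, handler);
    }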
+ * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator) { + return (0, global_utils_1.registerGlobal)(API_NAME, propagator, diag_1.DiagAPI.instance()); + } + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context, carrier, setter = TextMapPropagator_1.defaultTextMapSetter) { + return this._getGlobalPropagator().inject(context, carrier, setter); + } + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context, carrier, getter = TextMapPropagator_1.defaultTextMapGetter) { + return this._getGlobalPropagator().extract(context, carrier, getter); + } + /** + * Return a list of all fields which may be used by the propagator. + */ + fields() { + return this._getGlobalPropagator().fields(); + } + /** Remove the global propagator */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + } + _getGlobalPropagator() { + return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + } +} +exports.PropagationAPI = PropagationAPI; +//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.js.map b/node_modules/@opentelemetry/api/build/src/api/propagation.js.map new file mode 100644 index 0000000..bb3b7e1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation.js","sourceRoot":"","sources":["../../../src/api/propagation.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,2DAIkC;AAClC,gFAA6E;AAC7E,wEAM0C;AAC1C,gEAKoC;AACpC,4CAAiD;AACjD,iCAAiC;AAEjC,MAAM,QAAQ,GAAG,aAAa,CAAC;AAC/B,MAAM,wBAAwB,GAAG,IAAI,6CAAqB,EAAE,CAAC;AAE7D;;GAEG;AACH,MAAa,cAAc;IAGzB,+FAA+F;IAC/F;QA8DO,kBAAa,GAAG,qBAAa,CAAC;QAE9B,eAAU,GAAG,4BAAU,CAAC;QAExB,qBAAgB,GAAG,kCAAgB,CAAC;QAEpC,eAAU,GAAG,4BAAU,CAAC;QAExB,kBAAa,GAAG,+BAAa,CAAC;IAtEd,CAAC;IAExB,uDAAuD;IAChD,MAAM,CAAC,WAAW;QACvB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,EAAE,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,mBAAmB,CAAC,UAA6B;QACtD,OAAO,IAAA,6BAAc,EAAC,QAAQ,EAAE,UAAU,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;OAMG;IACI,MAAM,CACX,OAAgB,EAChB,OAAgB,EAChB,SAAiC,wCAAoB;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;OAMG;IACI,OAAO,CACZ,OAAgB,EAChB,OAAgB,EAChB,SAAiC,wCAAoB;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,MAAM;QACX,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC9C,CAAC;IAED,mCAAmC;IAC5B,OAAO;QACZ,IAAA,+BAAgB,EAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IAYO,oBAAoB;QAC1B,OAAO,IAAA,wBAAS,EAAC,QAAQ,CAAC,IAAI,wBAAwB,CAAC;IACzD,CAAC;CACF;AA/ED,wCA+EC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in 
writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator';\nimport {\n defaultTextMapGetter,\n defaultTextMapSetter,\n TextMapGetter,\n TextMapPropagator,\n TextMapSetter,\n} from '../propagation/TextMapPropagator';\nimport {\n getBaggage,\n getActiveBaggage,\n setBaggage,\n deleteBaggage,\n} from '../baggage/context-helpers';\nimport { createBaggage } from '../baggage/utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'propagation';\nconst NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nexport class PropagationAPI {\n private static _instance?: PropagationAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Propagator API */\n public static getInstance(): PropagationAPI {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current propagator.\n *\n * @returns true if the propagator was successfully registered, else false\n */\n public setGlobalPropagator(propagator: TextMapPropagator): boolean {\n return registerGlobal(API_NAME, propagator, DiagAPI.instance());\n }\n\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param context Context carrying tracing data to inject\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n */\n public inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter = defaultTextMapSetter\n ): void {\n return this._getGlobalPropagator().inject(context, carrier, setter);\n }\n\n /**\n * Extract context from a carrier\n *\n * @param context Context which the newly created context will inherit from\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n */\n public extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter = defaultTextMapGetter\n ): Context {\n return this._getGlobalPropagator().extract(context, carrier, getter);\n }\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n public fields(): string[] {\n return this._getGlobalPropagator().fields();\n }\n\n /** Remove the global propagator */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n\n public createBaggage = createBaggage;\n\n public getBaggage = getBaggage;\n\n public getActiveBaggage = getActiveBaggage;\n\n public setBaggage = setBaggage;\n\n public deleteBaggage = deleteBaggage;\n\n private _getGlobalPropagator(): TextMapPropagator {\n return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.d.ts b/node_modules/@opentelemetry/api/build/src/api/trace.d.ts new file mode 100644 index 0000000..df59fd2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.d.ts @@ -0,0 +1,40 @@ +import { isSpanContextValid, 
wrapSpanContext } from '../trace/spancontext-utils'; +import { Tracer } from '../trace/tracer'; +import { TracerProvider } from '../trace/tracer_provider'; +import { deleteSpan, getActiveSpan, getSpan, getSpanContext, setSpan, setSpanContext } from '../trace/context-utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +export declare class TraceAPI { + private static _instance?; + private _proxyTracerProvider; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Trace API */ + static getInstance(): TraceAPI; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider: TracerProvider): boolean; + /** + * Returns the global tracer provider. + */ + getTracerProvider(): TracerProvider; + /** + * Returns a tracer from the global tracer provider. + */ + getTracer(name: string, version?: string): Tracer; + /** Remove the global tracer provider */ + disable(): void; + wrapSpanContext: typeof wrapSpanContext; + isSpanContextValid: typeof isSpanContextValid; + deleteSpan: typeof deleteSpan; + getSpan: typeof getSpan; + getActiveSpan: typeof getActiveSpan; + getSpanContext: typeof getSpanContext; + setSpan: typeof setSpan; + setSpanContext: typeof setSpanContext; +} +//# sourceMappingURL=trace.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.js b/node_modules/@opentelemetry/api/build/src/api/trace.js new file mode 100644 index 0000000..aa7a9da --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.js @@ -0,0 +1,79 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
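The TraceAPI declared in trace.d.ts above also re-exposes the span/context helpers (getActiveSpan, setSpan, and friends). A minimal sketch, assuming the package's top-level `trace` and `context` exports:

    import { context, trace } from '@opentelemetry/api';

    // getActiveSpan() is shorthand for getSpan(context.active()).
    trace.getActiveSpan()?.addEvent('checkpoint');

    // A span can also be placed on an explicit context and activated manually.
    const tracer = trace.getTracer('example-instrumentation');
    const span = tracer.startSpan('background-task');
    context.with(trace.setSpan(context.active(), span), () => {
      // work done here sees `span` as the active span
    });
    span.end();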
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceAPI = void 0; +const global_utils_1 = require("../internal/global-utils"); +const ProxyTracerProvider_1 = require("../trace/ProxyTracerProvider"); +const spancontext_utils_1 = require("../trace/spancontext-utils"); +const context_utils_1 = require("../trace/context-utils"); +const diag_1 = require("./diag"); +const API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +class TraceAPI { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + constructor() { + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; + this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; + this.deleteSpan = context_utils_1.deleteSpan; + this.getSpan = context_utils_1.getSpan; + this.getActiveSpan = context_utils_1.getActiveSpan; + this.getSpanContext = context_utils_1.getSpanContext; + this.setSpan = context_utils_1.setSpan; + this.setSpanContext = context_utils_1.setSpanContext; + } + /** Get the singleton instance of the Trace API */ + static getInstance() { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + } + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider) { + const success = (0, global_utils_1.registerGlobal)(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + } + /** + * Returns the global tracer provider. + */ + getTracerProvider() { + return (0, global_utils_1.getGlobal)(API_NAME) || this._proxyTracerProvider; + } + /** + * Returns a tracer from the global tracer provider. 
+ */ + getTracer(name, version) { + return this.getTracerProvider().getTracer(name, version); + } + /** Remove the global tracer provider */ + disable() { + (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + } +} +exports.TraceAPI = TraceAPI; +//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.js.map b/node_modules/@opentelemetry/api/build/src/api/trace.js.map new file mode 100644 index 0000000..9bd3112 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace.js","sourceRoot":"","sources":["../../../src/api/trace.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,2DAIkC;AAClC,sEAAmE;AACnE,kEAGoC;AAGpC,0DAOgC;AAChC,iCAAiC;AAEjC,MAAM,QAAQ,GAAG,OAAO,CAAC;AAEzB;;GAEG;AACH,MAAa,QAAQ;IAKnB,+FAA+F;IAC/F;QAHQ,yBAAoB,GAAG,IAAI,yCAAmB,EAAE,CAAC;QAmDlD,oBAAe,GAAG,mCAAe,CAAC;QAElC,uBAAkB,GAAG,sCAAkB,CAAC;QAExC,eAAU,GAAG,0BAAU,CAAC;QAExB,YAAO,GAAG,uBAAO,CAAC;QAElB,kBAAa,GAAG,6BAAa,CAAC;QAE9B,mBAAc,GAAG,8BAAc,CAAC;QAEhC,YAAO,GAAG,uBAAO,CAAC;QAElB,mBAAc,GAAG,8BAAc,CAAC;IA9DhB,CAAC;IAExB,kDAAkD;IAC3C,MAAM,CAAC,WAAW;QACvB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,EAAE,CAAC;SACjC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,uBAAuB,CAAC,QAAwB;QACrD,MAAM,OAAO,GAAG,IAAA,6BAAc,EAC5B,QAAQ,EACR,IAAI,CAAC,oBAAoB,EACzB,cAAO,CAAC,QAAQ,EAAE,CACnB,CAAC;QACF,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;SACjD;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,iBAAiB;QACtB,OAAO,IAAA,wBAAS,EAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,CAAC;IAC1D,CAAC;IAED;;OAEG;IACI,SAAS,CAAC,IAAY,EAAE,OAAgB;QAC7C,OAAO,IAAI,CAAC,iBAAiB,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAED,wCAAwC;IACjC,OAAO;QACZ,IAAA,+BAAgB,EAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;QAC/C,IAAI,CAAC,oBAAoB,GAAG,IAAI,yCAAmB,EAAE,CAAC;IACxD,CAAC;CAiBF;AArED,4BAqEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { ProxyTracerProvider } from '../trace/ProxyTracerProvider';\nimport {\n isSpanContextValid,\n wrapSpanContext,\n} from '../trace/spancontext-utils';\nimport { Tracer } from '../trace/tracer';\nimport { TracerProvider } from '../trace/tracer_provider';\nimport {\n deleteSpan,\n getActiveSpan,\n getSpan,\n getSpanContext,\n setSpan,\n setSpanContext,\n} from '../trace/context-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'trace';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nexport class TraceAPI {\n private static _instance?: TraceAPI;\n\n private _proxyTracerProvider = new ProxyTracerProvider();\n\n /** Empty private constructor prevents end users from constructing a new instance of the API 
*/\n private constructor() {}\n\n /** Get the singleton instance of the Trace API */\n public static getInstance(): TraceAPI {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global tracer.\n *\n * @returns true if the tracer provider was successfully registered, else false\n */\n public setGlobalTracerProvider(provider: TracerProvider): boolean {\n const success = registerGlobal(\n API_NAME,\n this._proxyTracerProvider,\n DiagAPI.instance()\n );\n if (success) {\n this._proxyTracerProvider.setDelegate(provider);\n }\n return success;\n }\n\n /**\n * Returns the global tracer provider.\n */\n public getTracerProvider(): TracerProvider {\n return getGlobal(API_NAME) || this._proxyTracerProvider;\n }\n\n /**\n * Returns a tracer from the global tracer provider.\n */\n public getTracer(name: string, version?: string): Tracer {\n return this.getTracerProvider().getTracer(name, version);\n }\n\n /** Remove the global tracer provider */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n this._proxyTracerProvider = new ProxyTracerProvider();\n }\n\n public wrapSpanContext = wrapSpanContext;\n\n public isSpanContextValid = isSpanContextValid;\n\n public deleteSpan = deleteSpan;\n\n public getSpan = getSpan;\n\n public getActiveSpan = getActiveSpan;\n\n public getSpanContext = getSpanContext;\n\n public setSpan = setSpan;\n\n public setSpanContext = setSpanContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts new file mode 100644 index 0000000..23750eb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts @@ -0,0 +1,29 @@ +import { Context } from '../context/types'; +import { Baggage } from './types'; +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getBaggage(context: Context): Baggage | undefined; +/** + * Retrieve the current baggage from the active/current context + * + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getActiveBaggage(): Baggage | undefined; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export declare function setBaggage(context: Context, baggage: Baggage): Context; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export declare function deleteBaggage(context: Context): Context; +//# sourceMappingURL=context-helpers.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js new file mode 100644 index 0000000..cc0f00b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js @@ -0,0 +1,63 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.deleteBaggage = exports.setBaggage = exports.getActiveBaggage = exports.getBaggage = void 0; +const context_1 = require("../api/context"); +const context_2 = require("../context/context"); +/** + * Baggage key + */ +const BAGGAGE_KEY = (0, context_2.createContextKey)('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +exports.getBaggage = getBaggage; +/** + * Retrieve the current baggage from the active/current context + * + * @returns {Baggage} Extracted baggage from the context + */ +function getActiveBaggage() { + return getBaggage(context_1.ContextAPI.getInstance().active()); +} +exports.getActiveBaggage = getActiveBaggage; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +exports.setBaggage = setBaggage; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +exports.deleteBaggage = deleteBaggage; +//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map new file mode 100644 index 0000000..ba60094 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-helpers.js","sourceRoot":"","sources":["../../../src/baggage/context-helpers.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,4CAA4C;AAC5C,gDAAsD;AAItD;;GAEG;AACH,MAAM,WAAW,GAAG,IAAA,0BAAgB,EAAC,2BAA2B,CAAC,CAAC;AAElE;;;;;GAKG;AACH,SAAgB,UAAU,CAAC,OAAgB;IACzC,OAAQ,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa,IAAI,SAAS,CAAC;AACjE,CAAC;AAFD,gCAEC;AAED;;;;GAIG;AACH,SAAgB,gBAAgB;IAC9B,OAAO,UAAU,CAAC,oBAAU,CAAC,WAAW,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC;AACvD,CAAC;AAFD,4CAEC;AAED;;;;;GAKG;AACH,SAAgB,UAAU,CAAC,OAAgB,EAAE,OAAgB;IAC3D,OAAO,OAAO,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;AAChD,CAAC;AAFD,gCAEC;AAED;;;;GAIG;AACH,SAAgB,aAAa,CAAC,OAAgB;IAC5C,OAAO,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAC1C,CAAC;AAFD,sCAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS 
OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Baggage } from './types';\n\n/**\n * Baggage key\n */\nconst BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key');\n\n/**\n * Retrieve the current baggage from the given context\n *\n * @param {Context} Context that manage all context values\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getBaggage(context: Context): Baggage | undefined {\n return (context.getValue(BAGGAGE_KEY) as Baggage) || undefined;\n}\n\n/**\n * Retrieve the current baggage from the active/current context\n *\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getActiveBaggage(): Baggage | undefined {\n return getBaggage(ContextAPI.getInstance().active());\n}\n\n/**\n * Store a baggage in the given context\n *\n * @param {Context} Context that manage all context values\n * @param {Baggage} baggage that will be set in the actual context\n */\nexport function setBaggage(context: Context, baggage: Baggage): Context {\n return context.setValue(BAGGAGE_KEY, baggage);\n}\n\n/**\n * Delete the baggage stored in the given context\n *\n * @param {Context} Context that manage all context values\n */\nexport function deleteBaggage(context: Context): Context {\n return context.deleteValue(BAGGAGE_KEY);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts new file mode 100644 index 0000000..e6b4554 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts @@ -0,0 +1,12 @@ +import type { Baggage, BaggageEntry } from '../types'; +export declare class BaggageImpl implements Baggage { + private _entries; + constructor(entries?: Map); + getEntry(key: string): BaggageEntry | undefined; + getAllEntries(): [string, BaggageEntry][]; + setEntry(key: string, entry: BaggageEntry): BaggageImpl; + removeEntry(key: string): BaggageImpl; + removeEntries(...keys: string[]): BaggageImpl; + clear(): BaggageImpl; +} +//# sourceMappingURL=baggage-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js new file mode 100644 index 0000000..6f04d4a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js @@ -0,0 +1,55 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaggageImpl = void 0; +class BaggageImpl { + constructor(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + getEntry(key) { + const entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + } + getAllEntries() { + return Array.from(this._entries.entries()).map(([k, v]) => [k, v]); + } + setEntry(key, entry) { + const newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + } + removeEntry(key) { + const newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + } + removeEntries(...keys) { + const newBaggage = new BaggageImpl(this._entries); + for (const key of keys) { + newBaggage._entries.delete(key); + } + return newBaggage; + } + clear() { + return new BaggageImpl(); + } +} +exports.BaggageImpl = BaggageImpl; +//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map new file mode 100644 index 0000000..e95c4bc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map @@ -0,0 +1 @@ +{"version":3,"file":"baggage-impl.js","sourceRoot":"","sources":["../../../../src/baggage/internal/baggage-impl.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,MAAa,WAAW;IAGtB,YAAY,OAAmC;QAC7C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;IACzD,CAAC;IAED,QAAQ,CAAC,GAAW;QAClB,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;IAClC,CAAC;IAED,aAAa;QACX,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACrE,CAAC;IAED,QAAQ,CAAC,GAAW,EAAE,KAAmB;QACvC,MAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACpC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,WAAW,CAAC,GAAW;QACrB,MAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAChC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,aAAa,CAAC,GAAG,IAAc;QAC7B,MAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;YACtB,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACjC;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,KAAK;QACH,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;CACF;AA3CD,kCA2CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport type { Baggage, BaggageEntry } from '../types';\n\nexport class BaggageImpl implements Baggage {\n private _entries: Map;\n\n constructor(entries?: Map) {\n this._entries = entries ? 
new Map(entries) : new Map();\n }\n\n getEntry(key: string): BaggageEntry | undefined {\n const entry = this._entries.get(key);\n if (!entry) {\n return undefined;\n }\n\n return Object.assign({}, entry);\n }\n\n getAllEntries(): [string, BaggageEntry][] {\n return Array.from(this._entries.entries()).map(([k, v]) => [k, v]);\n }\n\n setEntry(key: string, entry: BaggageEntry): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.set(key, entry);\n return newBaggage;\n }\n\n removeEntry(key: string): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.delete(key);\n return newBaggage;\n }\n\n removeEntries(...keys: string[]): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n for (const key of keys) {\n newBaggage._entries.delete(key);\n }\n return newBaggage;\n }\n\n clear(): BaggageImpl {\n return new BaggageImpl();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts new file mode 100644 index 0000000..9cd991c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts @@ -0,0 +1,5 @@ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export declare const baggageEntryMetadataSymbol: unique symbol; +//# sourceMappingURL=symbol.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js new file mode 100644 index 0000000..324c216 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js @@ -0,0 +1,23 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataSymbol = void 0; +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map new file mode 100644 index 0000000..497f362 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map @@ -0,0 +1 @@ +{"version":3,"file":"symbol.js","sourceRoot":"","sources":["../../../../src/baggage/internal/symbol.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH;;GAEG;AACU,QAAA,0BAA0B,GAAG,MAAM,CAAC,sBAAsB,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Symbol used to make BaggageEntryMetadata an opaque type\n */\nexport const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata');\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts new file mode 100644 index 0000000..32fa0ec --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts @@ -0,0 +1,60 @@ +import { baggageEntryMetadataSymbol } from './internal/symbol'; +export interface BaggageEntry { + /** `String` value of the `BaggageEntry`. */ + value: string; + /** + * Metadata is an optional string property defined by the W3C baggage specification. + * It currently has no special meaning defined by the specification. + */ + metadata?: BaggageEntryMetadata; +} +/** + * Serializable Metadata defined by the W3C baggage specification. + * It currently has no special meaning defined by the OpenTelemetry or W3C. + */ +export declare type BaggageEntryMetadata = { + toString(): string; +} & { + __TYPE__: typeof baggageEntryMetadataSymbol; +}; +/** + * Baggage represents collection of key-value pairs with optional metadata. + * Each key of Baggage is associated with exactly one value. + * Baggage may be used to annotate and enrich telemetry data. 
+ */ +export interface Baggage { + /** + * Get an entry from Baggage if it exists + * + * @param key The key which identifies the BaggageEntry + */ + getEntry(key: string): BaggageEntry | undefined; + /** + * Get a list of all entries in the Baggage + */ + getAllEntries(): [string, BaggageEntry][]; + /** + * Returns a new baggage with the entries from the current bag and the specified entry + * + * @param key string which identifies the baggage entry + * @param entry BaggageEntry for the given key + */ + setEntry(key: string, entry: BaggageEntry): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entry + * + * @param key key identifying the entry to be removed + */ + removeEntry(key: string): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entries + * + * @param key keys identifying the entries to be removed + */ + removeEntries(...key: string[]): Baggage; + /** + * Returns a new baggage with no entries + */ + clear(): Baggage; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.js b/node_modules/@opentelemetry/api/build/src/baggage/types.js new file mode 100644 index 0000000..c428c6d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.js.map b/node_modules/@opentelemetry/api/build/src/baggage/types.js.map new file mode 100644 index 0000000..2a00da4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/baggage/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\n\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface BaggageEntry {\n /** `String` value of the `BaggageEntry`. 
*/\n value: string;\n /**\n * Metadata is an optional string property defined by the W3C baggage specification.\n * It currently has no special meaning defined by the specification.\n */\n metadata?: BaggageEntryMetadata;\n}\n\n/**\n * Serializable Metadata defined by the W3C baggage specification.\n * It currently has no special meaning defined by the OpenTelemetry or W3C.\n */\nexport type BaggageEntryMetadata = { toString(): string } & {\n __TYPE__: typeof baggageEntryMetadataSymbol;\n};\n\n/**\n * Baggage represents collection of key-value pairs with optional metadata.\n * Each key of Baggage is associated with exactly one value.\n * Baggage may be used to annotate and enrich telemetry data.\n */\nexport interface Baggage {\n /**\n * Get an entry from Baggage if it exists\n *\n * @param key The key which identifies the BaggageEntry\n */\n getEntry(key: string): BaggageEntry | undefined;\n\n /**\n * Get a list of all entries in the Baggage\n */\n getAllEntries(): [string, BaggageEntry][];\n\n /**\n * Returns a new baggage with the entries from the current bag and the specified entry\n *\n * @param key string which identifies the baggage entry\n * @param entry BaggageEntry for the given key\n */\n setEntry(key: string, entry: BaggageEntry): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entry\n *\n * @param key key identifying the entry to be removed\n */\n removeEntry(key: string): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entries\n *\n * @param key keys identifying the entries to be removed\n */\n removeEntries(...key: string[]): Baggage;\n\n /**\n * Returns a new baggage with no entries\n */\n clear(): Baggage;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts new file mode 100644 index 0000000..9955d9e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts @@ -0,0 +1,15 @@ +import { Baggage, BaggageEntry, BaggageEntryMetadata } from './types'; +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export declare function createBaggage(entries?: Record): Baggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export declare function baggageEntryMetadataFromString(str: string): BaggageEntryMetadata; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.js b/node_modules/@opentelemetry/api/build/src/baggage/utils.js new file mode 100644 index 0000000..a0bfbf6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.js @@ -0,0 +1,51 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; +const diag_1 = require("../api/diag"); +const baggage_impl_1 = require("./internal/baggage-impl"); +const symbol_1 = require("./internal/symbol"); +const diag = diag_1.DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +function createBaggage(entries = {}) { + return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); +} +exports.createBaggage = createBaggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error(`Cannot create baggage metadata from unknown type: ${typeof str}`); + str = ''; + } + return { + __TYPE__: symbol_1.baggageEntryMetadataSymbol, + toString() { + return str; + }, + }; +} +exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map b/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map new file mode 100644 index 0000000..d3e6ee8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/baggage/utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sCAAsC;AACtC,0DAAsD;AACtD,8CAA+D;AAG/D,MAAM,IAAI,GAAG,cAAO,CAAC,QAAQ,EAAE,CAAC;AAEhC;;;;GAIG;AACH,SAAgB,aAAa,CAC3B,UAAwC,EAAE;IAE1C,OAAO,IAAI,0BAAW,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC3D,CAAC;AAJD,sCAIC;AAED;;;;;GAKG;AACH,SAAgB,8BAA8B,CAC5C,GAAW;IAEX,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,IAAI,CAAC,KAAK,CACR,qDAAqD,OAAO,GAAG,EAAE,CAClE,CAAC;QACF,GAAG,GAAG,EAAE,CAAC;KACV;IAED,OAAO;QACL,QAAQ,EAAE,mCAA0B;QACpC,QAAQ;YACN,OAAO,GAAG,CAAC;QACb,CAAC;KACF,CAAC;AACJ,CAAC;AAhBD,wEAgBC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagAPI } from '../api/diag';\nimport { BaggageImpl } from './internal/baggage-impl';\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\nimport { Baggage, BaggageEntry, BaggageEntryMetadata } from './types';\n\nconst diag = DiagAPI.instance();\n\n/**\n * Create a new Baggage with optional entries\n *\n * @param entries An array of baggage entries the new baggage should contain\n */\nexport function createBaggage(\n entries: Record = {}\n): Baggage {\n return new BaggageImpl(new Map(Object.entries(entries)));\n}\n\n/**\n * Create a serializable BaggageEntryMetadata object from a string.\n *\n * @param 
str string metadata. Format is currently not defined by the spec and has no special meaning.\n *\n */\nexport function baggageEntryMetadataFromString(\n str: string\n): BaggageEntryMetadata {\n if (typeof str !== 'string') {\n diag.error(\n `Cannot create baggage metadata from unknown type: ${typeof str}`\n );\n str = '';\n }\n\n return {\n __TYPE__: baggageEntryMetadataSymbol,\n toString() {\n return str;\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts b/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts new file mode 100644 index 0000000..19994fb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts @@ -0,0 +1,15 @@ +/** + * Attributes is a map from string to attribute values. + * + * Note: only the own enumerable keys are counted as valid attribute keys. + */ +export interface Attributes { + [attributeKey: string]: AttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type AttributeValue = string | number | boolean | Array<null | undefined | string> | Array<null | undefined | number> | Array<null | undefined | boolean>; +//# sourceMappingURL=Attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.js b/node_modules/@opentelemetry/api/build/src/common/Attributes.js new file mode 100644 index 0000000..684c93d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map b/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map new file mode 100644 index 0000000..81b1168 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Attributes.js","sourceRoot":"","sources":["../../../src/common/Attributes.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Attributes is a map from string to attribute values.\n *\n * Note: only the own enumerable keys are counted as valid attribute keys.\n */\nexport interface Attributes {\n [attributeKey: string]: AttributeValue | undefined;\n}\n\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport type AttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts b/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts new file mode 100644 index 0000000..e175a7f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts @@ -0,0 +1,26 @@ +interface ExceptionWithCode { + code: string | number; + name?: string; + message?: string; + stack?: string; +} +interface ExceptionWithMessage { + code?: string | number; + message: string; + name?: string; + stack?: string; +} +interface ExceptionWithName { + code?: string | number; + message?: string; + name: string; + stack?: string; +} +/** + * Defines Exception. + * + * string or an object with one of (message or name or code) and optional stack + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; +export {}; +//# sourceMappingURL=Exception.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.js b/node_modules/@opentelemetry/api/build/src/common/Exception.js new file mode 100644 index 0000000..ed450ae --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.js.map b/node_modules/@opentelemetry/api/build/src/common/Exception.js.map new file mode 100644 index 0000000..459c350 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Exception.js","sourceRoot":"","sources":["../../../src/common/Exception.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\ninterface ExceptionWithCode {\n code: string | number;\n name?: string;\n message?: string;\n stack?: string;\n}\n\ninterface ExceptionWithMessage {\n code?: string | number;\n message: string;\n name?: string;\n stack?: string;\n}\n\ninterface ExceptionWithName {\n code?: string | number;\n message?: string;\n name: string;\n stack?: string;\n}\n\n/**\n * Defines Exception.\n *\n * string or an object with one of (message or name or code) and optional stack\n */\nexport type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.d.ts b/node_modules/@opentelemetry/api/build/src/common/Time.d.ts new file mode 100644 index 0000000..cc3c502 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.d.ts @@ -0,0 +1,20 @@ +/** + * Defines High-Resolution Time. + * + * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970. + * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds. + * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150. + * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds: + * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210. + * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds: + * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000. + * This is represented in HrTime format as [1609504210, 150000000]. + */ +export declare type HrTime = [number, number]; +/** + * Defines TimeInput. 
+ * + * hrtime, epoch milliseconds, performance.now() or Date + */ +export declare type TimeInput = HrTime | number | Date; +//# sourceMappingURL=Time.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.js b/node_modules/@opentelemetry/api/build/src/common/Time.js new file mode 100644 index 0000000..1faaf69 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.js.map b/node_modules/@opentelemetry/api/build/src/common/Time.js.map new file mode 100644 index 0000000..ae124f0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Time.js","sourceRoot":"","sources":["../../../src/common/Time.ts"],"names":[],"mappings":"","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Defines High-Resolution Time.\n *\n * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970.\n * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds.\n * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150.\n * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds:\n * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210.\n * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds:\n * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000.\n * This is represented in HrTime format as [1609504210, 150000000].\n */\nexport type HrTime = [number, number];\n\n/**\n * Defines TimeInput.\n *\n * hrtime, epoch milliseconds, performance.now() or Date\n */\nexport type TimeInput = HrTime | number | Date;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context-api.d.ts b/node_modules/@opentelemetry/api/build/src/context-api.d.ts new file mode 100644 index 0000000..650f4ee --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context-api.d.ts @@ -0,0 +1,4 @@ +import { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export declare const context: ContextAPI; +//# sourceMappingURL=context-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context-api.js b/node_modules/@opentelemetry/api/build/src/context-api.js new file mode 100644 index 0000000..b9aeea9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context-api.js @@ -0,0 +1,24 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.context = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const context_1 = require("./api/context"); +/** Entrypoint for context API */ +exports.context = context_1.ContextAPI.getInstance(); +//# sourceMappingURL=context-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context-api.js.map b/node_modules/@opentelemetry/api/build/src/context-api.js.map new file mode 100644 index 0000000..e8e3f92 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-api.js","sourceRoot":"","sources":["../../src/context-api.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,2CAA2C;AAC3C,iCAAiC;AACpB,QAAA,OAAO,GAAG,oBAAU,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { ContextAPI } from './api/context';\n/** Entrypoint for context API */\nexport const context = ContextAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts new file mode 100644 index 0000000..48a1659 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts @@ -0,0 +1,9 @@ +import * as types from './types'; +export declare class NoopContextManager implements types.ContextManager { + active(): types.Context; + with ReturnType>(_context: types.Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + bind(_context: types.Context, target: T): T; + enable(): this; + disable(): this; +} +//# sourceMappingURL=NoopContextManager.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js new file mode 100644 index 0000000..10c6ae1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js @@ -0,0 +1,38 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use 
this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopContextManager = void 0; +const context_1 = require("./context"); +class NoopContextManager { + active() { + return context_1.ROOT_CONTEXT; + } + with(_context, fn, thisArg, ...args) { + return fn.call(thisArg, ...args); + } + bind(_context, target) { + return target; + } + enable() { + return this; + } + disable() { + return this; + } +} +exports.NoopContextManager = NoopContextManager; +//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map new file mode 100644 index 0000000..46b0612 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopContextManager.js","sourceRoot":"","sources":["../../../src/context/NoopContextManager.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,uCAAyC;AAGzC,MAAa,kBAAkB;IAC7B,MAAM;QACJ,OAAO,sBAAY,CAAC;IACtB,CAAC;IAED,IAAI,CACF,QAAuB,EACvB,EAAK,EACL,OAA8B,EAC9B,GAAG,IAAO;QAEV,OAAO,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;IACnC,CAAC;IAED,IAAI,CAAI,QAAuB,EAAE,MAAS;QACxC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM;QACJ,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO;QACL,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAzBD,gDAyBC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ROOT_CONTEXT } from './context';\nimport * as types from './types';\n\nexport class NoopContextManager implements types.ContextManager {\n active(): types.Context {\n return ROOT_CONTEXT;\n }\n\n with ReturnType>(\n _context: types.Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return fn.call(thisArg, ...args);\n }\n\n bind(_context: types.Context, target: T): T {\n return target;\n }\n\n enable(): this {\n return this;\n }\n\n disable(): this {\n return this;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.d.ts b/node_modules/@opentelemetry/api/build/src/context/context.d.ts new file mode 100644 index 0000000..8be0259 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.d.ts @@ -0,0 +1,6 @@ +import { Context } from './types'; +/** Get a key to uniquely identify a context value */ +export declare function createContextKey(description: string): symbol; +/** The root context is used as the 
default parent context when there is no active context */ +export declare const ROOT_CONTEXT: Context; +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.js b/node_modules/@opentelemetry/api/build/src/context/context.js new file mode 100644 index 0000000..eecc159 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.js @@ -0,0 +1,55 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ROOT_CONTEXT = exports.createContextKey = void 0; +/** Get a key to uniquely identify a context value */ +function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +exports.createContextKey = createContextKey; +class BaseContext { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + constructor(parentContext) { + // for minification + const self = this; + self._currentContext = parentContext ? 
new Map(parentContext) : new Map(); + self.getValue = (key) => self._currentContext.get(key); + self.setValue = (key, value) => { + const context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = (key) => { + const context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } +} +/** The root context is used as the default parent context when there is no active context */ +exports.ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.js.map b/node_modules/@opentelemetry/api/build/src/context/context.js.map new file mode 100644 index 0000000..e46cf8d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/context/context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,qDAAqD;AACrD,SAAgB,gBAAgB,CAAC,WAAmB;IAClD,0EAA0E;IAC1E,2EAA2E;IAC3E,wEAAwE;IACxE,mDAAmD;IACnD,EAAE;IACF,8EAA8E;IAC9E,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AACjC,CAAC;AARD,4CAQC;AAED,MAAM,WAAW;IAGf;;;;OAIG;IACH,YAAY,aAAoC;QAC9C,mBAAmB;QACnB,MAAM,IAAI,GAAG,IAAI,CAAC;QAElB,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;QAE1E,IAAI,CAAC,QAAQ,GAAG,CAAC,GAAW,EAAE,EAAE,CAAC,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QAE/D,IAAI,CAAC,QAAQ,GAAG,CAAC,GAAW,EAAE,KAAc,EAAW,EAAE;YACvD,MAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;YACxC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;QAEF,IAAI,CAAC,WAAW,GAAG,CAAC,GAAW,EAAW,EAAE;YAC1C,MAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YACpC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;IACJ,CAAC;CAyBF;AAED,6FAA6F;AAChF,QAAA,YAAY,GAAY,IAAI,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from './types';\n\n/** Get a key to uniquely identify a context value */\nexport function createContextKey(description: string) {\n // The specification states that for the same input, multiple calls should\n // return different keys. 
Due to the nature of the JS dependency management\n // system, this creates problems where multiple versions of some package\n // could hold different keys for the same property.\n //\n // Therefore, we use Symbol.for which returns the same key for the same input.\n return Symbol.for(description);\n}\n\nclass BaseContext implements Context {\n private _currentContext!: Map;\n\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n constructor(parentContext?: Map) {\n // for minification\n const self = this;\n\n self._currentContext = parentContext ? new Map(parentContext) : new Map();\n\n self.getValue = (key: symbol) => self._currentContext.get(key);\n\n self.setValue = (key: symbol, value: unknown): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n\n self.deleteValue = (key: symbol): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n }\n\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n public getValue!: (key: symbol) => unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n public setValue!: (key: symbol, value: unknown) => Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n public deleteValue!: (key: symbol) => Context;\n}\n\n/** The root context is used as the default parent context when there is no active context */\nexport const ROOT_CONTEXT: Context = new BaseContext();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.d.ts b/node_modules/@opentelemetry/api/build/src/context/types.d.ts new file mode 100644 index 0000000..7e86632 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.d.ts @@ -0,0 +1,52 @@ +export interface Context { + /** + * Get a value from the context. + * + * @param key key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key context key for which to set the value + * @param value value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +export interface ContextManager { + /** + * Get the current active context + */ + active(): Context; + /** + * Run the fn callback with object set as the current active context + * @param context Any object to set as the current active context + * @param fn A callback to be immediately run within a specific context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind an object as the current context (or a specific one) + * @param [context] Optionally specify the context which you want to assign + * @param target Any object to which a context need to be set + */ + bind(context: Context, target: T): T; + /** + * Enable context management + */ + enable(): this; + /** + * Disable context management + */ + disable(): this; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.js b/node_modules/@opentelemetry/api/build/src/context/types.js new file mode 100644 index 0000000..c428c6d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.js.map b/node_modules/@opentelemetry/api/build/src/context/types.js.map new file mode 100644 index 0000000..8bde9ce --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/context/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n getValue(key: symbol): unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n\nexport interface ContextManager {\n /**\n * Get the current active context\n */\n active(): Context;\n\n /**\n * Run the fn callback with object set as the current active context\n * @param context Any object to set as the current active context\n * @param fn A callback to be immediately run within a specific context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType;\n\n /**\n * Bind an object as the current context (or a specific one)\n * @param [context] Optionally specify the context which you want to assign\n * @param target Any object to which a context need to be set\n */\n bind(context: Context, target: T): T;\n\n /**\n * Enable context management\n */\n enable(): this;\n\n /**\n * Disable context management\n */\n disable(): this;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag-api.d.ts b/node_modules/@opentelemetry/api/build/src/diag-api.d.ts new file mode 100644 index 0000000..d82fdb1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag-api.d.ts @@ -0,0 +1,9 @@ +import { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. 
+ */ +export declare const diag: DiagAPI; +//# sourceMappingURL=diag-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag-api.js b/node_modules/@opentelemetry/api/build/src/diag-api.js new file mode 100644 index 0000000..cbf28db --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag-api.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.diag = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const diag_1 = require("./api/diag"); +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +exports.diag = diag_1.DiagAPI.instance(); +//# sourceMappingURL=diag-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag-api.js.map b/node_modules/@opentelemetry/api/build/src/diag-api.js.map new file mode 100644 index 0000000..f87b0fb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag-api.js","sourceRoot":"","sources":["../../src/diag-api.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,qCAAqC;AACrC;;;;;GAKG;AACU,QAAA,IAAI,GAAG,cAAO,CAAC,QAAQ,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { DiagAPI } from './api/diag';\n/**\n * Entrypoint for Diag API.\n * Defines Diagnostic handler used for internal diagnostic logging operations.\n * The default provides a Noop DiagLogger implementation which may be changed via the\n * diag.setLogger(logger: DiagLogger) function.\n */\nexport const diag = DiagAPI.instance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts new file mode 100644 index 0000000..f060950 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts @@ -0,0 +1,20 @@ +import { 
ComponentLoggerOptions, DiagLogger } from './types'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. + * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +export declare class DiagComponentLogger implements DiagLogger { + private _namespace; + constructor(props: ComponentLoggerOptions); + debug(...args: any[]): void; + error(...args: any[]): void; + info(...args: any[]): void; + warn(...args: any[]): void; + verbose(...args: any[]): void; +} +//# sourceMappingURL=ComponentLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js new file mode 100644 index 0000000..579b7e6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js @@ -0,0 +1,59 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagComponentLogger = void 0; +const global_utils_1 = require("../internal/global-utils"); +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +class DiagComponentLogger { + constructor(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + debug(...args) { + return logProxy('debug', this._namespace, args); + } + error(...args) { + return logProxy('error', this._namespace, args); + } + info(...args) { + return logProxy('info', this._namespace, args); + } + warn(...args) { + return logProxy('warn', this._namespace, args); + } + verbose(...args) { + return logProxy('verbose', this._namespace, args); + } +} +exports.DiagComponentLogger = DiagComponentLogger; +function logProxy(funcName, namespace, args) { + const logger = (0, global_utils_1.getGlobal)('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName](...args); +} +//# sourceMappingURL=ComponentLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map new file mode 100644 index 0000000..c1b8504 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ComponentLogger.js","sourceRoot":"","sources":["../../../src/diag/ComponentLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,2DAAqD;AAGrD;;;;;;;;GAQG;AACH,MAAa,mBAAmB;IAG9B,YAAY,KAA6B;QACvC,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,SAAS,IAAI,qBAAqB,CAAC;IAC7D,CAAC;IAEM,KAAK,CAAC,GAAG,IAAW;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,KAAK,CAAC,GAAG,IAAW;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,IAAI,CAAC,GAAG,IAAW;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,IAAI,CAAC,GAAG,IAAW;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,OAAO,CAAC,GAAG,IAAW;QAC3B,OAAO,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;CACF;AA1BD,kDA0BC;AAED,SAAS,QAAQ,CACf,QAA0B,EAC1B,SAAiB,EACjB,IAAS;IAET,MAAM,MAAM,GAAG,IAAA,wBAAS,EAAC,MAAM,CAAC,CAAC;IACjC,6BAA6B;IAC7B,IAAI,CAAC,MAAM,EAAE;QACX,OAAO;KACR;IAED,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxB,OAAO,MAAM,CAAC,QAAQ,CAAC,CAAC,GAAI,IAAoC,CAAC,CAAC;AACpE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getGlobal } from '../internal/global-utils';\nimport { ComponentLoggerOptions, DiagLogger, DiagLogFunction } from './types';\n\n/**\n * Component Logger which is meant to be used as part of any component which\n * will add automatically additional namespace in front of the log message.\n * It will then forward all message to global diag logger\n * @example\n * const cLogger = diag.createComponentLogger({ 
namespace: '@opentelemetry/instrumentation-http' });\n * cLogger.debug('test');\n * // @opentelemetry/instrumentation-http test\n */\nexport class DiagComponentLogger implements DiagLogger {\n private _namespace: string;\n\n constructor(props: ComponentLoggerOptions) {\n this._namespace = props.namespace || 'DiagComponentLogger';\n }\n\n public debug(...args: any[]): void {\n return logProxy('debug', this._namespace, args);\n }\n\n public error(...args: any[]): void {\n return logProxy('error', this._namespace, args);\n }\n\n public info(...args: any[]): void {\n return logProxy('info', this._namespace, args);\n }\n\n public warn(...args: any[]): void {\n return logProxy('warn', this._namespace, args);\n }\n\n public verbose(...args: any[]): void {\n return logProxy('verbose', this._namespace, args);\n }\n}\n\nfunction logProxy(\n funcName: keyof DiagLogger,\n namespace: string,\n args: any\n): void {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) {\n return;\n }\n\n args.unshift(namespace);\n return logger[funcName](...(args as Parameters));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts new file mode 100644 index 0000000..fa3db1e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts @@ -0,0 +1,38 @@ +import { DiagLogger, DiagLogFunction } from './types'; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +export declare class DiagConsoleLogger implements DiagLogger { + constructor(); + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. Useful scenarios would be to log the execution + * order of async operations + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. 
+ */ + verbose: DiagLogFunction; +} +//# sourceMappingURL=consoleLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js new file mode 100644 index 0000000..1962275 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js @@ -0,0 +1,57 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagConsoleLogger = void 0; +const consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +class DiagConsoleLogger { + constructor() { + function _consoleFunc(funcName) { + return function (...args) { + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + let theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (let i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } +} +exports.DiagConsoleLogger = DiagConsoleLogger; +//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map new file mode 100644 index 0000000..d57fe77 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"consoleLogger.js","sourceRoot":"","sources":["../../../src/diag/consoleLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH,MAAM,UAAU,GAAiD;IAC/D,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE;CAC7B,CAAC;AAEF;;;;GAIG;AACH,MAAa,iBAAiB;IAC5B;QACE,SAAS,YAAY,CAAC,QAAwB;YAC5C,OAAO,UAAU,GAAG,IAAI;gBACtB,IAAI,OAAO,EAAE;oBACX,mFAAmF;oBACnF,sCAAsC;oBACtC,IAAI,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAChC,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,6CAA6C;wBAC7C,sCAAsC;wBACtC,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC;qBACvB;oBAED,uBAAuB;oBACvB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,OAAO,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACrC;iBACF;YACH,CAAC,CAAC;QACJ,CAAC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACvD;IACH,CAAC;CAkCF;AA3DD,8CA2DC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger, DiagLogFunction } from './types';\n\ntype ConsoleMapKeys = 'error' | 'warn' | 'info' | 'debug' | 'trace';\nconst consoleMap: { n: keyof DiagLogger; c: ConsoleMapKeys }[] = [\n { n: 'error', c: 'error' },\n { n: 'warn', c: 'warn' },\n { n: 'info', c: 'info' },\n { n: 'debug', c: 'debug' },\n { n: 'verbose', c: 'trace' },\n];\n\n/**\n * A simple Immutable Console based diagnostic logger which will output any messages to the Console.\n * If you want to limit the amount of logging to a specific level or lower use the\n * {@link createLogLevelDiagLogger}\n */\nexport class DiagConsoleLogger implements DiagLogger {\n constructor() {\n function _consoleFunc(funcName: ConsoleMapKeys): DiagLogFunction {\n return function (...args) {\n if (console) {\n // Some environments only expose the console when the F12 developer console is open\n // eslint-disable-next-line no-console\n let theFunc = console[funcName];\n if (typeof theFunc !== 'function') {\n // Not all environments support all functions\n // eslint-disable-next-line no-console\n theFunc = console.log;\n }\n\n // One last final check\n if (typeof theFunc === 'function') {\n return theFunc.apply(console, args);\n }\n }\n };\n }\n\n for (let i = 0; i < consoleMap.length; i++) {\n this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c);\n }\n }\n\n /** Log an error scenario that was not expected and caused the requested operation to fail. 
*/\n public error!: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n public warn!: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n public info!: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario. Useful scenarios would be to log the execution\n * order of async operations\n */\n public debug!: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n public verbose!: DiagLogFunction;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts new file mode 100644 index 0000000..890b9f1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts @@ -0,0 +1,3 @@ +import { DiagLogger, DiagLogLevel } from '../types'; +export declare function createLogLevelDiagLogger(maxLevel: DiagLogLevel, logger: DiagLogger): DiagLogger; +//# sourceMappingURL=logLevelLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js new file mode 100644 index 0000000..ee1702e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js @@ -0,0 +1,45 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createLogLevelDiagLogger = void 0; +const types_1 = require("../types"); +function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < types_1.DiagLogLevel.NONE) { + maxLevel = types_1.DiagLogLevel.NONE; + } + else if (maxLevel > types_1.DiagLogLevel.ALL) { + maxLevel = types_1.DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + const theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', types_1.DiagLogLevel.ERROR), + warn: _filterFunc('warn', types_1.DiagLogLevel.WARN), + info: _filterFunc('info', types_1.DiagLogLevel.INFO), + debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE), + }; +} +exports.createLogLevelDiagLogger = createLogLevelDiagLogger; +//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map new file mode 100644 index 0000000..1148835 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logLevelLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/logLevelLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,oCAAqE;AAErE,SAAgB,wBAAwB,CACtC,QAAsB,EACtB,MAAkB;IAElB,IAAI,QAAQ,GAAG,oBAAY,CAAC,IAAI,EAAE;QAChC,QAAQ,GAAG,oBAAY,CAAC,IAAI,CAAC;KAC9B;SAAM,IAAI,QAAQ,GAAG,oBAAY,CAAC,GAAG,EAAE;QACtC,QAAQ,GAAG,oBAAY,CAAC,GAAG,CAAC;KAC7B;IAED,0CAA0C;IAC1C,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IAEtB,SAAS,WAAW,CAClB,QAA0B,EAC1B,QAAsB;QAEtB,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,OAAO,OAAO,KAAK,UAAU,IAAI,QAAQ,IAAI,QAAQ,EAAE;YACzD,OAAO,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;QACD,OAAO,cAAa,CAAC,CAAC;IACxB,CAAC;IAED,OAAO;QACL,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,oBAAY,CAAC,KAAK,CAAC;QAC/C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,oBAAY,CAAC,IAAI,CAAC;QAC5C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,oBAAY,CAAC,IAAI,CAAC;QAC5C,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,oBAAY,CAAC,KAAK,CAAC;QAC/C,OAAO,EAAE,WAAW,CAAC,SAAS,EAAE,oBAAY,CAAC,OAAO,CAAC;KACtD,CAAC;AACJ,CAAC;AAhCD,4DAgCC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogFunction, DiagLogger, DiagLogLevel } from '../types';\n\nexport function createLogLevelDiagLogger(\n maxLevel: DiagLogLevel,\n logger: DiagLogger\n): DiagLogger {\n if (maxLevel < DiagLogLevel.NONE) {\n maxLevel = DiagLogLevel.NONE;\n } else if (maxLevel > DiagLogLevel.ALL) {\n maxLevel = DiagLogLevel.ALL;\n }\n\n // In case the logger is null or undefined\n logger = logger || {};\n\n function _filterFunc(\n funcName: keyof DiagLogger,\n theLevel: 
DiagLogLevel\n ): DiagLogFunction {\n const theFunc = logger[funcName];\n\n if (typeof theFunc === 'function' && maxLevel >= theLevel) {\n return theFunc.bind(logger);\n }\n return function () {};\n }\n\n return {\n error: _filterFunc('error', DiagLogLevel.ERROR),\n warn: _filterFunc('warn', DiagLogLevel.WARN),\n info: _filterFunc('info', DiagLogLevel.INFO),\n debug: _filterFunc('debug', DiagLogLevel.DEBUG),\n verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts new file mode 100644 index 0000000..ac71ee3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts @@ -0,0 +1,8 @@ +import { DiagLogger } from '../types'; +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. + * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export declare function createNoopDiagLogger(): DiagLogger; +//# sourceMappingURL=noopLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js new file mode 100644 index 0000000..4091631 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js @@ -0,0 +1,35 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createNoopDiagLogger = void 0; +function noopLogFunction() { } +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. 
+ * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +function createNoopDiagLogger() { + return { + verbose: noopLogFunction, + debug: noopLogFunction, + info: noopLogFunction, + warn: noopLogFunction, + error: noopLogFunction, + }; +} +exports.createNoopDiagLogger = createNoopDiagLogger; +//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map new file mode 100644 index 0000000..20e0e81 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"noopLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/noopLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,SAAS,eAAe,KAAI,CAAC;AAE7B;;;;GAIG;AACH,SAAgB,oBAAoB;IAClC,OAAO;QACL,OAAO,EAAE,eAAe;QACxB,KAAK,EAAE,eAAe;QACtB,IAAI,EAAE,eAAe;QACrB,IAAI,EAAE,eAAe;QACrB,KAAK,EAAE,eAAe;KACvB,CAAC;AACJ,CAAC;AARD,oDAQC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger } from '../types';\n\nfunction noopLogFunction() {}\n\n/**\n * Returns a No-Op Diagnostic logger where all messages do nothing.\n * @implements {@link DiagLogger}\n * @returns {DiagLogger}\n */\nexport function createNoopDiagLogger(): DiagLogger {\n return {\n verbose: noopLogFunction,\n debug: noopLogFunction,\n info: noopLogFunction,\n warn: noopLogFunction,\n error: noopLogFunction,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.d.ts b/node_modules/@opentelemetry/api/build/src/diag/types.d.ts new file mode 100644 index 0000000..e992cc5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.d.ts @@ -0,0 +1,100 @@ +export declare type DiagLogFunction = (message: string, ...args: unknown[]) => void; +/** + * Defines an internal diagnostic logger interface which is used to log internal diagnostic + * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function. + * API provided implementations include :- + * - a No-Op {@link createNoopDiagLogger} + * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger} + * - a general Console {@link DiagConsoleLogger} version. + */ +export interface DiagLogger { + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. 
+ */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. + * For example: Logging the order of execution of async operations. + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export declare enum DiagLogLevel { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + NONE = 0, + /** Identifies an error scenario */ + ERROR = 30, + /** Identifies a warning scenario */ + WARN = 50, + /** General informational log message */ + INFO = 60, + /** General debug log message */ + DEBUG = 70, + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + VERBOSE = 80, + /** Used to set the logging level to include all logging */ + ALL = 9999 +} +/** + * Defines options for ComponentLogger + */ +export interface ComponentLoggerOptions { + namespace: string; +} +export interface DiagLoggerOptions { + /** + * The {@link DiagLogLevel} used to filter logs sent to the logger. + * + * @defaultValue DiagLogLevel.INFO + */ + logLevel?: DiagLogLevel; + /** + * Setting this value to `true` will suppress the warning message normally emitted when registering a logger when another logger is already registered. + */ + suppressOverrideMessage?: boolean; +} +export interface DiagLoggerApi { + /** + * Set the global DiagLogger and DiagLogLevel. + * If a global diag logger is already set, this will override it. + * + * @param logger - The {@link DiagLogger} instance to set as the default logger. + * @param options - A {@link DiagLoggerOptions} object. If not provided, default values will be set. + * @returns `true` if the logger was successfully registered, else `false` + */ + setLogger(logger: DiagLogger, options?: DiagLoggerOptions): boolean; + /** + * + * @param logger - The {@link DiagLogger} instance to set as the default logger. + * @param logLevel - The {@link DiagLogLevel} used to filter logs sent to the logger. If not provided it will default to {@link DiagLogLevel.INFO}. + * @returns `true` if the logger was successfully registered, else `false` + */ + setLogger(logger: DiagLogger, logLevel?: DiagLogLevel): boolean; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.js b/node_modules/@opentelemetry/api/build/src/diag/types.js new file mode 100644 index 0000000..c195e45 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.js @@ -0,0 +1,44 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagLogLevel = void 0; +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.js.map b/node_modules/@opentelemetry/api/build/src/diag/types.js.map new file mode 100644 index 0000000..ee8afca --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/diag/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AA+CH;;;;GAIG;AACH,IAAY,YAwBX;AAxBD,WAAY,YAAY;IACtB,uFAAuF;IACvF,+CAAQ,CAAA;IAER,mCAAmC;IACnC,kDAAU,CAAA;IAEV,oCAAoC;IACpC,gDAAS,CAAA;IAET,wCAAwC;IACxC,gDAAS,CAAA;IAET,gCAAgC;IAChC,kDAAU,CAAA;IAEV;;;OAGG;IACH,sDAAY,CAAA;IAEZ,2DAA2D;IAC3D,gDAAU,CAAA;AACZ,CAAC,EAxBW,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAwBvB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport type DiagLogFunction = (message: string, ...args: unknown[]) => void;\n\n/**\n * Defines an internal diagnostic logger interface which is used to log internal diagnostic\n * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function.\n * API provided implementations include :-\n * - a No-Op {@link createNoopDiagLogger}\n * - a {@link 
DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger}\n * - a general Console {@link DiagConsoleLogger} version.\n */\nexport interface DiagLogger {\n /** Log an error scenario that was not expected and caused the requested operation to fail. */\n error: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n warn: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n info: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario.\n * For example: Logging the order of execution of async operations.\n */\n debug: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n verbose: DiagLogFunction;\n}\n\n/**\n * Defines the available internal logging levels for the diagnostic logger, the numeric values\n * of the levels are defined to match the original values from the initial LogLevel to avoid\n * compatibility/migration issues for any implementation that assume the numeric ordering.\n */\nexport enum DiagLogLevel {\n /** Diagnostic Logging level setting to disable all logging (except and forced logs) */\n NONE = 0,\n\n /** Identifies an error scenario */\n ERROR = 30,\n\n /** Identifies a warning scenario */\n WARN = 50,\n\n /** General informational log message */\n INFO = 60,\n\n /** General debug log message */\n DEBUG = 70,\n\n /**\n * Detailed trace level logging should only be used for development, should only be set\n * in a development environment.\n */\n VERBOSE = 80,\n\n /** Used to set the logging level to include all logging */\n ALL = 9999,\n}\n\n/**\n * Defines options for ComponentLogger\n */\nexport interface ComponentLoggerOptions {\n namespace: string;\n}\n\nexport interface DiagLoggerOptions {\n /**\n * The {@link DiagLogLevel} used to filter logs sent to the logger.\n *\n * @defaultValue DiagLogLevel.INFO\n */\n logLevel?: DiagLogLevel;\n\n /**\n * Setting this value to `true` will suppress the warning message normally emitted when registering a logger when another logger is already registered.\n */\n suppressOverrideMessage?: boolean;\n}\n\nexport interface DiagLoggerApi {\n /**\n * Set the global DiagLogger and DiagLogLevel.\n * If a global diag logger is already set, this will override it.\n *\n * @param logger - The {@link DiagLogger} instance to set as the default logger.\n * @param options - A {@link DiagLoggerOptions} object. If not provided, default values will be set.\n * @returns `true` if the logger was successfully registered, else `false`\n */\n setLogger(logger: DiagLogger, options?: DiagLoggerOptions): boolean;\n\n /**\n *\n * @param logger - The {@link DiagLogger} instance to set as the default logger.\n * @param logLevel - The {@link DiagLogLevel} used to filter logs sent to the logger. 
If not provided it will default to {@link DiagLogLevel.INFO}.\n * @returns `true` if the logger was successfully registered, else `false`\n */\n setLogger(logger: DiagLogger, logLevel?: DiagLogLevel): boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/index.d.ts b/node_modules/@opentelemetry/api/build/src/experimental/index.d.ts new file mode 100644 index 0000000..bec3965 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/index.d.ts @@ -0,0 +1,3 @@ +export { wrapTracer, SugaredTracer } from './trace/SugaredTracer'; +export { SugaredSpanOptions } from './trace/SugaredOptions'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/index.js b/node_modules/@opentelemetry/api/build/src/experimental/index.js new file mode 100644 index 0000000..bd611ec --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/index.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SugaredTracer = exports.wrapTracer = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var SugaredTracer_1 = require("./trace/SugaredTracer"); +Object.defineProperty(exports, "wrapTracer", { enumerable: true, get: function () { return SugaredTracer_1.wrapTracer; } }); +Object.defineProperty(exports, "SugaredTracer", { enumerable: true, get: function () { return SugaredTracer_1.SugaredTracer; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/index.js.map b/node_modules/@opentelemetry/api/build/src/experimental/index.js.map new file mode 100644 index 0000000..f0ed983 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/experimental/index.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,uDAAkE;AAAzD,2GAAA,UAAU,OAAA;AAAE,8GAAA,aAAa,OAAA","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport { wrapTracer, SugaredTracer } from './trace/SugaredTracer';\nexport { SugaredSpanOptions } from './trace/SugaredOptions';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.d.ts 
b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.d.ts new file mode 100644 index 0000000..89040af --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.d.ts @@ -0,0 +1,13 @@ +import { Span, SpanOptions } from '../../'; +/** + * Options needed for span creation + */ +export interface SugaredSpanOptions extends SpanOptions { + /** + * function to overwrite default exception behavior to record the exception. No exceptions should be thrown in the function. + * @param e Error which triggered this exception + * @param span current span from context + */ + onException?: (e: Error, span: Span) => void; +} +//# sourceMappingURL=SugaredOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js new file mode 100644 index 0000000..a18d65b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=SugaredOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js.map b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js.map new file mode 100644 index 0000000..19ded54 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SugaredOptions.js","sourceRoot":"","sources":["../../../../src/experimental/trace/SugaredOptions.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Span, SpanOptions } from '../../';\n\n/**\n * Options needed for span creation\n */\nexport interface SugaredSpanOptions extends SpanOptions {\n /**\n * function to overwrite default exception behavior to record the exception. 
No exceptions should be thrown in the function.\n * @param e Error which triggered this exception\n * @param span current span from context\n */\n onException?: (e: Error, span: Span) => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.d.ts b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.d.ts new file mode 100644 index 0000000..1ba7da9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.d.ts @@ -0,0 +1,64 @@ +import { SugaredSpanOptions } from './SugaredOptions'; +import { Context, Span, Tracer } from '../../'; +/** + * return a new SugaredTracer created from the supplied one + * @param tracer + */ +export declare function wrapTracer(tracer: Tracer): SugaredTracer; +export declare class SugaredTracer implements Tracer { + private readonly _tracer; + constructor(tracer: Tracer); + startActiveSpan: Tracer['startActiveSpan']; + startSpan: Tracer['startSpan']; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally, the new span gets set in context and this context is activated + * for the duration of the function call. + * The span will be closed after the function has executed. + * If an exception occurs, it is recorded, the status is set to ERROR and the exception is rethrown. + * + * @param name The name of the span + * @param [options] SugaredSpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.withActiveSpan('op', span => { + * // do some work + * }); + * @example + * const something = await tracer.withActiveSpan('op', span => { + * // do some async work + * }); + */ + withActiveSpan ReturnType>(name: string, fn: F): ReturnType; + withActiveSpan ReturnType>(name: string, options: SugaredSpanOptions, fn: F): ReturnType; + withActiveSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; + /** + * Starts a new {@link Span} and ends it after execution of fn without setting it on context. + * The span will be closed after the function has executed. + * If an exception occurs, it is recorded, the status is et to ERROR and rethrown. + * + * This method does NOT modify the current Context. 
+ * + * @param name The name of the span + * @param [options] SugaredSpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns Span The newly created span + * @example + * const something = tracer.withSpan('op', span => { + * // do some work + * }); + * @example + * const something = await tracer.withSpan('op', span => { + * // do some async work + * }); + */ + withSpan ReturnType>(name: string, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; + withSpan ReturnType>(name: string, options: SugaredSpanOptions, context: Context, fn: F): ReturnType; +} +//# sourceMappingURL=SugaredTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js new file mode 100644 index 0000000..aae6249 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js @@ -0,0 +1,93 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SugaredTracer = exports.wrapTracer = void 0; +const __1 = require("../../"); +const defaultOnException = (e, span) => { + span.recordException(e); + span.setStatus({ + code: __1.SpanStatusCode.ERROR, + }); +}; +/** + * return a new SugaredTracer created from the supplied one + * @param tracer + */ +function wrapTracer(tracer) { + return new SugaredTracer(tracer); +} +exports.wrapTracer = wrapTracer; +class SugaredTracer { + constructor(tracer) { + this._tracer = tracer; + this.startSpan = tracer.startSpan.bind(this._tracer); + this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer); + } + withActiveSpan(name, arg2, arg3, arg4) { + const { opts, ctx, fn } = massageParams(arg2, arg3, arg4); + return this._tracer.startActiveSpan(name, opts, ctx, (span) => handleFn(span, opts, fn)); + } + withSpan(name, arg2, arg3, arg4) { + const { opts, ctx, fn } = massageParams(arg2, arg3, arg4); + const span = this._tracer.startSpan(name, opts, ctx); + return handleFn(span, opts, fn); + } +} +exports.SugaredTracer = SugaredTracer; +/** + * Massages parameters of withSpan and withActiveSpan to allow signature overwrites + * @param arg + * @param arg2 + * @param arg3 + */ +function massageParams(arg, arg2, arg3) { + let opts; + let ctx; + let fn; + if (!arg2 && !arg3) { + fn = arg; + } + else if (!arg3) { + opts = arg; + fn = arg2; + } + else { + opts = arg; + ctx = arg2; + fn = arg3; + } + opts = opts !== null && opts !== void 0 ? opts : {}; + ctx = ctx !== null && ctx !== void 0 ? ctx : __1.context.active(); + return { opts, ctx, fn }; +} +/** + * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling + * @param span + * @param opts + * @param fn + */ +function handleFn(span, opts, fn) { + var _a; + const onException = (_a = opts.onException) !== null && _a !== void 0 ? _a : defaultOnException; + const errorHandler = (e) => { + onException(e, span); + span.end(); + throw e; + }; + try { + const ret = fn(span); + // if fn is an async function, attach a recordException and spanEnd callback to the promise + if (typeof (ret === null || ret === void 0 ? 
void 0 : ret.then) === 'function') { + return ret.then(val => { + span.end(); + return val; + }, errorHandler); + } + span.end(); + return ret; + } + catch (e) { + // add throw to signal the compiler that this will throw in the inner scope + throw errorHandler(e); + } +} +//# sourceMappingURL=SugaredTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js.map b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js.map new file mode 100644 index 0000000..b7abda9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SugaredTracer.js","sourceRoot":"","sources":["../../../../src/experimental/trace/SugaredTracer.ts"],"names":[],"mappings":";;;AAgBA,8BAAwE;AAExE,MAAM,kBAAkB,GAAG,CAAC,CAAQ,EAAE,IAAU,EAAE,EAAE;IAClD,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,CAAC,SAAS,CAAC;QACb,IAAI,EAAE,kBAAc,CAAC,KAAK;KAC3B,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;GAGG;AACH,SAAgB,UAAU,CAAC,MAAc;IACvC,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,CAAC;AACnC,CAAC;AAFD,gCAEC;AAED,MAAa,aAAa;IAGxB,YAAY,MAAc;QACxB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QACtB,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACnE,CAAC;IA0CD,cAAc,CACZ,IAAY,EACZ,IAA4B,EAC5B,IAAkB,EAClB,IAAQ;QAER,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,aAAa,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QAE1D,OAAO,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,IAAI,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,IAAU,EAAE,EAAE,CAClE,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,EAAE,CAAC,CACR,CAAC;IACrB,CAAC;IA4CD,QAAQ,CACN,IAAY,EACZ,IAA4B,EAC5B,IAAkB,EAClB,IAAQ;QAER,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,aAAa,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QAE1D,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,GAAG,CAAC,CAAC;QACrD,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,EAAE,CAAkB,CAAC;IACnD,CAAC;CACF;AAnHD,sCAmHC;AAED;;;;;GAKG;AACH,SAAS,aAAa,CACpB,GAA2B,EAC3B,IAAkB,EAClB,IAAQ;IAER,IAAI,IAAoC,CAAC;IACzC,IAAI,GAAwB,CAAC;IAC7B,IAAI,EAAK,CAAC;IAEV,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,EAAE;QAClB,EAAE,GAAG,GAAQ,CAAC;KACf;SAAM,IAAI,CAAC,IAAI,EAAE;QAChB,IAAI,GAAG,GAAyB,CAAC;QACjC,EAAE,GAAG,IAAS,CAAC;KAChB;SAAM;QACL,IAAI,GAAG,GAAyB,CAAC;QACjC,GAAG,GAAG,IAAe,CAAC;QACtB,EAAE,GAAG,IAAS,CAAC;KAChB;IACD,IAAI,GAAG,IAAI,aAAJ,IAAI,cAAJ,IAAI,GAAI,EAAE,CAAC;IAClB,GAAG,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,WAAO,CAAC,MAAM,EAAE,CAAC;IAE9B,OAAO,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,CAAC;AAC3B,CAAC;AAED;;;;;GAKG;AACH,SAAS,QAAQ,CACf,IAAU,EACV,IAAwB,EACxB,EAAK;;IAEL,MAAM,WAAW,GAAG,MAAA,IAAI,CAAC,WAAW,mCAAI,kBAAkB,CAAC;IAC3D,MAAM,YAAY,GAAG,CAAC,CAAQ,EAAE,EAAE;QAChC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QACrB,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,MAAM,CAAC,CAAC;IACV,CAAC,CAAC;IAEF,IAAI;QACF,MAAM,GAAG,GAAG,EAAE,CAAC,IAAI,CAA2B,CAAC;QAC/C,2FAA2F;QAC3F,IAAI,OAAO,CAAA,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,IAAI,CAAA,KAAK,UAAU,EAAE;YACnC,OAAO,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACpB,IAAI,CAAC,GAAG,EAAE,CAAC;gBACX,OAAO,GAAG,CAAC;YACb,CAAC,EAAE,YAAY,CAAkB,CAAC;SACnC;QACD,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,OAAO,GAAoB,CAAC;KAC7B;IAAC,OAAO,CAAC,EAAE;QACV,2EAA2E;QAC3E,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;AACH,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n 
* Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { SugaredSpanOptions } from './SugaredOptions';\nimport { context, Context, Span, SpanStatusCode, Tracer } from '../../';\n\nconst defaultOnException = (e: Error, span: Span) => {\n span.recordException(e);\n span.setStatus({\n code: SpanStatusCode.ERROR,\n });\n};\n\n/**\n * return a new SugaredTracer created from the supplied one\n * @param tracer\n */\nexport function wrapTracer(tracer: Tracer): SugaredTracer {\n return new SugaredTracer(tracer);\n}\n\nexport class SugaredTracer implements Tracer {\n private readonly _tracer: Tracer;\n\n constructor(tracer: Tracer) {\n this._tracer = tracer;\n this.startSpan = tracer.startSpan.bind(this._tracer);\n this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer);\n }\n\n startActiveSpan: Tracer['startActiveSpan'];\n startSpan: Tracer['startSpan'];\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally, the new span gets set in context and this context is activated\n * for the duration of the function call.\n * The span will be closed after the function has executed.\n * If an exception occurs, it is recorded, the status is set to ERROR and the exception is rethrown.\n *\n * @param name The name of the span\n * @param [options] SugaredSpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.withActiveSpan('op', span => {\n * // do some work\n * });\n * @example\n * const something = await tracer.withActiveSpan('op', span => {\n * // do some async work\n * });\n */\n withActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withActiveSpan ReturnType>(\n name: string,\n arg2: F | SugaredSpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType {\n const { opts, ctx, fn } = massageParams(arg2, arg3, arg4);\n\n return this._tracer.startActiveSpan(name, opts, ctx, (span: Span) =>\n handleFn(span, opts, fn)\n ) as ReturnType;\n }\n\n /**\n * Starts a new {@link Span} and ends it after execution of fn without setting it on context.\n * The span will be closed after the function has executed.\n * If an exception occurs, it is recorded, the status is et to ERROR and rethrown.\n *\n * This method does NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SugaredSpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns Span The newly created span\n * @example\n * const something = tracer.withSpan('op', span => {\n * // do some work\n * });\n * @example\n * const something = await tracer.withSpan('op', span => {\n * // do some async work\n * });\n */\n withSpan ReturnType>(\n name: string,\n fn: F\n ): 
ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n options: SugaredSpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n withSpan ReturnType>(\n name: string,\n arg2: SugaredSpanOptions | F,\n arg3?: Context | F,\n arg4?: F\n ): ReturnType {\n const { opts, ctx, fn } = massageParams(arg2, arg3, arg4);\n\n const span = this._tracer.startSpan(name, opts, ctx);\n return handleFn(span, opts, fn) as ReturnType;\n }\n}\n\n/**\n * Massages parameters of withSpan and withActiveSpan to allow signature overwrites\n * @param arg\n * @param arg2\n * @param arg3\n */\nfunction massageParams ReturnType>(\n arg: F | SugaredSpanOptions,\n arg2?: F | Context,\n arg3?: F\n) {\n let opts: SugaredSpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (!arg2 && !arg3) {\n fn = arg as F;\n } else if (!arg3) {\n opts = arg as SugaredSpanOptions;\n fn = arg2 as F;\n } else {\n opts = arg as SugaredSpanOptions;\n ctx = arg2 as Context;\n fn = arg3 as F;\n }\n opts = opts ?? {};\n ctx = ctx ?? context.active();\n\n return { opts, ctx, fn };\n}\n\n/**\n * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling\n * @param span\n * @param opts\n * @param fn\n */\nfunction handleFn ReturnType>(\n span: Span,\n opts: SugaredSpanOptions,\n fn: F\n): ReturnType {\n const onException = opts.onException ?? defaultOnException;\n const errorHandler = (e: Error) => {\n onException(e, span);\n span.end();\n throw e;\n };\n\n try {\n const ret = fn(span) as Promise>;\n // if fn is an async function, attach a recordException and spanEnd callback to the promise\n if (typeof ret?.then === 'function') {\n return ret.then(val => {\n span.end();\n return val;\n }, errorHandler) as ReturnType;\n }\n span.end();\n return ret as ReturnType;\n } catch (e) {\n // add throw to signal the compiler that this will throw in the inner scope\n throw errorHandler(e);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.d.ts b/node_modules/@opentelemetry/api/build/src/index.d.ts new file mode 100644 index 0000000..89ae493 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.d.ts @@ -0,0 +1,54 @@ +export { BaggageEntry, BaggageEntryMetadata, Baggage } from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export { Exception } from './common/Exception'; +export { HrTime, TimeInput } from './common/Time'; +export { Attributes, AttributeValue } from './common/Attributes'; +export { createContextKey, ROOT_CONTEXT } from './context/context'; +export { Context, ContextManager } from './context/types'; +export type { ContextAPI } from './api/context'; +export { DiagConsoleLogger } from './diag/consoleLogger'; +export { DiagLogFunction, DiagLogger, DiagLogLevel, ComponentLoggerOptions, DiagLoggerOptions, } from './diag/types'; +export type { DiagAPI } from './api/diag'; +export { createNoopMeter } from './metrics/NoopMeter'; +export { MeterOptions, Meter } from './metrics/Meter'; +export { MeterProvider } from './metrics/MeterProvider'; +export { ValueType, Counter, Histogram, MetricOptions, Observable, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter, BatchObservableCallback, MetricAdvice, MetricAttributes, MetricAttributeValue, 
ObservableCallback, } from './metrics/Metric'; +export { BatchObservableResult, ObservableResult, } from './metrics/ObservableResult'; +export type { MetricsAPI } from './api/metrics'; +export { TextMapPropagator, TextMapSetter, TextMapGetter, defaultTextMapGetter, defaultTextMapSetter, } from './propagation/TextMapPropagator'; +export type { PropagationAPI } from './api/propagation'; +export { SpanAttributes, SpanAttributeValue } from './trace/attributes'; +export { Link } from './trace/link'; +export { ProxyTracer, TracerDelegator } from './trace/ProxyTracer'; +export { ProxyTracerProvider } from './trace/ProxyTracerProvider'; +export { Sampler } from './trace/Sampler'; +export { SamplingDecision, SamplingResult } from './trace/SamplingResult'; +export { SpanContext } from './trace/span_context'; +export { SpanKind } from './trace/span_kind'; +export { Span } from './trace/span'; +export { SpanOptions } from './trace/SpanOptions'; +export { SpanStatus, SpanStatusCode } from './trace/status'; +export { TraceFlags } from './trace/trace_flags'; +export { TraceState } from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export { TracerProvider } from './trace/tracer_provider'; +export { Tracer } from './trace/tracer'; +export { TracerOptions } from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export type { TraceAPI } from './api/trace'; +import { context } from './context-api'; +import { diag } from './diag-api'; +import { metrics } from './metrics-api'; +import { propagation } from './propagation-api'; +import { trace } from './trace-api'; +export { context, diag, metrics, propagation, trace }; +declare const _default: { + context: import("./api/context").ContextAPI; + diag: import("./api/diag").DiagAPI; + metrics: import("./api/metrics").MetricsAPI; + propagation: import("./api/propagation").PropagationAPI; + trace: import("./api/trace").TraceAPI; +}; +export default _default; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.js b/node_modules/@opentelemetry/api/build/src/index.js new file mode 100644 index 0000000..cb0a872 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.js @@ -0,0 +1,81 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.trace = exports.propagation = exports.metrics = exports.diag = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.createTraceState = exports.TraceFlags = exports.SpanStatusCode = exports.SpanKind = exports.SamplingDecision = exports.ProxyTracerProvider = exports.ProxyTracer = exports.defaultTextMapSetter = exports.defaultTextMapGetter = exports.ValueType = exports.createNoopMeter = exports.DiagLogLevel = exports.DiagConsoleLogger = exports.ROOT_CONTEXT = exports.createContextKey = exports.baggageEntryMetadataFromString = void 0; +var utils_1 = require("./baggage/utils"); +Object.defineProperty(exports, "baggageEntryMetadataFromString", { enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } }); +// Context APIs +var context_1 = require("./context/context"); +Object.defineProperty(exports, "createContextKey", { enumerable: true, get: function () { return context_1.createContextKey; } }); +Object.defineProperty(exports, "ROOT_CONTEXT", { enumerable: true, get: function () { return context_1.ROOT_CONTEXT; } }); +// Diag APIs +var consoleLogger_1 = require("./diag/consoleLogger"); +Object.defineProperty(exports, "DiagConsoleLogger", { enumerable: true, get: function () { return consoleLogger_1.DiagConsoleLogger; } }); +var types_1 = require("./diag/types"); +Object.defineProperty(exports, "DiagLogLevel", { enumerable: true, get: function () { return types_1.DiagLogLevel; } }); +// Metrics APIs +var NoopMeter_1 = require("./metrics/NoopMeter"); +Object.defineProperty(exports, "createNoopMeter", { enumerable: true, get: function () { return NoopMeter_1.createNoopMeter; } }); +var Metric_1 = require("./metrics/Metric"); +Object.defineProperty(exports, "ValueType", { enumerable: true, get: function () { return Metric_1.ValueType; } }); +// Propagation APIs +var TextMapPropagator_1 = require("./propagation/TextMapPropagator"); +Object.defineProperty(exports, "defaultTextMapGetter", { enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapGetter; } }); +Object.defineProperty(exports, "defaultTextMapSetter", { enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapSetter; } }); +var ProxyTracer_1 = require("./trace/ProxyTracer"); +Object.defineProperty(exports, "ProxyTracer", { enumerable: true, get: function () { return ProxyTracer_1.ProxyTracer; } }); +var ProxyTracerProvider_1 = require("./trace/ProxyTracerProvider"); +Object.defineProperty(exports, "ProxyTracerProvider", { enumerable: true, get: function () { return ProxyTracerProvider_1.ProxyTracerProvider; } }); +var SamplingResult_1 = require("./trace/SamplingResult"); +Object.defineProperty(exports, "SamplingDecision", { enumerable: true, get: function () { return SamplingResult_1.SamplingDecision; } }); +var span_kind_1 = require("./trace/span_kind"); +Object.defineProperty(exports, "SpanKind", { enumerable: true, get: function () { return span_kind_1.SpanKind; } }); +var status_1 = require("./trace/status"); +Object.defineProperty(exports, "SpanStatusCode", { enumerable: true, get: function () { return status_1.SpanStatusCode; } }); +var trace_flags_1 = require("./trace/trace_flags"); +Object.defineProperty(exports, "TraceFlags", { enumerable: true, get: function () { return trace_flags_1.TraceFlags; } }); +var utils_2 = require("./trace/internal/utils"); 
+Object.defineProperty(exports, "createTraceState", { enumerable: true, get: function () { return utils_2.createTraceState; } }); +var spancontext_utils_1 = require("./trace/spancontext-utils"); +Object.defineProperty(exports, "isSpanContextValid", { enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } }); +Object.defineProperty(exports, "isValidTraceId", { enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } }); +Object.defineProperty(exports, "isValidSpanId", { enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } }); +var invalid_span_constants_1 = require("./trace/invalid-span-constants"); +Object.defineProperty(exports, "INVALID_SPANID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } }); +Object.defineProperty(exports, "INVALID_TRACEID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } }); +Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } }); +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const context_api_1 = require("./context-api"); +Object.defineProperty(exports, "context", { enumerable: true, get: function () { return context_api_1.context; } }); +const diag_api_1 = require("./diag-api"); +Object.defineProperty(exports, "diag", { enumerable: true, get: function () { return diag_api_1.diag; } }); +const metrics_api_1 = require("./metrics-api"); +Object.defineProperty(exports, "metrics", { enumerable: true, get: function () { return metrics_api_1.metrics; } }); +const propagation_api_1 = require("./propagation-api"); +Object.defineProperty(exports, "propagation", { enumerable: true, get: function () { return propagation_api_1.propagation; } }); +const trace_api_1 = require("./trace-api"); +Object.defineProperty(exports, "trace", { enumerable: true, get: function () { return trace_api_1.trace; } }); +// Default export. 
+exports.default = { + context: context_api_1.context, + diag: diag_api_1.diag, + metrics: metrics_api_1.metrics, + propagation: propagation_api_1.propagation, + trace: trace_api_1.trace, +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.js.map b/node_modules/@opentelemetry/api/build/src/index.js.map new file mode 100644 index 0000000..a6739f2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,yCAAiE;AAAxD,uHAAA,8BAA8B,OAAA;AAKvC,eAAe;AACf,6CAAmE;AAA1D,2GAAA,gBAAgB,OAAA;AAAE,uGAAA,YAAY,OAAA;AAIvC,YAAY;AACZ,sDAAyD;AAAhD,kHAAA,iBAAiB,OAAA;AAC1B,sCAMsB;AAHpB,qGAAA,YAAY,OAAA;AAMd,eAAe;AACf,iDAAsD;AAA7C,4GAAA,eAAe,OAAA;AAGxB,2CAe0B;AAdxB,mGAAA,SAAS,OAAA;AAqBX,mBAAmB;AACnB,qEAMyC;AAFvC,yHAAA,oBAAoB,OAAA;AACpB,yHAAA,oBAAoB,OAAA;AAOtB,mDAAmE;AAA1D,0GAAA,WAAW,OAAA;AACpB,mEAAkE;AAAzD,0HAAA,mBAAmB,OAAA;AAE5B,yDAA0E;AAAjE,kHAAA,gBAAgB,OAAA;AAEzB,+CAA6C;AAApC,qGAAA,QAAQ,OAAA;AAGjB,yCAA4D;AAAvC,wGAAA,cAAc,OAAA;AACnC,mDAAiD;AAAxC,yGAAA,UAAU,OAAA;AAEnB,gDAA0D;AAAjD,yGAAA,gBAAgB,OAAA;AAIzB,+DAImC;AAHjC,uHAAA,kBAAkB,OAAA;AAClB,mHAAA,cAAc,OAAA;AACd,kHAAA,aAAa,OAAA;AAEf,yEAIwC;AAHtC,wHAAA,cAAc,OAAA;AACd,yHAAA,eAAe,OAAA;AACf,8HAAA,oBAAoB,OAAA;AAItB,sEAAsE;AACtE,qCAAqC;AACrC,+CAAwC;AAO/B,wFAPA,qBAAO,OAOA;AANhB,yCAAkC;AAMhB,qFANT,eAAI,OAMS;AALtB,+CAAwC;AAKhB,wFALf,qBAAO,OAKe;AAJ/B,uDAAgD;AAIf,4FAJxB,6BAAW,OAIwB;AAH5C,2CAAoC;AAGU,sFAHrC,iBAAK,OAGqC;AACnD,kBAAkB;AAClB,kBAAe;IACb,OAAO,EAAP,qBAAO;IACP,IAAI,EAAJ,eAAI;IACJ,OAAO,EAAP,qBAAO;IACP,WAAW,EAAX,6BAAW;IACX,KAAK,EAAL,iBAAK;CACN,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport { BaggageEntry, BaggageEntryMetadata, Baggage } from './baggage/types';\nexport { baggageEntryMetadataFromString } from './baggage/utils';\nexport { Exception } from './common/Exception';\nexport { HrTime, TimeInput } from './common/Time';\nexport { Attributes, AttributeValue } from './common/Attributes';\n\n// Context APIs\nexport { createContextKey, ROOT_CONTEXT } from './context/context';\nexport { Context, ContextManager } from './context/types';\nexport type { ContextAPI } from './api/context';\n\n// Diag APIs\nexport { DiagConsoleLogger } from './diag/consoleLogger';\nexport {\n DiagLogFunction,\n DiagLogger,\n DiagLogLevel,\n ComponentLoggerOptions,\n DiagLoggerOptions,\n} from './diag/types';\nexport type { DiagAPI } from './api/diag';\n\n// Metrics APIs\nexport { createNoopMeter } from './metrics/NoopMeter';\nexport { MeterOptions, Meter } from './metrics/Meter';\nexport { MeterProvider } from './metrics/MeterProvider';\nexport {\n ValueType,\n Counter,\n Histogram,\n MetricOptions,\n Observable,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n UpDownCounter,\n BatchObservableCallback,\n MetricAdvice,\n 
MetricAttributes,\n MetricAttributeValue,\n ObservableCallback,\n} from './metrics/Metric';\nexport {\n BatchObservableResult,\n ObservableResult,\n} from './metrics/ObservableResult';\nexport type { MetricsAPI } from './api/metrics';\n\n// Propagation APIs\nexport {\n TextMapPropagator,\n TextMapSetter,\n TextMapGetter,\n defaultTextMapGetter,\n defaultTextMapSetter,\n} from './propagation/TextMapPropagator';\nexport type { PropagationAPI } from './api/propagation';\n\n// Trace APIs\nexport { SpanAttributes, SpanAttributeValue } from './trace/attributes';\nexport { Link } from './trace/link';\nexport { ProxyTracer, TracerDelegator } from './trace/ProxyTracer';\nexport { ProxyTracerProvider } from './trace/ProxyTracerProvider';\nexport { Sampler } from './trace/Sampler';\nexport { SamplingDecision, SamplingResult } from './trace/SamplingResult';\nexport { SpanContext } from './trace/span_context';\nexport { SpanKind } from './trace/span_kind';\nexport { Span } from './trace/span';\nexport { SpanOptions } from './trace/SpanOptions';\nexport { SpanStatus, SpanStatusCode } from './trace/status';\nexport { TraceFlags } from './trace/trace_flags';\nexport { TraceState } from './trace/trace_state';\nexport { createTraceState } from './trace/internal/utils';\nexport { TracerProvider } from './trace/tracer_provider';\nexport { Tracer } from './trace/tracer';\nexport { TracerOptions } from './trace/tracer_options';\nexport {\n isSpanContextValid,\n isValidTraceId,\n isValidSpanId,\n} from './trace/spancontext-utils';\nexport {\n INVALID_SPANID,\n INVALID_TRACEID,\n INVALID_SPAN_CONTEXT,\n} from './trace/invalid-span-constants';\nexport type { TraceAPI } from './api/trace';\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { context } from './context-api';\nimport { diag } from './diag-api';\nimport { metrics } from './metrics-api';\nimport { propagation } from './propagation-api';\nimport { trace } from './trace-api';\n\n// Named export.\nexport { context, diag, metrics, propagation, trace };\n// Default export.\nexport default {\n context,\n diag,\n metrics,\n propagation,\n trace,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts b/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts new file mode 100644 index 0000000..320db97 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts @@ -0,0 +1,18 @@ +import { MeterProvider } from '../metrics/MeterProvider'; +import { ContextManager } from '../context/types'; +import { DiagLogger } from '../diag/types'; +import { TextMapPropagator } from '../propagation/TextMapPropagator'; +import type { TracerProvider } from '../trace/tracer_provider'; +export declare function registerGlobal(type: Type, instance: OTelGlobalAPI[Type], diag: DiagLogger, allowOverride?: boolean): boolean; +export declare function getGlobal(type: Type): OTelGlobalAPI[Type] | undefined; +export declare function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger): void; +declare type OTelGlobalAPI = { + version: string; + diag?: DiagLogger; + trace?: TracerProvider; + context?: ContextManager; + metrics?: MeterProvider; + propagation?: TextMapPropagator; +}; +export {}; +//# sourceMappingURL=global-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.js b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js new 
file mode 100644 index 0000000..11a1a44 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js @@ -0,0 +1,64 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; +const platform_1 = require("../platform"); +const version_1 = require("../version"); +const semver_1 = require("./semver"); +const major = version_1.VERSION.split('.')[0]; +const GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(`opentelemetry.js.api.${major}`); +const _global = platform_1._globalThis; +function registerGlobal(type, instance, diag, allowOverride = false) { + var _a; + const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: version_1.VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + const err = new Error(`@opentelemetry/api: Attempted duplicate registration of API: ${type}`); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== version_1.VERSION) { + // All registered APIs must be of the same version exactly + const err = new Error(`@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${version_1.VERSION}`); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug(`@opentelemetry/api: Registered a global for ${type} v${version_1.VERSION}.`); + return true; +} +exports.registerGlobal = registerGlobal; +function getGlobal(type) { + var _a, _b; + const globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !(0, semver_1.isCompatible)(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? 
void 0 : _b[type]; +} +exports.getGlobal = getGlobal; +function unregisterGlobal(type, diag) { + diag.debug(`@opentelemetry/api: Unregistering a global for ${type} v${version_1.VERSION}.`); + const api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +exports.unregisterGlobal = unregisterGlobal; +//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map new file mode 100644 index 0000000..ddca637 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"global-utils.js","sourceRoot":"","sources":["../../../src/internal/global-utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH,0CAA0C;AAG1C,wCAAqC;AACrC,qCAAwC;AAExC,MAAM,KAAK,GAAG,iBAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,MAAM,4BAA4B,GAAG,MAAM,CAAC,GAAG,CAC7C,wBAAwB,KAAK,EAAE,CAChC,CAAC;AAEF,MAAM,OAAO,GAAG,sBAAyB,CAAC;AAE1C,SAAgB,cAAc,CAC5B,IAAU,EACV,QAA6B,EAC7B,IAAgB,EAChB,aAAa,GAAG,KAAK;;IAErB,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC,4BAA4B,CAAC,GAAG,MAAA,OAAO,CAC1D,4BAA4B,CAC7B,mCAAI;QACH,OAAO,EAAE,iBAAO;KACjB,CAAC,CAAC;IAEH,IAAI,CAAC,aAAa,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE;QAC/B,yCAAyC;QACzC,MAAM,GAAG,GAAG,IAAI,KAAK,CACnB,gEAAgE,IAAI,EAAE,CACvE,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,IAAI,GAAG,CAAC,OAAO,KAAK,iBAAO,EAAE;QAC3B,0DAA0D;QAC1D,MAAM,GAAG,GAAG,IAAI,KAAK,CACnB,gDAAgD,GAAG,CAAC,OAAO,QAAQ,IAAI,8CAA8C,iBAAO,EAAE,CAC/H,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,GAAG,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;IACrB,IAAI,CAAC,KAAK,CACR,+CAA+C,IAAI,KAAK,iBAAO,GAAG,CACnE,CAAC;IAEF,OAAO,IAAI,CAAC;AACd,CAAC;AApCD,wCAoCC;AAED,SAAgB,SAAS,CACvB,IAAU;;IAEV,MAAM,aAAa,GAAG,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAE,OAAO,CAAC;IACrE,IAAI,CAAC,aAAa,IAAI,CAAC,IAAA,qBAAY,EAAC,aAAa,CAAC,EAAE;QAClD,OAAO;KACR;IACD,OAAO,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAG,IAAI,CAAC,CAAC;AACvD,CAAC;AARD,8BAQC;AAED,SAAgB,gBAAgB,CAAC,IAAyB,EAAE,IAAgB;IAC1E,IAAI,CAAC,KAAK,CACR,kDAAkD,IAAI,KAAK,iBAAO,GAAG,CACtE,CAAC;IACF,MAAM,GAAG,GAAG,OAAO,CAAC,4BAA4B,CAAC,CAAC;IAElD,IAAI,GAAG,EAAE;QACP,OAAO,GAAG,CAAC,IAAI,CAAC,CAAC;KAClB;AACH,CAAC;AATD,4CASC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { MeterProvider } from '../metrics/MeterProvider';\nimport { ContextManager } from '../context/types';\nimport { DiagLogger } from '../diag/types';\nimport { _globalThis } from '../platform';\nimport { TextMapPropagator } from '../propagation/TextMapPropagator';\nimport type { TracerProvider } from '../trace/tracer_provider';\nimport { VERSION } from '../version';\nimport { isCompatible } from './semver';\n\nconst major = VERSION.split('.')[0];\nconst GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(\n 
`opentelemetry.js.api.${major}`\n);\n\nconst _global = _globalThis as OTelGlobal;\n\nexport function registerGlobal(\n type: Type,\n instance: OTelGlobalAPI[Type],\n diag: DiagLogger,\n allowOverride = false\n): boolean {\n const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = _global[\n GLOBAL_OPENTELEMETRY_API_KEY\n ] ?? {\n version: VERSION,\n });\n\n if (!allowOverride && api[type]) {\n // already registered an API of this type\n const err = new Error(\n `@opentelemetry/api: Attempted duplicate registration of API: ${type}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n if (api.version !== VERSION) {\n // All registered APIs must be of the same version exactly\n const err = new Error(\n `@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${VERSION}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n api[type] = instance;\n diag.debug(\n `@opentelemetry/api: Registered a global for ${type} v${VERSION}.`\n );\n\n return true;\n}\n\nexport function getGlobal(\n type: Type\n): OTelGlobalAPI[Type] | undefined {\n const globalVersion = _global[GLOBAL_OPENTELEMETRY_API_KEY]?.version;\n if (!globalVersion || !isCompatible(globalVersion)) {\n return;\n }\n return _global[GLOBAL_OPENTELEMETRY_API_KEY]?.[type];\n}\n\nexport function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger) {\n diag.debug(\n `@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`\n );\n const api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n\n if (api) {\n delete api[type];\n }\n}\n\ntype OTelGlobal = {\n [GLOBAL_OPENTELEMETRY_API_KEY]?: OTelGlobalAPI;\n};\n\ntype OTelGlobalAPI = {\n version: string;\n\n diag?: DiagLogger;\n trace?: TracerProvider;\n context?: ContextManager;\n metrics?: MeterProvider;\n propagation?: TextMapPropagator;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts b/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts new file mode 100644 index 0000000..d9f4259 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts @@ -0,0 +1,34 @@ +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. + * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export declare function _makeCompatibilityCheck(ownVersion: string): (globalVersion: string) => boolean; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export declare const isCompatible: (globalVersion: string) => boolean; +//# sourceMappingURL=semver.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.js b/node_modules/@opentelemetry/api/build/src/internal/semver.js new file mode 100644 index 0000000..7a073b2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.js @@ -0,0 +1,122 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isCompatible = exports._makeCompatibilityCheck = void 0; +const version_1 = require("../version"); +const re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +function _makeCompatibilityCheck(ownVersion) { + const acceptedVersions = new Set([ownVersion]); + const rejectedVersions = new Set(); + const myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return () => false; + } + const ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + const globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + const globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +exports._makeCompatibilityCheck = _makeCompatibilityCheck; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); +//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.js.map b/node_modules/@opentelemetry/api/build/src/internal/semver.js.map new file mode 100644 index 0000000..d58dc78 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.js.map @@ -0,0 +1 @@ +{"version":3,"file":"semver.js","sourceRoot":"","sources":["../../../src/internal/semver.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,wCAAqC;AAErC,MAAM,EAAE,GAAG,+BAA+B,CAAC;AAE3C;;;;;;;;;;;;;;;GAeG;AACH,SAAgB,uBAAuB,CACrC,UAAkB;IAElB,MAAM,gBAAgB,GAAG,IAAI,GAAG,CAAS,CAAC,UAAU,CAAC,CAAC,CAAC;IACvD,MAAM,gBAAgB,GAAG,IAAI,GAAG,EAAU,CAAC;IAE3C,MAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC5C,IAAI,CAAC,cAAc,EAAE;QACnB,6DAA6D;QAC7D,OAAO,GAAG,EAAE,CAAC,KAAK,CAAC;KACpB;IAED,MAAM,gBAAgB,GAAG;QACvB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,UAAU,EAAE,cAAc,CAAC,CAAC,CAAC;KAC9B,CAAC;IAEF,kEAAkE;IAClE,IAAI,gBAAgB,CAAC,UAAU,IAAI,IAAI,EAAE;QACvC,OAAO,SAAS,YAAY,CAAC,aAAqB;YAChD,OAAO,aAAa,KAAK,UAAU,CAAC;QACtC,CAAC,CAAC;KACH;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,SAAS,YAAY,CAAC,aAAqB;QAChD,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,IAAI,CAAC;SACb;QAED,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;QAED,MAAM,kBAAkB,GAAG,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QACnD,IAAI,CAAC,kBAAkB,EAAE;YACvB,6BAA6B;YAC7B,sDAAsD;YACtD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,MAAM,mBAAmB,GAAG;YAC1B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,UAAU,EAAE,kBAAkB,CAAC,CAAC,CAAC;SAClC,CAAC;QAEF,qEAAqE;QACrE,IAAI,mBAAmB,CAAC,UAAU,IAAI,IAAI,EAAE;YAC1C,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,4BAA4B;QAC5B,IAAI,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK,EAAE;YACxD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,KAAK,CAAC,EAAE;YAChC,IACE,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK;gBACpD,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EACnD;gBACA,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EAAE;YACvD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC,CAAC;AACJ,CAAC;AAtFD,0DAsFC;AAED;;;;;;;;;;;;;;GAcG;AACU,QAAA,YAAY,GAAG,uBAAuB,CAAC,iBAAO,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the 
\"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { VERSION } from '../version';\n\nconst re = /^(\\d+)\\.(\\d+)\\.(\\d+)(-(.+))?$/;\n\n/**\n * Create a function to test an API version to see if it is compatible with the provided ownVersion.\n *\n * The returned function has the following semantics:\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param ownVersion version which should be checked against\n */\nexport function _makeCompatibilityCheck(\n ownVersion: string\n): (globalVersion: string) => boolean {\n const acceptedVersions = new Set([ownVersion]);\n const rejectedVersions = new Set();\n\n const myVersionMatch = ownVersion.match(re);\n if (!myVersionMatch) {\n // we cannot guarantee compatibility so we always return noop\n return () => false;\n }\n\n const ownVersionParsed = {\n major: +myVersionMatch[1],\n minor: +myVersionMatch[2],\n patch: +myVersionMatch[3],\n prerelease: myVersionMatch[4],\n };\n\n // if ownVersion has a prerelease tag, versions must match exactly\n if (ownVersionParsed.prerelease != null) {\n return function isExactmatch(globalVersion: string): boolean {\n return globalVersion === ownVersion;\n };\n }\n\n function _reject(v: string) {\n rejectedVersions.add(v);\n return false;\n }\n\n function _accept(v: string) {\n acceptedVersions.add(v);\n return true;\n }\n\n return function isCompatible(globalVersion: string): boolean {\n if (acceptedVersions.has(globalVersion)) {\n return true;\n }\n\n if (rejectedVersions.has(globalVersion)) {\n return false;\n }\n\n const globalVersionMatch = globalVersion.match(re);\n if (!globalVersionMatch) {\n // cannot parse other version\n // we cannot guarantee compatibility so we always noop\n return _reject(globalVersion);\n }\n\n const globalVersionParsed = {\n major: +globalVersionMatch[1],\n minor: +globalVersionMatch[2],\n patch: +globalVersionMatch[3],\n prerelease: globalVersionMatch[4],\n };\n\n // if globalVersion has a prerelease tag, versions must match exactly\n if (globalVersionParsed.prerelease != null) {\n return _reject(globalVersion);\n }\n\n // major versions must match\n if (ownVersionParsed.major !== globalVersionParsed.major) {\n return _reject(globalVersion);\n }\n\n if (ownVersionParsed.major === 0) {\n if (\n ownVersionParsed.minor === globalVersionParsed.minor &&\n ownVersionParsed.patch <= globalVersionParsed.patch\n ) {\n return _accept(globalVersion);\n }\n\n return 
_reject(globalVersion);\n }\n\n if (ownVersionParsed.minor <= globalVersionParsed.minor) {\n return _accept(globalVersion);\n }\n\n return _reject(globalVersion);\n };\n}\n\n/**\n * Test an API version to see if it is compatible with this API.\n *\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param version version of the API requesting an instance of the global API\n */\nexport const isCompatible = _makeCompatibilityCheck(VERSION);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics-api.d.ts b/node_modules/@opentelemetry/api/build/src/metrics-api.d.ts new file mode 100644 index 0000000..26d539c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics-api.d.ts @@ -0,0 +1,4 @@ +import { MetricsAPI } from './api/metrics'; +/** Entrypoint for metrics API */ +export declare const metrics: MetricsAPI; +//# sourceMappingURL=metrics-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics-api.js b/node_modules/@opentelemetry/api/build/src/metrics-api.js new file mode 100644 index 0000000..987f7c2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics-api.js @@ -0,0 +1,24 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.metrics = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. 
+const metrics_1 = require("./api/metrics"); +/** Entrypoint for metrics API */ +exports.metrics = metrics_1.MetricsAPI.getInstance(); +//# sourceMappingURL=metrics-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics-api.js.map b/node_modules/@opentelemetry/api/build/src/metrics-api.js.map new file mode 100644 index 0000000..26e1802 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"metrics-api.js","sourceRoot":"","sources":["../../src/metrics-api.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,2CAA2C;AAC3C,iCAAiC;AACpB,QAAA,OAAO,GAAG,oBAAU,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { MetricsAPI } from './api/metrics';\n/** Entrypoint for metrics API */\nexport const metrics = MetricsAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/Meter.d.ts b/node_modules/@opentelemetry/api/build/src/metrics/Meter.d.ts new file mode 100644 index 0000000..fb26fc8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/Meter.d.ts @@ -0,0 +1,104 @@ +import { BatchObservableCallback, Counter, Histogram, MetricAttributes, MetricOptions, Observable, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter } from './Metric'; +/** + * An interface describes additional metadata of a meter. + */ +export interface MeterOptions { + /** + * The schemaUrl of the meter or instrumentation library + */ + schemaUrl?: string; +} +/** + * An interface to allow the recording metrics. + * + * {@link Metric}s are used for recording pre-defined aggregation (`Counter`), + * or raw values (`Histogram`) in which the aggregation and attributes + * for the exported metric are deferred. + */ +export interface Meter { + /** + * Creates and returns a new `Histogram`. + * @param name the name of the metric. + * @param [options] the metric options. + */ + createHistogram(name: string, options?: MetricOptions): Histogram; + /** + * Creates a new `Counter` metric. Generally, this kind of metric when the + * value is a quantity, the sum is of primary interest, and the event count + * and value distribution are not of primary interest. + * @param name the name of the metric. + * @param [options] the metric options. + */ + createCounter(name: string, options?: MetricOptions): Counter; + /** + * Creates a new `UpDownCounter` metric. UpDownCounter is a synchronous + * instrument and very similar to Counter except that Add(increment) + * supports negative increments. It is generally useful for capturing changes + * in an amount of resources used, or any quantity that rises and falls + * during a request. + * Example uses for UpDownCounter: + *
<ol> + *   <li> count the number of active requests. </li> + *   <li> count memory in use by instrumenting new and delete. </li> + *   <li> count queue size by instrumenting enqueue and dequeue. </li> + *   <li> count semaphore up and down operations. </li> + * </ol>
                  + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createUpDownCounter(name: string, options?: MetricOptions): UpDownCounter; + /** + * Creates a new `ObservableGauge` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableGauge(name: string, options?: MetricOptions): ObservableGauge; + /** + * Creates a new `ObservableCounter` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableCounter(name: string, options?: MetricOptions): ObservableCounter; + /** + * Creates a new `ObservableUpDownCounter` metric. + * + * The callback SHOULD be safe to be invoked concurrently. + * + * @param name the name of the metric. + * @param [options] the metric options. + */ + createObservableUpDownCounter(name: string, options?: MetricOptions): ObservableUpDownCounter; + /** + * Sets up a function that will be called whenever a metric collection is + * initiated. + * + * If the function is already in the list of callbacks for this Observable, + * the function is not added a second time. + * + * Only the associated observables can be observed in the callback. + * Measurements of observables that are not associated observed in the + * callback are dropped. + * + * @param callback the batch observable callback + * @param observables the observables associated with this batch observable callback + */ + addBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]): void; + /** + * Removes a callback previously registered with {@link Meter.addBatchObservableCallback}. + * + * The callback to be removed is identified using a combination of the callback itself, + * and the set of the observables associated with it. + * + * @param callback the batch observable callback + * @param observables the observables associated with this batch observable callback + */ + removeBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]): void; +} +//# sourceMappingURL=Meter.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/Meter.js b/node_modules/@opentelemetry/api/build/src/metrics/Meter.js new file mode 100644 index 0000000..56b930c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/Meter.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Meter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/Meter.js.map b/node_modules/@opentelemetry/api/build/src/metrics/Meter.js.map new file mode 100644 index 0000000..9f78c10 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/Meter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Meter.js","sourceRoot":"","sources":["../../../src/metrics/Meter.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n BatchObservableCallback,\n Counter,\n Histogram,\n MetricAttributes,\n MetricOptions,\n Observable,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n UpDownCounter,\n} from './Metric';\n\n/**\n * An interface describes additional metadata of a meter.\n */\nexport interface MeterOptions {\n /**\n * The schemaUrl of the meter or instrumentation library\n */\n schemaUrl?: string;\n}\n\n/**\n * An interface to allow the recording metrics.\n *\n * {@link Metric}s are used for recording pre-defined aggregation (`Counter`),\n * or raw values (`Histogram`) in which the aggregation and attributes\n * for the exported metric are deferred.\n */\nexport interface Meter {\n /**\n * Creates and returns a new `Histogram`.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createHistogram(\n name: string,\n options?: MetricOptions\n ): Histogram;\n\n /**\n * Creates a new `Counter` metric. Generally, this kind of metric when the\n * value is a quantity, the sum is of primary interest, and the event count\n * and value distribution are not of primary interest.\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createCounter(\n name: string,\n options?: MetricOptions\n ): Counter;\n\n /**\n * Creates a new `UpDownCounter` metric. UpDownCounter is a synchronous\n * instrument and very similar to Counter except that Add(increment)\n * supports negative increments. It is generally useful for capturing changes\n * in an amount of resources used, or any quantity that rises and falls\n * during a request.\n * Example uses for UpDownCounter:\n *
<ol>\n *   <li> count the number of active requests. </li>\n *   <li> count memory in use by instrumenting new and delete. </li>\n *   <li> count queue size by instrumenting enqueue and dequeue. </li>\n *   <li> count semaphore up and down operations. </li>\n * </ol>
                  \n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): UpDownCounter;\n\n /**\n * Creates a new `ObservableGauge` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableGauge<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableGauge;\n\n /**\n * Creates a new `ObservableCounter` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableCounter;\n\n /**\n * Creates a new `ObservableUpDownCounter` metric.\n *\n * The callback SHOULD be safe to be invoked concurrently.\n *\n * @param name the name of the metric.\n * @param [options] the metric options.\n */\n createObservableUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n name: string,\n options?: MetricOptions\n ): ObservableUpDownCounter;\n\n /**\n * Sets up a function that will be called whenever a metric collection is\n * initiated.\n *\n * If the function is already in the list of callbacks for this Observable,\n * the function is not added a second time.\n *\n * Only the associated observables can be observed in the callback.\n * Measurements of observables that are not associated observed in the\n * callback are dropped.\n *\n * @param callback the batch observable callback\n * @param observables the observables associated with this batch observable callback\n */\n addBatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n callback: BatchObservableCallback,\n observables: Observable[]\n ): void;\n\n /**\n * Removes a callback previously registered with {@link Meter.addBatchObservableCallback}.\n *\n * The callback to be removed is identified using a combination of the callback itself,\n * and the set of the observables associated with it.\n *\n * @param callback the batch observable callback\n * @param observables the observables associated with this batch observable callback\n */\n removeBatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n >(\n callback: BatchObservableCallback,\n observables: Observable[]\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.d.ts b/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.d.ts new file mode 100644 index 0000000..6c08cc3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.d.ts @@ -0,0 +1,17 @@ +import { Meter, MeterOptions } from './Meter'; +/** + * A registry for creating named {@link Meter}s. + */ +export interface MeterProvider { + /** + * Returns a Meter, creating one if one with the given name, version, and + * schemaUrl pair is not already created. + * + * @param name The name of the meter or instrumentation library. + * @param version The version of the meter or instrumentation library. + * @param options The options of the meter or instrumentation library. 
+ * @returns Meter A Meter with the given name and version + */ + getMeter(name: string, version?: string, options?: MeterOptions): Meter; +} +//# sourceMappingURL=MeterProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js b/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js new file mode 100644 index 0000000..e94205e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=MeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js.map b/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js.map new file mode 100644 index 0000000..80c07b7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"MeterProvider.js","sourceRoot":"","sources":["../../../src/metrics/MeterProvider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, MeterOptions } from './Meter';\n\n/**\n * A registry for creating named {@link Meter}s.\n */\nexport interface MeterProvider {\n /**\n * Returns a Meter, creating one if one with the given name, version, and\n * schemaUrl pair is not already created.\n *\n * @param name The name of the meter or instrumentation library.\n * @param version The version of the meter or instrumentation library.\n * @param options The options of the meter or instrumentation library.\n * @returns Meter A Meter with the given name and version\n */\n getMeter(name: string, version?: string, options?: MeterOptions): Meter;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/Metric.d.ts b/node_modules/@opentelemetry/api/build/src/metrics/Metric.d.ts new file mode 100644 index 0000000..87a41a8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/Metric.d.ts @@ -0,0 +1,109 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +import { Context } from '../context/types'; +import { BatchObservableResult, ObservableResult } from './ObservableResult'; +/** + * Advisory options influencing 
aggregation configuration parameters. + * @experimental + */ +export interface MetricAdvice { + /** + * Hint the explicit bucket boundaries for SDK if the metric is been + * aggregated with a HistogramAggregator. + */ + explicitBucketBoundaries?: number[]; +} +/** + * Options needed for metric creation + */ +export interface MetricOptions { + /** + * The description of the Metric. + * @default '' + */ + description?: string; + /** + * The unit of the Metric values. + * @default '' + */ + unit?: string; + /** + * Indicates the type of the recorded value. + * @default {@link ValueType.DOUBLE} + */ + valueType?: ValueType; + /** + * The advice influencing aggregation configuration parameters. + * @experimental + */ + advice?: MetricAdvice; +} +/** The Type of value. It describes how the data is reported. */ +export declare enum ValueType { + INT = 0, + DOUBLE = 1 +} +/** + * Counter is the most common synchronous instrument. This instrument supports + * an `Add(increment)` function for reporting a sum, and is restricted to + * non-negative increments. The default aggregation is Sum, as for any additive + * instrument. + * + * Example uses for Counter: + *
<ol> + *   <li> count the number of bytes received. </li> + *   <li> count the number of requests completed. </li> + *   <li> count the number of accounts created. </li> + *   <li> count the number of checkpoints run. </li> + *   <li> count the number of 5xx errors. </li> + * </ol>
                      + */ +export interface Counter { + /** + * Increment value of counter by the input. Inputs must not be negative. + */ + add(value: number, attributes?: AttributesTypes, context?: Context): void; +} +export interface UpDownCounter { + /** + * Increment value of counter by the input. Inputs may be negative. + */ + add(value: number, attributes?: AttributesTypes, context?: Context): void; +} +export interface Histogram { + /** + * Records a measurement. Value of the measurement must not be negative. + */ + record(value: number, attributes?: AttributesTypes, context?: Context): void; +} +/** + * @deprecated please use {@link Attributes} + */ +export declare type MetricAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type MetricAttributeValue = AttributeValue; +/** + * The observable callback for Observable instruments. + */ +export declare type ObservableCallback = (observableResult: ObservableResult) => void | Promise; +/** + * The observable callback for a batch of Observable instruments. + */ +export declare type BatchObservableCallback = (observableResult: BatchObservableResult) => void | Promise; +export interface Observable { + /** + * Sets up a function that will be called whenever a metric collection is initiated. + * + * If the function is already in the list of callbacks for this Observable, the function is not added a second time. + */ + addCallback(callback: ObservableCallback): void; + /** + * Removes a callback previously registered with {@link Observable.addCallback}. + */ + removeCallback(callback: ObservableCallback): void; +} +export declare type ObservableCounter = Observable; +export declare type ObservableUpDownCounter = Observable; +export declare type ObservableGauge = Observable; +//# sourceMappingURL=Metric.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/Metric.js b/node_modules/@opentelemetry/api/build/src/metrics/Metric.js new file mode 100644 index 0000000..4966c3d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/Metric.js @@ -0,0 +1,25 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ValueType = void 0; +/** The Type of value. It describes how the data is reported. 
*/ +var ValueType; +(function (ValueType) { + ValueType[ValueType["INT"] = 0] = "INT"; + ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; +})(ValueType = exports.ValueType || (exports.ValueType = {})); +//# sourceMappingURL=Metric.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/Metric.js.map b/node_modules/@opentelemetry/api/build/src/metrics/Metric.js.map new file mode 100644 index 0000000..3895951 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/Metric.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Metric.js","sourceRoot":"","sources":["../../../src/metrics/Metric.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AA+CH,gEAAgE;AAChE,IAAY,SAGX;AAHD,WAAY,SAAS;IACnB,uCAAG,CAAA;IACH,6CAAM,CAAA;AACR,CAAC,EAHW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QAGpB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\nimport { Context } from '../context/types';\nimport { BatchObservableResult, ObservableResult } from './ObservableResult';\n\n/**\n * Advisory options influencing aggregation configuration parameters.\n * @experimental\n */\nexport interface MetricAdvice {\n /**\n * Hint the explicit bucket boundaries for SDK if the metric is been\n * aggregated with a HistogramAggregator.\n */\n explicitBucketBoundaries?: number[];\n}\n\n/**\n * Options needed for metric creation\n */\nexport interface MetricOptions {\n /**\n * The description of the Metric.\n * @default ''\n */\n description?: string;\n\n /**\n * The unit of the Metric values.\n * @default ''\n */\n unit?: string;\n\n /**\n * Indicates the type of the recorded value.\n * @default {@link ValueType.DOUBLE}\n */\n valueType?: ValueType;\n\n /**\n * The advice influencing aggregation configuration parameters.\n * @experimental\n */\n advice?: MetricAdvice;\n}\n\n/** The Type of value. It describes how the data is reported. */\nexport enum ValueType {\n INT,\n DOUBLE,\n}\n\n/**\n * Counter is the most common synchronous instrument. This instrument supports\n * an `Add(increment)` function for reporting a sum, and is restricted to\n * non-negative increments. The default aggregation is Sum, as for any additive\n * instrument.\n *\n * Example uses for Counter:\n *
<ol>\n *   <li> count the number of bytes received. </li>\n *   <li> count the number of requests completed. </li>\n *   <li> count the number of accounts created. </li>\n *   <li> count the number of checkpoints run. </li>\n *   <li> count the number of 5xx errors. </li>\n * </ol>
                          \n */\nexport interface Counter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Increment value of counter by the input. Inputs must not be negative.\n */\n add(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\nexport interface UpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Increment value of counter by the input. Inputs may be negative.\n */\n add(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\nexport interface Histogram<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Records a measurement. Value of the measurement must not be negative.\n */\n record(value: number, attributes?: AttributesTypes, context?: Context): void;\n}\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type MetricAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type MetricAttributeValue = AttributeValue;\n\n/**\n * The observable callback for Observable instruments.\n */\nexport type ObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = (\n observableResult: ObservableResult\n) => void | Promise;\n\n/**\n * The observable callback for a batch of Observable instruments.\n */\nexport type BatchObservableCallback<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = (\n observableResult: BatchObservableResult\n) => void | Promise;\n\nexport interface Observable<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Sets up a function that will be called whenever a metric collection is initiated.\n *\n * If the function is already in the list of callbacks for this Observable, the function is not added a second time.\n */\n addCallback(callback: ObservableCallback): void;\n\n /**\n * Removes a callback previously registered with {@link Observable.addCallback}.\n */\n removeCallback(callback: ObservableCallback): void;\n}\n\nexport type ObservableCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\nexport type ObservableUpDownCounter<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\nexport type ObservableGauge<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> = Observable;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.d.ts b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.d.ts new file mode 100644 index 0000000..0edbd2d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.d.ts @@ -0,0 +1,74 @@ +import { Meter } from './Meter'; +import { BatchObservableCallback, Counter, Histogram, MetricOptions, ObservableCallback, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter, MetricAttributes, Observable } from './Metric'; +/** + * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses + * constant NoopMetrics for all of its methods. 
+ */ +export declare class NoopMeter implements Meter { + constructor(); + /** + * @see {@link Meter.createHistogram} + */ + createHistogram(_name: string, _options?: MetricOptions): Histogram; + /** + * @see {@link Meter.createCounter} + */ + createCounter(_name: string, _options?: MetricOptions): Counter; + /** + * @see {@link Meter.createUpDownCounter} + */ + createUpDownCounter(_name: string, _options?: MetricOptions): UpDownCounter; + /** + * @see {@link Meter.createObservableGauge} + */ + createObservableGauge(_name: string, _options?: MetricOptions): ObservableGauge; + /** + * @see {@link Meter.createObservableCounter} + */ + createObservableCounter(_name: string, _options?: MetricOptions): ObservableCounter; + /** + * @see {@link Meter.createObservableUpDownCounter} + */ + createObservableUpDownCounter(_name: string, _options?: MetricOptions): ObservableUpDownCounter; + /** + * @see {@link Meter.addBatchObservableCallback} + */ + addBatchObservableCallback(_callback: BatchObservableCallback, _observables: Observable[]): void; + /** + * @see {@link Meter.removeBatchObservableCallback} + */ + removeBatchObservableCallback(_callback: BatchObservableCallback): void; +} +export declare class NoopMetric { +} +export declare class NoopCounterMetric extends NoopMetric implements Counter { + add(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopUpDownCounterMetric extends NoopMetric implements UpDownCounter { + add(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopHistogramMetric extends NoopMetric implements Histogram { + record(_value: number, _attributes: MetricAttributes): void; +} +export declare class NoopObservableMetric { + addCallback(_callback: ObservableCallback): void; + removeCallback(_callback: ObservableCallback): void; +} +export declare class NoopObservableCounterMetric extends NoopObservableMetric implements ObservableCounter { +} +export declare class NoopObservableGaugeMetric extends NoopObservableMetric implements ObservableGauge { +} +export declare class NoopObservableUpDownCounterMetric extends NoopObservableMetric implements ObservableUpDownCounter { +} +export declare const NOOP_METER: NoopMeter; +export declare const NOOP_COUNTER_METRIC: NoopCounterMetric; +export declare const NOOP_HISTOGRAM_METRIC: NoopHistogramMetric; +export declare const NOOP_UP_DOWN_COUNTER_METRIC: NoopUpDownCounterMetric; +export declare const NOOP_OBSERVABLE_COUNTER_METRIC: NoopObservableCounterMetric; +export declare const NOOP_OBSERVABLE_GAUGE_METRIC: NoopObservableGaugeMetric; +export declare const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC: NoopObservableUpDownCounterMetric; +/** + * Create a no-op Meter + */ +export declare function createNoopMeter(): Meter; +//# sourceMappingURL=NoopMeter.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js new file mode 100644 index 0000000..4c7c922 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js @@ -0,0 +1,116 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createNoopMeter = exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = exports.NOOP_OBSERVABLE_GAUGE_METRIC = exports.NOOP_OBSERVABLE_COUNTER_METRIC = exports.NOOP_UP_DOWN_COUNTER_METRIC = exports.NOOP_HISTOGRAM_METRIC = exports.NOOP_COUNTER_METRIC = exports.NOOP_METER = exports.NoopObservableUpDownCounterMetric = exports.NoopObservableGaugeMetric = exports.NoopObservableCounterMetric = exports.NoopObservableMetric = exports.NoopHistogramMetric = exports.NoopUpDownCounterMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0; +/** + * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses + * constant NoopMetrics for all of its methods. + */ +class NoopMeter { + constructor() { } + /** + * @see {@link Meter.createHistogram} + */ + createHistogram(_name, _options) { + return exports.NOOP_HISTOGRAM_METRIC; + } + /** + * @see {@link Meter.createCounter} + */ + createCounter(_name, _options) { + return exports.NOOP_COUNTER_METRIC; + } + /** + * @see {@link Meter.createUpDownCounter} + */ + createUpDownCounter(_name, _options) { + return exports.NOOP_UP_DOWN_COUNTER_METRIC; + } + /** + * @see {@link Meter.createObservableGauge} + */ + createObservableGauge(_name, _options) { + return exports.NOOP_OBSERVABLE_GAUGE_METRIC; + } + /** + * @see {@link Meter.createObservableCounter} + */ + createObservableCounter(_name, _options) { + return exports.NOOP_OBSERVABLE_COUNTER_METRIC; + } + /** + * @see {@link Meter.createObservableUpDownCounter} + */ + createObservableUpDownCounter(_name, _options) { + return exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; + } + /** + * @see {@link Meter.addBatchObservableCallback} + */ + addBatchObservableCallback(_callback, _observables) { } + /** + * @see {@link Meter.removeBatchObservableCallback} + */ + removeBatchObservableCallback(_callback) { } +} +exports.NoopMeter = NoopMeter; +class NoopMetric { +} +exports.NoopMetric = NoopMetric; +class NoopCounterMetric extends NoopMetric { + add(_value, _attributes) { } +} +exports.NoopCounterMetric = NoopCounterMetric; +class NoopUpDownCounterMetric extends NoopMetric { + add(_value, _attributes) { } +} +exports.NoopUpDownCounterMetric = NoopUpDownCounterMetric; +class NoopHistogramMetric extends NoopMetric { + record(_value, _attributes) { } +} +exports.NoopHistogramMetric = NoopHistogramMetric; +class NoopObservableMetric { + addCallback(_callback) { } + removeCallback(_callback) { } +} +exports.NoopObservableMetric = NoopObservableMetric; +class NoopObservableCounterMetric extends NoopObservableMetric { +} +exports.NoopObservableCounterMetric = NoopObservableCounterMetric; +class NoopObservableGaugeMetric extends NoopObservableMetric { +} +exports.NoopObservableGaugeMetric = NoopObservableGaugeMetric; +class NoopObservableUpDownCounterMetric extends NoopObservableMetric { +} +exports.NoopObservableUpDownCounterMetric = NoopObservableUpDownCounterMetric; +exports.NOOP_METER = new NoopMeter(); +// Synchronous instruments +exports.NOOP_COUNTER_METRIC = new NoopCounterMetric(); 
+exports.NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); +exports.NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); +// Asynchronous instruments +exports.NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); +exports.NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); +exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); +/** + * Create a no-op Meter + */ +function createNoopMeter() { + return exports.NOOP_METER; +} +exports.createNoopMeter = createNoopMeter; +//# sourceMappingURL=NoopMeter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js.map b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js.map new file mode 100644 index 0000000..6fe660f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopMeter.js","sourceRoot":"","sources":["../../../src/metrics/NoopMeter.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAiBH;;;GAGG;AACH,MAAa,SAAS;IACpB,gBAAe,CAAC;IAEhB;;OAEG;IACH,eAAe,CAAC,KAAa,EAAE,QAAwB;QACrD,OAAO,6BAAqB,CAAC;IAC/B,CAAC;IAED;;OAEG;IACH,aAAa,CAAC,KAAa,EAAE,QAAwB;QACnD,OAAO,2BAAmB,CAAC;IAC7B,CAAC;IAED;;OAEG;IACH,mBAAmB,CAAC,KAAa,EAAE,QAAwB;QACzD,OAAO,mCAA2B,CAAC;IACrC,CAAC;IAED;;OAEG;IACH,qBAAqB,CACnB,KAAa,EACb,QAAwB;QAExB,OAAO,oCAA4B,CAAC;IACtC,CAAC;IAED;;OAEG;IACH,uBAAuB,CACrB,KAAa,EACb,QAAwB;QAExB,OAAO,sCAA8B,CAAC;IACxC,CAAC;IAED;;OAEG;IACH,6BAA6B,CAC3B,KAAa,EACb,QAAwB;QAExB,OAAO,8CAAsC,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,0BAA0B,CACxB,SAAkC,EAClC,YAA0B,IACnB,CAAC;IAEV;;OAEG;IACH,6BAA6B,CAAC,SAAkC,IAAS,CAAC;CAC3E;AAlED,8BAkEC;AAED,MAAa,UAAU;CAAG;AAA1B,gCAA0B;AAE1B,MAAa,iBAAkB,SAAQ,UAAU;IAC/C,GAAG,CAAC,MAAc,EAAE,WAA6B,IAAS,CAAC;CAC5D;AAFD,8CAEC;AAED,MAAa,uBACX,SAAQ,UAAU;IAGlB,GAAG,CAAC,MAAc,EAAE,WAA6B,IAAS,CAAC;CAC5D;AALD,0DAKC;AAED,MAAa,mBAAoB,SAAQ,UAAU;IACjD,MAAM,CAAC,MAAc,EAAE,WAA6B,IAAS,CAAC;CAC/D;AAFD,kDAEC;AAED,MAAa,oBAAoB;IAC/B,WAAW,CAAC,SAA6B,IAAG,CAAC;IAE7C,cAAc,CAAC,SAA6B,IAAG,CAAC;CACjD;AAJD,oDAIC;AAED,MAAa,2BACX,SAAQ,oBAAoB;CACG;AAFjC,kEAEiC;AAEjC,MAAa,yBACX,SAAQ,oBAAoB;CACC;AAF/B,8DAE+B;AAE/B,MAAa,iCACX,SAAQ,oBAAoB;CACS;AAFvC,8EAEuC;AAE1B,QAAA,UAAU,GAAG,IAAI,SAAS,EAAE,CAAC;AAE1C,0BAA0B;AACb,QAAA,mBAAmB,GAAG,IAAI,iBAAiB,EAAE,CAAC;AAC9C,QAAA,qBAAqB,GAAG,IAAI,mBAAmB,EAAE,CAAC;AAClD,QAAA,2BAA2B,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAEzE,2BAA2B;AACd,QAAA,8BAA8B,GAAG,IAAI,2BAA2B,EAAE,CAAC;AACnE,QAAA,4BAA4B,GAAG,IAAI,yBAAyB,EAAE,CAAC;AAC/D,QAAA,sCAAsC,GACjD,IAAI,iCAAiC,EAAE,CAAC;AAE1C;;GAEG;AACH,SAAgB,eAAe;IAC7B,OAAO,kBAAU,CAAC;AACpB,CAAC;AAFD,0CAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter } from './Meter';\nimport {\n BatchObservableCallback,\n Counter,\n Histogram,\n MetricOptions,\n ObservableCallback,\n ObservableCounter,\n ObservableGauge,\n ObservableUpDownCounter,\n UpDownCounter,\n MetricAttributes,\n Observable,\n} from './Metric';\n\n/**\n * 
NoopMeter is a noop implementation of the {@link Meter} interface. It reuses\n * constant NoopMetrics for all of its methods.\n */\nexport class NoopMeter implements Meter {\n constructor() {}\n\n /**\n * @see {@link Meter.createHistogram}\n */\n createHistogram(_name: string, _options?: MetricOptions): Histogram {\n return NOOP_HISTOGRAM_METRIC;\n }\n\n /**\n * @see {@link Meter.createCounter}\n */\n createCounter(_name: string, _options?: MetricOptions): Counter {\n return NOOP_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createUpDownCounter}\n */\n createUpDownCounter(_name: string, _options?: MetricOptions): UpDownCounter {\n return NOOP_UP_DOWN_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableGauge}\n */\n createObservableGauge(\n _name: string,\n _options?: MetricOptions\n ): ObservableGauge {\n return NOOP_OBSERVABLE_GAUGE_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableCounter}\n */\n createObservableCounter(\n _name: string,\n _options?: MetricOptions\n ): ObservableCounter {\n return NOOP_OBSERVABLE_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.createObservableUpDownCounter}\n */\n createObservableUpDownCounter(\n _name: string,\n _options?: MetricOptions\n ): ObservableUpDownCounter {\n return NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC;\n }\n\n /**\n * @see {@link Meter.addBatchObservableCallback}\n */\n addBatchObservableCallback(\n _callback: BatchObservableCallback,\n _observables: Observable[]\n ): void {}\n\n /**\n * @see {@link Meter.removeBatchObservableCallback}\n */\n removeBatchObservableCallback(_callback: BatchObservableCallback): void {}\n}\n\nexport class NoopMetric {}\n\nexport class NoopCounterMetric extends NoopMetric implements Counter {\n add(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopUpDownCounterMetric\n extends NoopMetric\n implements UpDownCounter\n{\n add(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopHistogramMetric extends NoopMetric implements Histogram {\n record(_value: number, _attributes: MetricAttributes): void {}\n}\n\nexport class NoopObservableMetric {\n addCallback(_callback: ObservableCallback) {}\n\n removeCallback(_callback: ObservableCallback) {}\n}\n\nexport class NoopObservableCounterMetric\n extends NoopObservableMetric\n implements ObservableCounter {}\n\nexport class NoopObservableGaugeMetric\n extends NoopObservableMetric\n implements ObservableGauge {}\n\nexport class NoopObservableUpDownCounterMetric\n extends NoopObservableMetric\n implements ObservableUpDownCounter {}\n\nexport const NOOP_METER = new NoopMeter();\n\n// Synchronous instruments\nexport const NOOP_COUNTER_METRIC = new NoopCounterMetric();\nexport const NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric();\nexport const NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric();\n\n// Asynchronous instruments\nexport const NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric();\nexport const NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric();\nexport const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC =\n new NoopObservableUpDownCounterMetric();\n\n/**\n * Create a no-op Meter\n */\nexport function createNoopMeter(): Meter {\n return NOOP_METER;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.d.ts b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.d.ts new file mode 100644 index 0000000..8b51bc5 --- /dev/null +++ 
b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.d.ts @@ -0,0 +1,11 @@ +import { Meter, MeterOptions } from './Meter'; +import { MeterProvider } from './MeterProvider'; +/** + * An implementation of the {@link MeterProvider} which returns an impotent Meter + * for all calls to `getMeter` + */ +export declare class NoopMeterProvider implements MeterProvider { + getMeter(_name: string, _version?: string, _options?: MeterOptions): Meter; +} +export declare const NOOP_METER_PROVIDER: NoopMeterProvider; +//# sourceMappingURL=NoopMeterProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js new file mode 100644 index 0000000..b1c1cc0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js @@ -0,0 +1,31 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NOOP_METER_PROVIDER = exports.NoopMeterProvider = void 0; +const NoopMeter_1 = require("./NoopMeter"); +/** + * An implementation of the {@link MeterProvider} which returns an impotent Meter + * for all calls to `getMeter` + */ +class NoopMeterProvider { + getMeter(_name, _version, _options) { + return NoopMeter_1.NOOP_METER; + } +} +exports.NoopMeterProvider = NoopMeterProvider; +exports.NOOP_METER_PROVIDER = new NoopMeterProvider(); +//# sourceMappingURL=NoopMeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js.map b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js.map new file mode 100644 index 0000000..66117d0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopMeterProvider.js","sourceRoot":"","sources":["../../../src/metrics/NoopMeterProvider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,2CAAyC;AAEzC;;;GAGG;AACH,MAAa,iBAAiB;IAC5B,QAAQ,CAAC,KAAa,EAAE,QAAiB,EAAE,QAAuB;QAChE,OAAO,sBAAU,CAAC;IACpB,CAAC;CACF;AAJD,8CAIC;AAEY,QAAA,mBAAmB,GAAG,IAAI,iBAAiB,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Meter, MeterOptions } from './Meter';\nimport { MeterProvider } from './MeterProvider';\nimport { NOOP_METER } 
from './NoopMeter';\n\n/**\n * An implementation of the {@link MeterProvider} which returns an impotent Meter\n * for all calls to `getMeter`\n */\nexport class NoopMeterProvider implements MeterProvider {\n getMeter(_name: string, _version?: string, _options?: MeterOptions): Meter {\n return NOOP_METER;\n }\n}\n\nexport const NOOP_METER_PROVIDER = new NoopMeterProvider();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.d.ts b/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.d.ts new file mode 100644 index 0000000..26563f9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.d.ts @@ -0,0 +1,31 @@ +import { MetricAttributes, Observable } from './Metric'; +/** + * Interface that is being used in callback function for Observable Metric. + */ +export interface ObservableResult { + /** + * Observe a measurement of the value associated with the given attributes. + * + * @param value The value to be observed. + * @param attributes The attributes associated with the value. If more than + * one values associated with the same attributes values, SDK may pick the + * last one or simply drop the entire observable result. + */ + observe(this: ObservableResult, value: number, attributes?: AttributesTypes): void; +} +/** + * Interface that is being used in batch observable callback function. + */ +export interface BatchObservableResult { + /** + * Observe a measurement of the value associated with the given attributes. + * + * @param metric The observable metric to be observed. + * @param value The value to be observed. + * @param attributes The attributes associated with the value. If more than + * one values associated with the same attributes values, SDK may pick the + * last one or simply drop the entire observable result. + */ + observe(this: BatchObservableResult, metric: Observable, value: number, attributes?: AttributesTypes): void; +} +//# sourceMappingURL=ObservableResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js b/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js new file mode 100644 index 0000000..7e5cbd0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=ObservableResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js.map b/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js.map new file mode 100644 index 0000000..450ef74 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ObservableResult.js","sourceRoot":"","sources":["../../../src/metrics/ObservableResult.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { MetricAttributes, Observable } from './Metric';\n\n/**\n * Interface that is being used in callback function for Observable Metric.\n */\nexport interface ObservableResult<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Observe a measurement of the value associated with the given attributes.\n *\n * @param value The value to be observed.\n * @param attributes The attributes associated with the value. If more than\n * one values associated with the same attributes values, SDK may pick the\n * last one or simply drop the entire observable result.\n */\n observe(\n this: ObservableResult,\n value: number,\n attributes?: AttributesTypes\n ): void;\n}\n\n/**\n * Interface that is being used in batch observable callback function.\n */\nexport interface BatchObservableResult<\n AttributesTypes extends MetricAttributes = MetricAttributes,\n> {\n /**\n * Observe a measurement of the value associated with the given attributes.\n *\n * @param metric The observable metric to be observed.\n * @param value The value to be observed.\n * @param attributes The attributes associated with the value. 
If more than\n * one values associated with the same attributes values, SDK may pick the\n * last one or simply drop the entire observable result.\n */\n observe(\n this: BatchObservableResult,\n metric: Observable,\n value: number,\n attributes?: AttributesTypes\n ): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts new file mode 100644 index 0000000..e73fd73 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts @@ -0,0 +1,10 @@ +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js new file mode 100644 index 0000000..15c8d21 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js @@ -0,0 +1,38 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports._globalThis = void 0; +// Updates to this file should also be replicated to @opentelemetry/core too. +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef +exports._globalThis = typeof globalThis === 'object' + ? globalThis + : typeof self === 'object' + ? self + : typeof window === 'object' + ? window + : typeof global === 'object' + ? 
global + : {}; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map new file mode 100644 index 0000000..1c02509 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/browser/globalThis.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,6EAA6E;AAE7E;;;;;;GAMG;AAEH,gEAAgE;AAChE,8EAA8E;AACjE,QAAA,WAAW,GACtB,OAAO,UAAU,KAAK,QAAQ;IAC5B,CAAC,CAAC,UAAU;IACZ,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ;QAC1B,CAAC,CAAC,IAAI;QACN,CAAC,CAAC,OAAO,MAAM,KAAK,QAAQ;YAC5B,CAAC,CAAC,MAAM;YACR,CAAC,CAAC,OAAO,MAAM,KAAK,QAAQ;gBAC5B,CAAC,CAAC,MAAM;gBACR,CAAC,CAAE,EAAwB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Updates to this file should also be replicated to @opentelemetry/core too.\n\n/**\n * - globalThis (New standard)\n * - self (Will return the current window instance for supported browsers)\n * - window (fallback for older browser implementations)\n * - global (NodeJS implementation)\n * - (When all else fails)\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef\nexport const _globalThis: typeof globalThis =\n typeof globalThis === 'object'\n ? globalThis\n : typeof self === 'object'\n ? self\n : typeof window === 'object'\n ? window\n : typeof global === 'object'\n ? global\n : ({} as typeof globalThis);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts new file mode 100644 index 0000000..ba20e12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.js b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js new file mode 100644 index 0000000..99fd57c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./globalThis"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map new file mode 100644 index 0000000..5a406a9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/browser/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,+CAA6B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/index.d.ts new file mode 100644 index 0000000..90595da --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.d.ts @@ -0,0 +1,2 @@ +export * from './node'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.js b/node_modules/@opentelemetry/api/build/src/platform/index.js new file mode 100644 index 0000000..33b834d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./node"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/index.js.map new file mode 100644 index 0000000..bc13701 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/platform/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,yCAAuB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './node';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts new file mode 100644 index 0000000..e3c83e5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts @@ -0,0 +1,3 @@ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js new file mode 100644 index 0000000..82c4e39 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js @@ -0,0 +1,22 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports._globalThis = void 0; +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +exports._globalThis = typeof globalThis === 'object' ? 
globalThis : global; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map new file mode 100644 index 0000000..2f2ca82 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/node/globalThis.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,gEAAgE;AAChE,oEAAoE;AACvD,QAAA,WAAW,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexport const _globalThis = typeof globalThis === 'object' ? globalThis : global;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts new file mode 100644 index 0000000..ba20e12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.js b/node_modules/@opentelemetry/api/build/src/platform/node/index.js new file mode 100644 index 0000000..99fd57c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./globalThis"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map new file mode 100644 index 0000000..95561e9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/node/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,+CAA6B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation-api.d.ts b/node_modules/@opentelemetry/api/build/src/propagation-api.d.ts new file mode 100644 index 0000000..e12b51b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation-api.d.ts @@ -0,0 +1,4 @@ +import { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export declare const propagation: PropagationAPI; +//# sourceMappingURL=propagation-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation-api.js b/node_modules/@opentelemetry/api/build/src/propagation-api.js new file mode 100644 index 0000000..f014fb4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation-api.js @@ -0,0 +1,24 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.propagation = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. 
+const propagation_1 = require("./api/propagation"); +/** Entrypoint for propagation API */ +exports.propagation = propagation_1.PropagationAPI.getInstance(); +//# sourceMappingURL=propagation-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation-api.js.map b/node_modules/@opentelemetry/api/build/src/propagation-api.js.map new file mode 100644 index 0000000..ff17b74 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation-api.js","sourceRoot":"","sources":["../../src/propagation-api.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,mDAAmD;AACnD,qCAAqC;AACxB,QAAA,WAAW,GAAG,4BAAc,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { PropagationAPI } from './api/propagation';\n/** Entrypoint for propagation API */\nexport const propagation = PropagationAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts new file mode 100644 index 0000000..398021f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts @@ -0,0 +1,13 @@ +import { Context } from '../context/types'; +import { TextMapPropagator } from './TextMapPropagator'; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +export declare class NoopTextMapPropagator implements TextMapPropagator { + /** Noop inject function does nothing */ + inject(_context: Context, _carrier: unknown): void; + /** Noop extract function does nothing and returns the input context */ + extract(context: Context, _carrier: unknown): Context; + fields(): string[]; +} +//# sourceMappingURL=NoopTextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js new file mode 100644 index 0000000..3f39582 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js @@ -0,0 +1,34 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTextMapPropagator = void 0; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +class NoopTextMapPropagator { + /** Noop inject function does nothing */ + inject(_context, _carrier) { } + /** Noop extract function does nothing and returns the input context */ + extract(context, _carrier) { + return context; + } + fields() { + return []; + } +} +exports.NoopTextMapPropagator = NoopTextMapPropagator; +//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map new file mode 100644 index 0000000..dfd01aa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/NoopTextMapPropagator.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH;;GAEG;AACH,MAAa,qBAAqB;IAChC,wCAAwC;IACxC,MAAM,CAAC,QAAiB,EAAE,QAAiB,IAAS,CAAC;IACrD,uEAAuE;IACvE,OAAO,CAAC,OAAgB,EAAE,QAAiB;QACzC,OAAO,OAAO,CAAC;IACjB,CAAC;IACD,MAAM;QACJ,OAAO,EAAE,CAAC;IACZ,CAAC;CACF;AAVD,sDAUC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { TextMapPropagator } from './TextMapPropagator';\n\n/**\n * No-op implementations of {@link TextMapPropagator}.\n */\nexport class NoopTextMapPropagator implements TextMapPropagator {\n /** Noop inject function does nothing */\n inject(_context: Context, _carrier: unknown): void {}\n /** Noop extract function does nothing and returns the input context */\n extract(context: Context, _carrier: unknown): Context {\n return context;\n }\n fields(): string[] {\n return [];\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts new file mode 100644 index 0000000..dc39367 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts @@ -0,0 +1,84 @@ +import { Context } from '../context/types'; +/** + * Injects `Context` into and extracts it from carriers that travel + * in-band across process boundaries. Encoding is expected to conform to the + * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request + * headers. + * + * The carrier of propagated data on both the client (injector) and server + * (extractor) side is usually an object such as http headers. Propagation is + * usually implemented via library-specific request interceptors, where the + * client-side injects values and the server-side extracts them. 
+ */ +export interface TextMapPropagator { + /** + * Injects values from a given `Context` into a carrier. + * + * OpenTelemetry defines a common set of format values (TextMapPropagator), + * and each has an expected `carrier` type. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param setter an optional {@link TextMapSetter}. If undefined, values will be + * set by direct object assignment. + */ + inject(context: Context, carrier: Carrier, setter: TextMapSetter): void; + /** + * Given a `Context` and a carrier, extract context values from a + * carrier and return a new context, created from the old context, with the + * extracted values. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all + * own properties, and keys will be accessed by direct object access. + */ + extract(context: Context, carrier: Carrier, getter: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; +} +/** + * A setter is specified by the caller to define a specific method + * to set key/value pairs on the carrier within a propagator. + */ +export interface TextMapSetter { + /** + * Callback used to set a key/value pair on an object. + * + * Should be called by the propagator each time a key/value pair + * should be set, and should set that key/value pair on the propagator. + * + * @param carrier object or class which carries key/value pairs + * @param key string key to modify + * @param value value to be set to the key on the carrier + */ + set(carrier: Carrier, key: string, value: string): void; +} +/** + * A getter is specified by the caller to define a specific method + * to get the value of a key from a carrier. + */ +export interface TextMapGetter { + /** + * Get a list of all keys available on the carrier. + * + * @param carrier + */ + keys(carrier: Carrier): string[]; + /** + * Get the value of a specific key from the carrier. + * + * @param carrier + * @param key + */ + get(carrier: Carrier, key: string): undefined | string | string[]; +} +export declare const defaultTextMapGetter: TextMapGetter; +export declare const defaultTextMapSetter: TextMapSetter; +//# sourceMappingURL=TextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js new file mode 100644 index 0000000..513f33c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js @@ -0,0 +1,41 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0; +exports.defaultTextMapGetter = { + get(carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys(carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +exports.defaultTextMapSetter = { + set(carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map new file mode 100644 index 0000000..f233435 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/TextMapPropagator.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAkGU,QAAA,oBAAoB,GAAkB;IACjD,GAAG,CAAC,OAAO,EAAE,GAAG;QACd,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,OAAO;QACV,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,EAAE,CAAC;SACX;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;CACF,CAAC;AAEW,QAAA,oBAAoB,GAAkB;IACjD,GAAG,CAAC,OAAO,EAAE,GAAG,EAAE,KAAK;QACrB,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO;SACR;QAED,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\n\n/**\n * Injects `Context` into and extracts it from carriers that travel\n * in-band across process boundaries. Encoding is expected to conform to the\n * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request\n * headers.\n *\n * The carrier of propagated data on both the client (injector) and server\n * (extractor) side is usually an object such as http headers. Propagation is\n * usually implemented via library-specific request interceptors, where the\n * client-side injects values and the server-side extracts them.\n */\nexport interface TextMapPropagator {\n /**\n * Injects values from a given `Context` into a carrier.\n *\n * OpenTelemetry defines a common set of format values (TextMapPropagator),\n * and each has an expected `carrier` type.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param setter an optional {@link TextMapSetter}. 
If undefined, values will be\n * set by direct object assignment.\n */\n inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter\n ): void;\n\n /**\n * Given a `Context` and a carrier, extract context values from a\n * carrier and return a new context, created from the old context, with the\n * extracted values.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all\n * own properties, and keys will be accessed by direct object access.\n */\n extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter\n ): Context;\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n fields(): string[];\n}\n\n/**\n * A setter is specified by the caller to define a specific method\n * to set key/value pairs on the carrier within a propagator.\n */\nexport interface TextMapSetter {\n /**\n * Callback used to set a key/value pair on an object.\n *\n * Should be called by the propagator each time a key/value pair\n * should be set, and should set that key/value pair on the propagator.\n *\n * @param carrier object or class which carries key/value pairs\n * @param key string key to modify\n * @param value value to be set to the key on the carrier\n */\n set(carrier: Carrier, key: string, value: string): void;\n}\n\n/**\n * A getter is specified by the caller to define a specific method\n * to get the value of a key from a carrier.\n */\nexport interface TextMapGetter {\n /**\n * Get a list of all keys available on the carrier.\n *\n * @param carrier\n */\n keys(carrier: Carrier): string[];\n\n /**\n * Get the value of a specific key from the carrier.\n *\n * @param carrier\n * @param key\n */\n get(carrier: Carrier, key: string): undefined | string | string[];\n}\n\nexport const defaultTextMapGetter: TextMapGetter = {\n get(carrier, key) {\n if (carrier == null) {\n return undefined;\n }\n return carrier[key];\n },\n\n keys(carrier) {\n if (carrier == null) {\n return [];\n }\n return Object.keys(carrier);\n },\n};\n\nexport const defaultTextMapSetter: TextMapSetter = {\n set(carrier, key, value) {\n if (carrier == null) {\n return;\n }\n\n carrier[key] = value;\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace-api.d.ts b/node_modules/@opentelemetry/api/build/src/trace-api.d.ts new file mode 100644 index 0000000..b4751a7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace-api.d.ts @@ -0,0 +1,4 @@ +import { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export declare const trace: TraceAPI; +//# sourceMappingURL=trace-api.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace-api.js b/node_modules/@opentelemetry/api/build/src/trace-api.js new file mode 100644 index 0000000..c8bbe93 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace-api.js @@ -0,0 +1,24 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.trace = void 0; +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +const trace_1 = require("./api/trace"); +/** Entrypoint for trace API */ +exports.trace = trace_1.TraceAPI.getInstance(); +//# sourceMappingURL=trace-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace-api.js.map b/node_modules/@opentelemetry/api/build/src/trace-api.js.map new file mode 100644 index 0000000..2475b59 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace-api.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace-api.js","sourceRoot":"","sources":["../../src/trace-api.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sEAAsE;AACtE,qCAAqC;AACrC,uCAAuC;AACvC,+BAA+B;AAClB,QAAA,KAAK,GAAG,gBAAQ,CAAC,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Split module-level variable definition into separate files to allow\n// tree-shaking on each api instance.\nimport { TraceAPI } from './api/trace';\n/** Entrypoint for trace API */\nexport const trace = TraceAPI.getInstance();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts new file mode 100644 index 0000000..883cf91 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts @@ -0,0 +1,25 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. 
+ */ +export declare class NonRecordingSpan implements Span { + private readonly _spanContext; + constructor(_spanContext?: SpanContext); + spanContext(): SpanContext; + setAttribute(_key: string, _value: unknown): this; + setAttributes(_attributes: SpanAttributes): this; + addEvent(_name: string, _attributes?: SpanAttributes): this; + setStatus(_status: SpanStatus): this; + updateName(_name: string): this; + end(_endTime?: TimeInput): void; + isRecording(): boolean; + recordException(_exception: Exception, _time?: TimeInput): void; +} +//# sourceMappingURL=NonRecordingSpan.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js new file mode 100644 index 0000000..44e913c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js @@ -0,0 +1,63 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NonRecordingSpan = void 0; +const invalid_span_constants_1 = require("./invalid-span-constants"); +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +class NonRecordingSpan { + constructor(_spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT) { + this._spanContext = _spanContext; + } + // Returns a SpanContext. + spanContext() { + return this._spanContext; + } + // By default does nothing + setAttribute(_key, _value) { + return this; + } + // By default does nothing + setAttributes(_attributes) { + return this; + } + // By default does nothing + addEvent(_name, _attributes) { + return this; + } + // By default does nothing + setStatus(_status) { + return this; + } + // By default does nothing + updateName(_name) { + return this; + } + // By default does nothing + end(_endTime) { } + // isRecording always returns false for NonRecordingSpan. 
+ isRecording() { + return false; + } + // By default does nothing + recordException(_exception, _time) { } +} +exports.NonRecordingSpan = NonRecordingSpan; +//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map new file mode 100644 index 0000000..d3d22f4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NonRecordingSpan.js","sourceRoot":"","sources":["../../../src/trace/NonRecordingSpan.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH,qEAAgE;AAKhE;;;;GAIG;AACH,MAAa,gBAAgB;IAC3B,YACmB,eAA4B,6CAAoB;QAAhD,iBAAY,GAAZ,YAAY,CAAoC;IAChE,CAAC;IAEJ,yBAAyB;IACzB,WAAW;QACT,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,0BAA0B;IAC1B,YAAY,CAAC,IAAY,EAAE,MAAe;QACxC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,aAAa,CAAC,WAA2B;QACvC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,QAAQ,CAAC,KAAa,EAAE,WAA4B;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,SAAS,CAAC,OAAmB;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,UAAU,CAAC,KAAa;QACtB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,GAAG,CAAC,QAAoB,IAAS,CAAC;IAElC,yDAAyD;IACzD,WAAW;QACT,OAAO,KAAK,CAAC;IACf,CAAC;IAED,0BAA0B;IAC1B,eAAe,CAAC,UAAqB,EAAE,KAAiB,IAAS,CAAC;CACnE;AA7CD,4CA6CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { INVALID_SPAN_CONTEXT } from './invalid-span-constants';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * The NonRecordingSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nexport class NonRecordingSpan implements Span {\n constructor(\n private readonly _spanContext: SpanContext = INVALID_SPAN_CONTEXT\n ) {}\n\n // Returns a SpanContext.\n spanContext(): SpanContext {\n return this._spanContext;\n }\n\n // By default does nothing\n setAttribute(_key: string, _value: unknown): this {\n return this;\n }\n\n // By default does nothing\n setAttributes(_attributes: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n addEvent(_name: string, _attributes?: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n setStatus(_status: SpanStatus): this {\n return this;\n }\n\n // By default does nothing\n updateName(_name: string): this {\n return this;\n }\n\n // By default does nothing\n end(_endTime?: TimeInput): void {}\n\n // isRecording always returns false for NonRecordingSpan.\n isRecording(): boolean {\n return false;\n }\n\n // By default does nothing\n recordException(_exception: Exception, _time?: TimeInput): void {}\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts new file mode 100644 index 0000000..0e059c9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts @@ -0,0 +1,14 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +/** + * No-op implementations of {@link Tracer}. + */ +export declare class NoopTracer implements Tracer { + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan ReturnType>(name: string, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, ctx: Context | undefined, fn: F): ReturnType; +} +//# sourceMappingURL=NoopTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js new file mode 100644 index 0000000..0a823aa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js @@ -0,0 +1,75 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTracer = void 0; +const context_1 = require("../api/context"); +const context_utils_1 = require("../trace/context-utils"); +const NonRecordingSpan_1 = require("./NonRecordingSpan"); +const spancontext_utils_1 = require("./spancontext-utils"); +const contextApi = context_1.ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +class NoopTracer { + // startSpan starts a noop span. 
+ startSpan(name, options, context = contextApi.active()) { + const root = Boolean(options === null || options === void 0 ? void 0 : options.root); + if (root) { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + const parentFromContext = context && (0, context_utils_1.getSpanContext)(context); + if (isSpanContext(parentFromContext) && + (0, spancontext_utils_1.isSpanContextValid)(parentFromContext)) { + return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + } + startActiveSpan(name, arg2, arg3, arg4) { + let opts; + let ctx; + let fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + const parentContext = ctx !== null && ctx !== void 0 ? ctx : contextApi.active(); + const span = this.startSpan(name, opts, parentContext); + const contextWithSpanSet = (0, context_utils_1.setSpan)(parentContext, span); + return contextApi.with(contextWithSpanSet, fn, undefined, span); + } +} +exports.NoopTracer = NoopTracer; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map new file mode 100644 index 0000000..7fde17d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracer.js","sourceRoot":"","sources":["../../../src/trace/NoopTracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,4CAA4C;AAE5C,0DAAiE;AACjE,yDAAsD;AAEtD,2DAAyD;AAKzD,MAAM,UAAU,GAAG,oBAAU,CAAC,WAAW,EAAE,CAAC;AAE5C;;GAEG;AACH,MAAa,UAAU;IACrB,gCAAgC;IAChC,SAAS,CACP,IAAY,EACZ,OAAqB,EACrB,OAAO,GAAG,UAAU,CAAC,MAAM,EAAE;QAE7B,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,CAAC,CAAC;QACpC,IAAI,IAAI,EAAE;YACR,OAAO,IAAI,mCAAgB,EAAE,CAAC;SAC/B;QAED,MAAM,iBAAiB,GAAG,OAAO,IAAI,IAAA,8BAAc,EAAC,OAAO,CAAC,CAAC;QAE7D,IACE,aAAa,CAAC,iBAAiB,CAAC;YAChC,IAAA,sCAAkB,EAAC,iBAAiB,CAAC,EACrC;YACA,OAAO,IAAI,mCAAgB,CAAC,iBAAiB,CAAC,CAAC;SAChD;aAAM;YACL,OAAO,IAAI,mCAAgB,EAAE,CAAC;SAC/B;IACH,CAAC;IAiBD,eAAe,CACb,IAAY,EACZ,IAAsB,EACtB,IAAkB,EAClB,IAAQ;QAER,IAAI,IAA6B,CAAC;QAClC,IAAI,GAAwB,CAAC;QAC7B,IAAI,EAAK,CAAC;QAEV,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YACxB,OAAO;SACR;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,IAAI,GAAG,IAA+B,CAAC;YACvC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM;YACL,IAAI,GAAG,IAA+B,CAAC;YACvC,GAAG,GAAG,IAA2B,CAAC;YAClC,EAAE,GAAG,IAAS,CAAC;SAChB;QAED,MAAM,aAAa,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,UAAU,CAAC,MAAM,EAAE,CAAC;QACjD,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,aAAa,CAAC,CAAC;QACvD,MAAM,kBAAkB,GAAG,IAAA,uBAAO,EAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QAExD,OAAO,UAAU,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;IAClE,CAAC;CACF;AApED,gCAoEC;AAED,SAAS,aAAa,CAAC,WAAgB;IACrC,OAAO,CACL,OAAO,WAAW,KAAK,QAAQ;QAC/B,OAAO,WAAW,CAAC,QAAQ,CAAC,KAAK,QAAQ;QACzC,OAAO,WAAW,CAAC,SAAS,CAAC,KAAK,QAAQ;QAC1C,OAAO,WAAW,CAAC,YAAY,CAAC,KAAK,QAAQ,CAC9C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed 
under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { Context } from '../context/types';\nimport { getSpanContext, setSpan } from '../trace/context-utils';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { isSpanContextValid } from './spancontext-utils';\nimport { SpanOptions } from './SpanOptions';\nimport { SpanContext } from './span_context';\nimport { Tracer } from './tracer';\n\nconst contextApi = ContextAPI.getInstance();\n\n/**\n * No-op implementations of {@link Tracer}.\n */\nexport class NoopTracer implements Tracer {\n // startSpan starts a noop span.\n startSpan(\n name: string,\n options?: SpanOptions,\n context = contextApi.active()\n ): Span {\n const root = Boolean(options?.root);\n if (root) {\n return new NonRecordingSpan();\n }\n\n const parentFromContext = context && getSpanContext(context);\n\n if (\n isSpanContext(parentFromContext) &&\n isSpanContextValid(parentFromContext)\n ) {\n return new NonRecordingSpan(parentFromContext);\n } else {\n return new NonRecordingSpan();\n }\n }\n\n startActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n ctx: Context | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n arg2?: F | SpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType | undefined {\n let opts: SpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (arguments.length < 2) {\n return;\n } else if (arguments.length === 2) {\n fn = arg2 as F;\n } else if (arguments.length === 3) {\n opts = arg2 as SpanOptions | undefined;\n fn = arg3 as F;\n } else {\n opts = arg2 as SpanOptions | undefined;\n ctx = arg3 as Context | undefined;\n fn = arg4 as F;\n }\n\n const parentContext = ctx ?? 
contextApi.active();\n const span = this.startSpan(name, opts, parentContext);\n const contextWithSpanSet = setSpan(parentContext, span);\n\n return contextApi.with(contextWithSpanSet, fn, undefined, span);\n }\n}\n\nfunction isSpanContext(spanContext: any): spanContext is SpanContext {\n return (\n typeof spanContext === 'object' &&\n typeof spanContext['spanId'] === 'string' &&\n typeof spanContext['traceId'] === 'string' &&\n typeof spanContext['traceFlags'] === 'number'\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts new file mode 100644 index 0000000..ec0fe79 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts @@ -0,0 +1,13 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +import { TracerProvider } from './tracer_provider'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +export declare class NoopTracerProvider implements TracerProvider { + getTracer(_name?: string, _version?: string, _options?: TracerOptions): Tracer; +} +//# sourceMappingURL=NoopTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js new file mode 100644 index 0000000..c9e08d6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js @@ -0,0 +1,32 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTracerProvider = void 0; +const NoopTracer_1 = require("./NoopTracer"); +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. 
+ */ +class NoopTracerProvider { + getTracer(_name, _version, _options) { + return new NoopTracer_1.NoopTracer(); + } +} +exports.NoopTracerProvider = NoopTracerProvider; +//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map new file mode 100644 index 0000000..c47b350 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/NoopTracerProvider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,6CAA0C;AAK1C;;;;;GAKG;AACH,MAAa,kBAAkB;IAC7B,SAAS,CACP,KAAc,EACd,QAAiB,EACjB,QAAwB;QAExB,OAAO,IAAI,uBAAU,EAAE,CAAC;IAC1B,CAAC;CACF;AARD,gDAQC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopTracer } from './NoopTracer';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\nimport { TracerProvider } from './tracer_provider';\n\n/**\n * An implementation of the {@link TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nexport class NoopTracerProvider implements TracerProvider {\n getTracer(\n _name?: string,\n _version?: string,\n _options?: TracerOptions\n ): Tracer {\n return new NoopTracer();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts new file mode 100644 index 0000000..116cc5c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts @@ -0,0 +1,27 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * Proxy tracer provided by the proxy tracer provider + */ +export declare class ProxyTracer implements Tracer { + private _provider; + readonly name: string; + readonly version?: string | undefined; + readonly options?: TracerOptions | undefined; + private _delegate?; + constructor(_provider: TracerDelegator, name: string, version?: string | undefined, options?: TracerOptions | undefined); + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan unknown>(_name: string, _options: F | SpanOptions, _context?: F | Context, _fn?: F): ReturnType; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + private _getTracer; +} +export interface TracerDelegator { + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js new file mode 100644 index 0000000..6677680 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js @@ -0,0 +1,55 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ProxyTracer = void 0; +const NoopTracer_1 = require("./NoopTracer"); +const NOOP_TRACER = new NoopTracer_1.NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +class ProxyTracer { + constructor(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + startSpan(name, options, context) { + return this._getTracer().startSpan(name, options, context); + } + startActiveSpan(_name, _options, _context, _fn) { + const tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + } + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + _getTracer() { + if (this._delegate) { + return this._delegate; + } + const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + } +} +exports.ProxyTracer = ProxyTracer; +//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map new file mode 100644 index 0000000..941125c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracer.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,6CAA0C;AAM1C,MAAM,WAAW,GAAG,IAAI,uBAAU,EAAE,CAAC;AAErC;;GAEG;AACH,MAAa,WAAW;IAItB,YACU,SAA0B,EAClB,IAAY,EACZ,OAAgB,EAChB,OAAuB;QAH/B,cAAS,GAAT,SAAS,CAAiB;QAClB,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAS;QAChB,YAAO,GAAP,OAAO,CAAgB;IACtC,CAAC;IAEJ,SAAS,CAAC,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;IAED,eAAe,CACb,KAAa,EACb,QAAyB,EACzB,QAAsB,EACtB,GAAO;QAEP,MAAM,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,eAAe,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;IAClE,CAAC;IAED;;;OAGG;IACK,UAAU;QAChB,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,OAAO,IAAI,CAAC,SAAS,CAAC;SACvB;QAED,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,iBAAiB,CAC7C,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,OAAO,CACb,CAAC;QAEF,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,WAAW,CAAC;SACpB;QAED,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC;QACxB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;CACF;AA/CD,kCA+CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { NoopTracer } from './NoopTracer';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER = new NoopTracer();\n\n/**\n * Proxy tracer provided by the proxy tracer provider\n */\nexport class ProxyTracer implements Tracer {\n // When a real implementation is provided, this will be it\n private _delegate?: Tracer;\n\n constructor(\n private _provider: TracerDelegator,\n public readonly name: string,\n public readonly version?: string,\n public readonly options?: TracerOptions\n ) {}\n\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n return this._getTracer().startSpan(name, options, context);\n }\n\n startActiveSpan unknown>(\n _name: string,\n _options: F | SpanOptions,\n _context?: F | Context,\n _fn?: F\n ): ReturnType {\n const tracer = this._getTracer();\n return Reflect.apply(tracer.startActiveSpan, tracer, arguments);\n }\n\n /**\n * Try to get a tracer from the proxy tracer provider.\n * If the proxy tracer provider has no delegate, return a 
noop tracer.\n */\n private _getTracer() {\n if (this._delegate) {\n return this._delegate;\n }\n\n const tracer = this._provider.getDelegateTracer(\n this.name,\n this.version,\n this.options\n );\n\n if (!tracer) {\n return NOOP_TRACER;\n }\n\n this._delegate = tracer;\n return this._delegate;\n }\n}\n\nexport interface TracerDelegator {\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts new file mode 100644 index 0000000..ee7eafa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts @@ -0,0 +1,25 @@ +import { Tracer } from './tracer'; +import { TracerProvider } from './tracer_provider'; +import { TracerOptions } from './tracer_options'; +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +export declare class ProxyTracerProvider implements TracerProvider { + private _delegate?; + /** + * Get a {@link ProxyTracer} + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; + getDelegate(): TracerProvider; + /** + * Set the delegate tracer provider + */ + setDelegate(delegate: TracerProvider): void; + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js new file mode 100644 index 0000000..75ec910 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js @@ -0,0 +1,54 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ProxyTracerProvider = void 0; +const ProxyTracer_1 = require("./ProxyTracer"); +const NoopTracerProvider_1 = require("./NoopTracerProvider"); +const NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. 
+ */ +class ProxyTracerProvider { + /** + * Get a {@link ProxyTracer} + */ + getTracer(name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version, options)); + } + getDelegate() { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + } + /** + * Set the delegate tracer provider + */ + setDelegate(delegate) { + this._delegate = delegate; + } + getDelegateTracer(name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); + } +} +exports.ProxyTracerProvider = ProxyTracerProvider; +//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map new file mode 100644 index 0000000..682c255 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracerProvider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,+CAA4C;AAC5C,6DAA0D;AAG1D,MAAM,oBAAoB,GAAG,IAAI,uCAAkB,EAAE,CAAC;AAEtD;;;;;;;GAOG;AACH,MAAa,mBAAmB;IAG9B;;OAEG;IACH,SAAS,CAAC,IAAY,EAAE,OAAgB,EAAE,OAAuB;;QAC/D,OAAO,CACL,MAAA,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,mCAC9C,IAAI,yBAAW,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAC9C,CAAC;IACJ,CAAC;IAED,WAAW;;QACT,OAAO,MAAA,IAAI,CAAC,SAAS,mCAAI,oBAAoB,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,WAAW,CAAC,QAAwB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED,iBAAiB,CACf,IAAY,EACZ,OAAgB,EAChB,OAAuB;;QAEvB,OAAO,MAAA,IAAI,CAAC,SAAS,0CAAE,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;CACF;AA/BD,kDA+BC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerProvider } from './tracer_provider';\nimport { ProxyTracer } from './ProxyTracer';\nimport { NoopTracerProvider } from './NoopTracerProvider';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n\n/**\n * Tracer provider which provides {@link ProxyTracer}s.\n *\n * Before a delegate is set, tracers provided are NoOp.\n * When a delegate is set, traces are provided from the delegate.\n * When a delegate is set after tracers have already been provided,\n * all tracers already provided will use the provided delegate implementation.\n */\nexport class ProxyTracerProvider implements TracerProvider {\n private _delegate?: TracerProvider;\n\n /**\n * Get a {@link ProxyTracer}\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n return (\n this.getDelegateTracer(name, version, options) ??\n new ProxyTracer(this, name, version, options)\n );\n }\n\n 
getDelegate(): TracerProvider {\n return this._delegate ?? NOOP_TRACER_PROVIDER;\n }\n\n /**\n * Set the delegate tracer provider\n */\n setDelegate(delegate: TracerProvider) {\n this._delegate = delegate;\n }\n\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined {\n return this._delegate?.getTracer(name, version, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts b/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts new file mode 100644 index 0000000..c847eaf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts @@ -0,0 +1,31 @@ +import { Context } from '../context/types'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SamplingResult } from './SamplingResult'; +import { SpanKind } from './span_kind'; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * This interface represent a sampler. Sampling is a mechanism to control the + * noise and overhead introduced by OpenTelemetry by reducing the number of + * samples of traces collected and sent to the backend. + */ +export interface Sampler { + /** + * Checks whether span needs to be created and tracked. + * + * @param context Parent Context which may contain a span. + * @param traceId of the span to be created. It can be different from the + * traceId in the {@link SpanContext}. Typically in situations when the + * span to be created starts a new trace. + * @param spanName of the span to be created. + * @param spanKind of the span to be created. + * @param attributes Initial set of SpanAttributes for the Span being constructed. + * @param links Collection of links that will be associated with the Span to + * be created. Typically useful for batch operations. + * @returns a {@link SamplingResult}. + */ + shouldSample(context: Context, traceId: string, spanName: string, spanKind: SpanKind, attributes: SpanAttributes, links: Link[]): SamplingResult; + /** Returns the sampler name or short description with the configuration. */ + toString(): string; +} +//# sourceMappingURL=Sampler.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.js b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js new file mode 100644 index 0000000..6034482 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map new file mode 100644 index 0000000..83ae054 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Sampler.js","sourceRoot":"","sources":["../../../src/trace/Sampler.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SamplingResult } from './SamplingResult';\nimport { SpanKind } from './span_kind';\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * This interface represent a sampler. Sampling is a mechanism to control the\n * noise and overhead introduced by OpenTelemetry by reducing the number of\n * samples of traces collected and sent to the backend.\n */\nexport interface Sampler {\n /**\n * Checks whether span needs to be created and tracked.\n *\n * @param context Parent Context which may contain a span.\n * @param traceId of the span to be created. It can be different from the\n * traceId in the {@link SpanContext}. Typically in situations when the\n * span to be created starts a new trace.\n * @param spanName of the span to be created.\n * @param spanKind of the span to be created.\n * @param attributes Initial set of SpanAttributes for the Span being constructed.\n * @param links Collection of links that will be associated with the Span to\n * be created. Typically useful for batch operations.\n * @returns a {@link SamplingResult}.\n */\n shouldSample(\n context: Context,\n traceId: string,\n spanName: string,\n spanKind: SpanKind,\n attributes: SpanAttributes,\n links: Link[]\n ): SamplingResult;\n\n /** Returns the sampler name or short description with the configuration. */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts new file mode 100644 index 0000000..f2bb495 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts @@ -0,0 +1,49 @@ +import { SpanAttributes } from './attributes'; +import { TraceState } from './trace_state'; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export declare enum SamplingDecision { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. 
+ */ + NOT_RECORD = 0, + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + RECORD = 1, + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. + */ + RECORD_AND_SAMPLED = 2 +} +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling result contains a decision for a {@link Span} and additional + * attributes the sampler would like to added to the Span. + */ +export interface SamplingResult { + /** + * A sampling decision, refer to {@link SamplingDecision} for details. + */ + decision: SamplingDecision; + /** + * The list of attributes returned by SamplingResult MUST be immutable. + * Caller may call {@link Sampler}.shouldSample any number of times and + * can safely cache the returned value. + */ + attributes?: Readonly; + /** + * A {@link TraceState} that will be associated with the {@link Span} through + * the new {@link SpanContext}. Samplers SHOULD return the TraceState from + * the passed-in {@link Context} if they do not intend to change it. Leaving + * the value undefined will also leave the TraceState unchanged. + */ + traceState?: TraceState; +} +//# sourceMappingURL=SamplingResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js new file mode 100644 index 0000000..6df6b3b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js @@ -0,0 +1,42 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SamplingDecision = void 0; +/** + * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map new file mode 100644 index 0000000..6e4ed4b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SamplingResult.js","sourceRoot":"","sources":["../../../src/trace/SamplingResult.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH;;;;GAIG;AACH,IAAY,gBAgBX;AAhBD,WAAY,gBAAgB;IAC1B;;;OAGG;IACH,mEAAU,CAAA;IACV;;;OAGG;IACH,2DAAM,CAAA;IACN;;;OAGG;IACH,mFAAkB,CAAA;AACpB,CAAC,EAhBW,gBAAgB,GAAhB,wBAAgB,KAAhB,wBAAgB,QAgB3B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { TraceState } from './trace_state';\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nexport enum SamplingDecision {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n NOT_RECORD,\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n RECORD,\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n RECORD_AND_SAMPLED,\n}\n\n/**\n * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.\n * A sampling result contains a decision for a {@link Span} and additional\n * attributes the sampler would like to added to the Span.\n */\nexport interface SamplingResult {\n /**\n * A sampling decision, refer to {@link SamplingDecision} for details.\n */\n decision: SamplingDecision;\n /**\n * The list of attributes returned by SamplingResult MUST be immutable.\n * Caller may call {@link Sampler}.shouldSample any number of times and\n * can safely cache the returned value.\n */\n attributes?: Readonly;\n /**\n * A {@link TraceState} that will be associated with the {@link Span} through\n * the new {@link SpanContext}. Samplers SHOULD return the TraceState from\n * the passed-in {@link Context} if they do not intend to change it. 
Leaving\n * the value undefined will also leave the TraceState unchanged.\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts new file mode 100644 index 0000000..c804568 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts @@ -0,0 +1,23 @@ +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SpanKind } from './span_kind'; +/** + * Options needed for span creation + */ +export interface SpanOptions { + /** + * The SpanKind of a span + * @default {@link SpanKind.INTERNAL} + */ + kind?: SpanKind; + /** A span's attributes */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** A manually specified start time for the created `Span` object. */ + startTime?: TimeInput; + /** The new span should be a root span. (Ignore parent from context). */ + root?: boolean; +} +//# sourceMappingURL=SpanOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js new file mode 100644 index 0000000..cb58230 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map new file mode 100644 index 0000000..049f685 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SpanOptions.js","sourceRoot":"","sources":["../../../src/trace/SpanOptions.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SpanKind } from './span_kind';\n\n/**\n * Options needed for span creation\n */\nexport interface SpanOptions {\n /**\n * The SpanKind of a span\n * @default {@link SpanKind.INTERNAL}\n */\n kind?: SpanKind;\n\n /** A span's attributes */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /** A manually specified start time for the created `Span` object. */\n startTime?: TimeInput;\n\n /** The new span should be a root span. (Ignore parent from context). */\n root?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts b/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts new file mode 100644 index 0000000..a2a5d2a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts @@ -0,0 +1,10 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +/** + * @deprecated please use {@link Attributes} + */ +export declare type SpanAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type SpanAttributeValue = AttributeValue; +//# sourceMappingURL=attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.js b/node_modules/@opentelemetry/api/build/src/trace/attributes.js new file mode 100644 index 0000000..c6eb97a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map b/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map new file mode 100644 index 0000000..4de58c4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"attributes.js","sourceRoot":"","sources":["../../../src/trace/attributes.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type SpanAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type SpanAttributeValue = AttributeValue;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts new file mode 100644 index 0000000..f35f794 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts @@ -0,0 +1,41 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; +/** + * Gets the span from the current context, if one exists. + */ +export declare function getActiveSpan(): Span | undefined; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export declare function deleteSpan(context: Context): Context; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; +/** + * Get the span context of the span if it exists. 
+ * + * @param context context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; +//# sourceMappingURL=context-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.js b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js new file mode 100644 index 0000000..d7e9c3a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js @@ -0,0 +1,82 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getActiveSpan = exports.getSpan = void 0; +const context_1 = require("../context/context"); +const NonRecordingSpan_1 = require("./NonRecordingSpan"); +const context_2 = require("../api/context"); +/** + * span key + */ +const SPAN_KEY = (0, context_1.createContextKey)('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +exports.getSpan = getSpan; +/** + * Gets the span from the current context, if one exists. + */ +function getActiveSpan() { + return getSpan(context_2.ContextAPI.getInstance().active()); +} +exports.getActiveSpan = getActiveSpan; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +exports.setSpan = setSpan; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +exports.deleteSpan = deleteSpan; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); +} +exports.setSpanContext = setSpanContext; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? 
void 0 : _a.spanContext(); +} +exports.getSpanContext = getSpanContext; +//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map new file mode 100644 index 0000000..5fa8126 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-utils.js","sourceRoot":"","sources":["../../../src/trace/context-utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,gDAAsD;AAItD,yDAAsD;AACtD,4CAA4C;AAE5C;;GAEG;AACH,MAAM,QAAQ,GAAG,IAAA,0BAAgB,EAAC,gCAAgC,CAAC,CAAC;AAEpE;;;;GAIG;AACH,SAAgB,OAAO,CAAC,OAAgB;IACtC,OAAQ,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAU,IAAI,SAAS,CAAC;AAC3D,CAAC;AAFD,0BAEC;AAED;;GAEG;AACH,SAAgB,aAAa;IAC3B,OAAO,OAAO,CAAC,oBAAU,CAAC,WAAW,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC;AACpD,CAAC;AAFD,sCAEC;AAED;;;;;GAKG;AACH,SAAgB,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC1C,CAAC;AAFD,0BAEC;AAED;;;;GAIG;AACH,SAAgB,UAAU,CAAC,OAAgB;IACzC,OAAO,OAAO,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;AACvC,CAAC;AAFD,gCAEC;AAED;;;;;;GAMG;AACH,SAAgB,cAAc,CAC5B,OAAgB,EAChB,WAAwB;IAExB,OAAO,OAAO,CAAC,OAAO,EAAE,IAAI,mCAAgB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC7D,CAAC;AALD,wCAKC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;;IAC7C,OAAO,MAAA,OAAO,CAAC,OAAO,CAAC,0CAAE,WAAW,EAAE,CAAC;AACzC,CAAC;AAFD,wCAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { ContextAPI } from '../api/context';\n\n/**\n * span key\n */\nconst SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN');\n\n/**\n * Return the span if one exists\n *\n * @param context context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return (context.getValue(SPAN_KEY) as Span) || undefined;\n}\n\n/**\n * Gets the span from the current context, if one exists.\n */\nexport function getActiveSpan(): Span | undefined {\n return getSpan(ContextAPI.getInstance().active());\n}\n\n/**\n * Set the span on a context\n *\n * @param context context to use as parent\n * @param span span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return context.setValue(SPAN_KEY, span);\n}\n\n/**\n * Remove current span stored in the context\n *\n * @param context context to delete span from\n */\nexport function deleteSpan(context: Context): Context {\n return context.deleteValue(SPAN_KEY);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context context to set active span on\n * @param spanContext span context to be wrapped\n */\nexport function setSpanContext(\n context: Context,\n 
spanContext: SpanContext\n): Context {\n return setSpan(context, new NonRecordingSpan(spanContext));\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return getSpan(context)?.spanContext();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts new file mode 100644 index 0000000..9ed5ecb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts @@ -0,0 +1,22 @@ +import { TraceState } from '../trace_state'; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +export declare class TraceStateImpl implements TraceState { + private _internalState; + constructor(rawTraceState?: string); + set(key: string, value: string): TraceStateImpl; + unset(key: string): TraceStateImpl; + get(key: string): string | undefined; + serialize(): string; + private _parse; + private _keys; + private _clone; +} +//# sourceMappingURL=tracestate-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js new file mode 100644 index 0000000..93c0289 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js @@ -0,0 +1,103 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceStateImpl = void 0; +const tracestate_validators_1 = require("./tracestate-validators"); +const MAX_TRACE_STATE_ITEMS = 32; +const MAX_TRACE_STATE_LEN = 512; +const LIST_MEMBERS_SEPARATOR = ','; +const LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +class TraceStateImpl { + constructor(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + set(key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. 
+ const traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + } + unset(key) { + const traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + } + get(key) { + return this._internalState.get(key); + } + serialize() { + return this._keys() + .reduce((agg, key) => { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + } + _parse(rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce((agg, part) => { + const listMember = part.trim(); // Optional Whitespace (OWS) handling + const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + const key = listMember.slice(0, i); + const value = listMember.slice(i + 1, part.length); + if ((0, tracestate_validators_1.validateKey)(key) && (0, tracestate_validators_1.validateValue)(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + } + _keys() { + return Array.from(this._internalState.keys()).reverse(); + } + _clone() { + const traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + } +} +exports.TraceStateImpl = TraceStateImpl; +//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map new file mode 100644 index 0000000..83eacbe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracestate-impl.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-impl.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,mEAAqE;AAErE,MAAM,qBAAqB,GAAG,EAAE,CAAC;AACjC,MAAM,mBAAmB,GAAG,GAAG,CAAC;AAChC,MAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,MAAM,8BAA8B,GAAG,GAAG,CAAC;AAE3C;;;;;;;;GAQG;AACH,MAAa,cAAc;IAGzB,YAAY,aAAsB;QAF1B,mBAAc,GAAwB,IAAI,GAAG,EAAE,CAAC;QAGtD,IAAI,aAAa;YAAE,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;IAChD,CAAC;IAED,GAAG,CAAC,GAAW,EAAE,KAAa;QAC5B,4DAA4D;QAC5D,sBAAsB;QACtB,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,IAAI,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YACtC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACvC;QACD,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,KAAK,CAAC,GAAW;QACf,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,GAAG,CAAC,GAAW;QACb,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;IAED,SAAS;QACP,OAAO,IAAI,CAAC,KAAK,EAAE;aAChB,MAAM,CAAC,CAAC,GAAa,EAAE,GAAG,EAAE,EAAE;YAC7B,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,8BAA8B,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC/D,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;aACL,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAClC,CAAC;IAEO,MAAM,CAAC,aAAqB;QAClC,IAAI,aAAa,CAAC,MAAM,GAAG,mBAAmB;YAAE,OAAO;QACvD,IAAI,CAAC,cAAc,GAAG,aAAa;aAChC,KAAK,CAAC,sBAAsB,CAAC;aAC7B,OAAO,EAAE,CAAC,2EAA2E;aACrF,MAAM,CAAC,CAAC,GAAwB,EAAE,IAAY,EAAE,EAAE;YACjD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,qCAAqC;YACrE,MAAM,CAAC,GAAG,UAAU,CAAC,OAAO,CAAC,8BAA8B,CAAC,CAAC;YAC7D,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;gBACZ,MAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACnC,MAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;gBACnD,IAAI,IAAA,mCAAW,EAAC,GAAG,CAAC,IAAI,IAAA,qCAAa,EAAC,KAAK,CAAC,EAAE;oBAC5C,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBACrB;qBAAM;oBACL,oCAAoC;iBACrC;aACF;YACD,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;QAEhB,gFAAgF;QAChF,IAAI,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,qBAAqB,EAAE;YACpD,IAAI,CAAC,cAAc,GAAG,IAAI,GAAG,CAC3B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,CAAC;iBACtC,OAAO,EAAE,CAAC,sDAAsD;iBAChE,KAAK,CAAC,CAAC,EAAE,qBAAqB,CAAC,CACnC,CAAC;SACH;IACH,CAAC;IAEO,KAAK;QACX,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;IAC1D,CAAC;IAEO,MAAM;QACZ,MAAM,UAAU,GAAG,IAAI,cAAc,EAAE,CAAC;QACxC,UAAU,CAAC,cAAc,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QACzD,OAAO,UAAU,CAAC;IACpB,CAAC;CACF;AA5ED,wCA4EC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { validateKey, validateValue } from './tracestate-validators';\n\nconst MAX_TRACE_STATE_ITEMS = 32;\nconst MAX_TRACE_STATE_LEN = 512;\nconst LIST_MEMBERS_SEPARATOR = ',';\nconst LIST_MEMBER_KEY_VALUE_SPLITTER = '=';\n\n/**\n * TraceState must be a class and not a simple object type because of the spec\n * requirement 
(https://www.w3.org/TR/trace-context/#tracestate-field).\n *\n * Here is the list of allowed mutations:\n * - New key-value pair should be added into the beginning of the list\n * - The value of any key can be updated. Modified keys MUST be moved to the\n * beginning of the list.\n */\nexport class TraceStateImpl implements TraceState {\n private _internalState: Map = new Map();\n\n constructor(rawTraceState?: string) {\n if (rawTraceState) this._parse(rawTraceState);\n }\n\n set(key: string, value: string): TraceStateImpl {\n // TODO: Benchmark the different approaches(map vs list) and\n // use the faster one.\n const traceState = this._clone();\n if (traceState._internalState.has(key)) {\n traceState._internalState.delete(key);\n }\n traceState._internalState.set(key, value);\n return traceState;\n }\n\n unset(key: string): TraceStateImpl {\n const traceState = this._clone();\n traceState._internalState.delete(key);\n return traceState;\n }\n\n get(key: string): string | undefined {\n return this._internalState.get(key);\n }\n\n serialize(): string {\n return this._keys()\n .reduce((agg: string[], key) => {\n agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key));\n return agg;\n }, [])\n .join(LIST_MEMBERS_SEPARATOR);\n }\n\n private _parse(rawTraceState: string) {\n if (rawTraceState.length > MAX_TRACE_STATE_LEN) return;\n this._internalState = rawTraceState\n .split(LIST_MEMBERS_SEPARATOR)\n .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning\n .reduce((agg: Map, part: string) => {\n const listMember = part.trim(); // Optional Whitespace (OWS) handling\n const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER);\n if (i !== -1) {\n const key = listMember.slice(0, i);\n const value = listMember.slice(i + 1, part.length);\n if (validateKey(key) && validateValue(value)) {\n agg.set(key, value);\n } else {\n // TODO: Consider to add warning log\n }\n }\n return agg;\n }, new Map());\n\n // Because of the reverse() requirement, trunc must be done after map is created\n if (this._internalState.size > MAX_TRACE_STATE_ITEMS) {\n this._internalState = new Map(\n Array.from(this._internalState.entries())\n .reverse() // Use reverse same as original tracestate parse chain\n .slice(0, MAX_TRACE_STATE_ITEMS)\n );\n }\n }\n\n private _keys(): string[] {\n return Array.from(this._internalState.keys()).reverse();\n }\n\n private _clone(): TraceStateImpl {\n const traceState = new TraceStateImpl();\n traceState._internalState = new Map(this._internalState);\n return traceState;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts new file mode 100644 index 0000000..4917f99 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts @@ -0,0 +1,15 @@ +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
+ * see https://www.w3.org/TR/trace-context/#key + */ +export declare function validateKey(key: string): boolean; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +export declare function validateValue(value: string): boolean; +//# sourceMappingURL=tracestate-validators.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js new file mode 100644 index 0000000..3e37044 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js @@ -0,0 +1,46 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.validateValue = exports.validateKey = void 0; +const VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +const VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`; +const VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`; +const VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`); +const VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +const INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. + * see https://www.w3.org/TR/trace-context/#key + */ +function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +exports.validateKey = validateKey; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. 
+ */ +function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +exports.validateValue = validateValue; +//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map new file mode 100644 index 0000000..498abce --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracestate-validators.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-validators.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,MAAM,oBAAoB,GAAG,cAAc,CAAC;AAC5C,MAAM,SAAS,GAAG,QAAQ,oBAAoB,SAAS,CAAC;AACxD,MAAM,gBAAgB,GAAG,WAAW,oBAAoB,gBAAgB,oBAAoB,QAAQ,CAAC;AACrG,MAAM,eAAe,GAAG,IAAI,MAAM,CAAC,OAAO,SAAS,IAAI,gBAAgB,IAAI,CAAC,CAAC;AAC7E,MAAM,sBAAsB,GAAG,qBAAqB,CAAC;AACrD,MAAM,+BAA+B,GAAG,KAAK,CAAC;AAE9C;;;;;;;GAOG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,eAAe,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,CAAC;AAFD,kCAEC;AAED;;;GAGG;AACH,SAAgB,aAAa,CAAC,KAAa;IACzC,OAAO,CACL,sBAAsB,CAAC,IAAI,CAAC,KAAK,CAAC;QAClC,CAAC,+BAA+B,CAAC,IAAI,CAAC,KAAK,CAAC,CAC7C,CAAC;AACJ,CAAC;AALD,sCAKC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]';\nconst VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`;\nconst VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`;\nconst VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`);\nconst VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/;\nconst INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/;\n\n/**\n * Key is opaque string up to 256 characters printable. It MUST begin with a\n * lowercase letter, and can only contain lowercase letters a-z, digits 0-9,\n * underscores _, dashes -, asterisks *, and forward slashes /.\n * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the\n * vendor name. 
Vendors SHOULD set the tenant ID at the beginning of the key.\n * see https://www.w3.org/TR/trace-context/#key\n */\nexport function validateKey(key: string): boolean {\n return VALID_KEY_REGEX.test(key);\n}\n\n/**\n * Value is opaque string up to 256 characters printable ASCII RFC0020\n * characters (i.e., the range 0x20 to 0x7E) except comma , and =.\n */\nexport function validateValue(value: string): boolean {\n return (\n VALID_VALUE_BASE_REGEX.test(value) &&\n !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts new file mode 100644 index 0000000..e3b51fe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts @@ -0,0 +1,3 @@ +import { TraceState } from '../trace_state'; +export declare function createTraceState(rawTraceState?: string): TraceState; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js new file mode 100644 index 0000000..3d95419 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js @@ -0,0 +1,24 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTraceState = void 0; +const tracestate_impl_1 = require("./tracestate-impl"); +function createTraceState(rawTraceState) { + return new tracestate_impl_1.TraceStateImpl(rawTraceState); +} +exports.createTraceState = createTraceState; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map new file mode 100644 index 0000000..19cd87c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../../src/trace/internal/utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,uDAAmD;AAEnD,SAAgB,gBAAgB,CAAC,aAAsB;IACrD,OAAO,IAAI,gCAAc,CAAC,aAAa,CAAC,CAAC;AAC3C,CAAC;AAFD,4CAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { TraceStateImpl } from './tracestate-impl';\n\nexport function createTraceState(rawTraceState?: string): TraceState {\n return new TraceStateImpl(rawTraceState);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts new file mode 100644 index 0000000..e32dab9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts @@ -0,0 +1,5 @@ +import { SpanContext } from './span_context'; +export declare const INVALID_SPANID = "0000000000000000"; +export declare const INVALID_TRACEID = "00000000000000000000000000000000"; +export declare const INVALID_SPAN_CONTEXT: SpanContext; +//# sourceMappingURL=invalid-span-constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js new file mode 100644 index 0000000..77fb79e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js @@ -0,0 +1,27 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; +const trace_flags_1 = require("./trace_flags"); +exports.INVALID_SPANID = '0000000000000000'; +exports.INVALID_TRACEID = '00000000000000000000000000000000'; +exports.INVALID_SPAN_CONTEXT = { + traceId: exports.INVALID_TRACEID, + spanId: exports.INVALID_SPANID, + traceFlags: trace_flags_1.TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map new file mode 100644 index 0000000..54da537 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"invalid-span-constants.js","sourceRoot":"","sources":["../../../src/trace/invalid-span-constants.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,+CAA2C;AAE9B,QAAA,cAAc,GAAG,kBAAkB,CAAC;AACpC,QAAA,eAAe,GAAG,kCAAkC,CAAC;AACrD,QAAA,oBAAoB,GAAgB;IAC/C,OAAO,EAAE,uBAAe;IACxB,MAAM,EAAE,sBAAc;IACtB,UAAU,EAAE,wBAAU,CAAC,IAAI;CAC5B,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanContext } from './span_context';\nimport { TraceFlags } from './trace_flags';\n\nexport const INVALID_SPANID = '0000000000000000';\nexport const INVALID_TRACEID = '00000000000000000000000000000000';\nexport const INVALID_SPAN_CONTEXT: SpanContext = {\n traceId: INVALID_TRACEID,\n spanId: INVALID_SPANID,\n traceFlags: TraceFlags.NONE,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.d.ts b/node_modules/@opentelemetry/api/build/src/trace/link.d.ts new file mode 100644 index 0000000..8fc0106 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.d.ts @@ -0,0 +1,26 @@ +import { SpanAttributes } from './attributes'; +import { SpanContext } from './span_context'; +/** + * A pointer from the current {@link Span} to another span in the same trace or + * in a different trace. + * Few examples of Link usage. + * 1. Batch Processing: A batch of elements may contain elements associated + * with one or more traces/spans. Since there can only be one parent + * SpanContext, Link is used to keep reference to SpanContext of all + * elements in the batch. + * 2. Public Endpoint: A SpanContext in incoming client request on a public + * endpoint is untrusted from service provider perspective. In such case it + * is advisable to start a new trace with appropriate sampling decision. + * However, it is desirable to associate incoming SpanContext to new trace + * initiated on service provider side so two traces (from Client and from + * Service Provider) can be correlated. + */ +export interface Link { + /** The {@link SpanContext} of a linked span. 
*/ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. */ + attributes?: SpanAttributes; + /** Count of attributes of the link that were dropped due to collection limits */ + droppedAttributesCount?: number; +} +//# sourceMappingURL=link.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.js b/node_modules/@opentelemetry/api/build/src/trace/link.js new file mode 100644 index 0000000..8036a63 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.js.map b/node_modules/@opentelemetry/api/build/src/trace/link.js.map new file mode 100644 index 0000000..34fae43 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.js.map @@ -0,0 +1 @@ +{"version":3,"file":"link.js","sourceRoot":"","sources":["../../../src/trace/link.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { SpanContext } from './span_context';\n\n/**\n * A pointer from the current {@link Span} to another span in the same trace or\n * in a different trace.\n * Few examples of Link usage.\n * 1. Batch Processing: A batch of elements may contain elements associated\n * with one or more traces/spans. Since there can only be one parent\n * SpanContext, Link is used to keep reference to SpanContext of all\n * elements in the batch.\n * 2. Public Endpoint: A SpanContext in incoming client request on a public\n * endpoint is untrusted from service provider perspective. In such case it\n * is advisable to start a new trace with appropriate sampling decision.\n * However, it is desirable to associate incoming SpanContext to new trace\n * initiated on service provider side so two traces (from Client and from\n * Service Provider) can be correlated.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n /** A set of {@link SpanAttributes} on the link. 
*/\n attributes?: SpanAttributes;\n /** Count of attributes of the link that were dropped due to collection limits */\n droppedAttributesCount?: number;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span.d.ts new file mode 100644 index 0000000..c5f2814 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.d.ts @@ -0,0 +1,101 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes, SpanAttributeValue } from './attributes'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. + * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key the key for this attribute. + * @param value the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name the name of the event. + * @param [attributesOrStartTime] the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is {@type TimeInput} and 3rd param is undefined + * @param [startTime] start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name the Span name. + */ + updateName(name: string): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param [endTime] the time to set as Span's end time. 
If not provided, + * use the current time as the span's end time. + */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception the exception the only accepted values are string or Error + * @param [time] the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; +} +//# sourceMappingURL=span.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.js b/node_modules/@opentelemetry/api/build/src/trace/span.js new file mode 100644 index 0000000..b50af46 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.js.map b/node_modules/@opentelemetry/api/build/src/trace/span.js.map new file mode 100644 index 0000000..f24165b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span.js","sourceRoot":"","sources":["../../../src/trace/span.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes, SpanAttributeValue } from './attributes';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. 
A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key the key for this attribute.\n * @param value the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n\n /**\n * Sets attributes to the span.\n *\n * @param attributes the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n\n /**\n * Adds an event to the Span.\n *\n * @param name the name of the event.\n * @param [attributesOrStartTime] the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is {@type TimeInput} and 3rd param is undefined\n * @param [startTime] start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name the Span name.\n */\n updateName(name: string): this;\n\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param [endTime] the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception the exception the only accepted values are string or Error\n * @param [time] the time to set as Span's event time. 
If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts new file mode 100644 index 0000000..f30933a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts @@ -0,0 +1,53 @@ +import { TraceState } from './trace_state'; +/** + * A SpanContext represents the portion of a {@link Span} which must be + * serialized and propagated along side of a {@link Baggage}. + */ +export interface SpanContext { + /** + * The ID of the trace that this span belongs to. It is worldwide unique + * with practically sufficient probability by being made as 16 randomly + * generated bytes, encoded as a 32 lowercase hex characters corresponding to + * 128 bits. + */ + traceId: string; + /** + * The ID of the Span. It is globally unique with practically sufficient + * probability by being made as 8 randomly generated bytes, encoded as a 16 + * lowercase hex characters corresponding to 64 bits. + */ + spanId: string; + /** + * Only true if the SpanContext was propagated from a remote parent. + */ + isRemote?: boolean; + /** + * Trace flags to propagate. + * + * It is represented as 1 byte (bitmap). Bit to represent whether trace is + * sampled or not. When set, the least significant bit documents that the + * caller may have recorded trace data. A caller who does not record trace + * data out-of-band leaves this flag unset. + * + * see {@link TraceFlags} for valid flag values. + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} +//# sourceMappingURL=span_context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.js b/node_modules/@opentelemetry/api/build/src/trace/span_context.js new file mode 100644 index 0000000..4b7976c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map b/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map new file mode 100644 index 0000000..005386d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_context.js","sourceRoot":"","sources":["../../../src/trace/span_context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from './trace_state';\n\n/**\n * A SpanContext represents the portion of a {@link Span} which must be\n * serialized and propagated along side of a {@link Baggage}.\n */\nexport interface SpanContext {\n /**\n * The ID of the trace that this span belongs to. It is worldwide unique\n * with practically sufficient probability by being made as 16 randomly\n * generated bytes, encoded as a 32 lowercase hex characters corresponding to\n * 128 bits.\n */\n traceId: string;\n /**\n * The ID of the Span. It is globally unique with practically sufficient\n * probability by being made as 8 randomly generated bytes, encoded as a 16\n * lowercase hex characters corresponding to 64 bits.\n */\n spanId: string;\n /**\n * Only true if the SpanContext was propagated from a remote parent.\n */\n isRemote?: boolean;\n /**\n * Trace flags to propagate.\n *\n * It is represented as 1 byte (bitmap). Bit to represent whether trace is\n * sampled or not. When set, the least significant bit documents that the\n * caller may have recorded trace data. A caller who does not record trace\n * data out-of-band leaves this flag unset.\n *\n * see {@link TraceFlags} for valid flag values.\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts new file mode 100644 index 0000000..a89846f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts @@ -0,0 +1,27 @@ +export declare enum SpanKind { + /** Default value. 
Indicates that the span is used internally. */ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} +//# sourceMappingURL=span_kind.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.js b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js new file mode 100644 index 0000000..9c06e2c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SpanKind = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind = exports.SpanKind || (exports.SpanKind = {})); +//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map new file mode 100644 index 0000000..c0ba360 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_kind.js","sourceRoot":"","sources":["../../../src/trace/span_kind.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,IAAY,QA6BX;AA7BD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IAEZ;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;;OAIG;IACH,+CAAY,CAAA;IAEZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EA7BW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QA6BnB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts new file mode 100644 index 0000000..f191111 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts @@ -0,0 +1,17 @@ +import { Span } from './span'; +import { SpanContext } from './span_context'; +export declare function isValidTraceId(traceId: string): boolean; +export declare function isValidSpanId(spanId: string): boolean; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export declare function isSpanContextValid(spanContext: SpanContext): boolean; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export declare function wrapSpanContext(spanContext: SpanContext): Span; +//# sourceMappingURL=spancontext-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js new file mode 100644 index 0000000..dc88f5e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const invalid_span_constants_1 = require("./invalid-span-constants"); +const NonRecordingSpan_1 = require("./NonRecordingSpan"); +const VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +const VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; +} +exports.isValidTraceId = isValidTraceId; +function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; +} +exports.isValidSpanId = isValidSpanId; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +exports.isSpanContextValid = isSpanContextValid; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +function wrapSpanContext(spanContext) { + return new NonRecordingSpan_1.NonRecordingSpan(spanContext); +} +exports.wrapSpanContext = wrapSpanContext; +//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map new file mode 100644 index 0000000..58358a0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"spancontext-utils.js","sourceRoot":"","sources":["../../../src/trace/spancontext-utils.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,qEAA2E;AAC3E,yDAAsD;AAItD,MAAM,mBAAmB,GAAG,mBAAmB,CAAC;AAChD,MAAM,kBAAkB,GAAG,iBAAiB,CAAC;AAE7C,SAAgB,cAAc,CAAC,OAAe;IAC5C,OAAO,mBAAmB,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,OAAO,KAAK,wCAAe,CAAC;AAC1E,CAAC;AAFD,wCAEC;AAED,SAAgB,aAAa,CAAC,MAAc;IAC1C,OAAO,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,MAAM,KAAK,uCAAc,CAAC;AACtE,CAAC;AAFD,sCAEC;AAED;;;GAGG;AACH,SAAgB,kBAAkB,CAAC,WAAwB;IACzD,OAAO,CACL,cAAc,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CACzE,CAAC;AACJ,CAAC;AAJD,gDAIC;AAED;;;;;GAKG;AACH,SAAgB,eAAe,CAAC,WAAwB;IACtD,OAAO,IAAI,mCAAgB,CAAC,WAAW,CAAC,CAAC;AAC3C,CAAC;AAFD,0CAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\n\nconst VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i;\nconst VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i;\n\nexport function isValidTraceId(traceId: string): boolean {\n return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID;\n}\n\nexport function isValidSpanId(spanId: string): boolean {\n return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID;\n}\n\n/**\n * Returns true if this {@link SpanContext} is valid.\n * @return true if this {@link SpanContext} is valid.\n */\nexport function isSpanContextValid(spanContext: SpanContext): boolean {\n return (\n isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)\n );\n}\n\n/**\n * Wrap the given {@link SpanContext} in a new non-recording {@link Span}\n *\n * @param spanContext span context to be wrapped\n * @returns a new non-recording {@link Span} with the provided context\n */\nexport function wrapSpanContext(spanContext: SpanContext): Span {\n return new NonRecordingSpan(spanContext);\n}\n"]} \ No newline at end 
of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.d.ts b/node_modules/@opentelemetry/api/build/src/trace/status.d.ts new file mode 100644 index 0000000..ab19a68 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.d.ts @@ -0,0 +1,25 @@ +export interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} +/** + * An enumeration of status codes. + */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} +//# sourceMappingURL=status.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.js b/node_modules/@opentelemetry/api/build/src/trace/status.js new file mode 100644 index 0000000..50cbdef --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SpanStatusCode = void 0; +/** + * An enumeration of status codes. + */ +var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {})); +//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.js.map b/node_modules/@opentelemetry/api/build/src/trace/status.js.map new file mode 100644 index 0000000..b921cae --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.js.map @@ -0,0 +1 @@ +{"version":3,"file":"status.js","sourceRoot":"","sources":["../../../src/trace/status.ts"],"names":[],"mappings":";;;AAsBA;;GAEG;AACH,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,GAAd,sBAAc,KAAd,sBAAc,QAczB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. 
*/\n message?: string;\n}\n\n/**\n * An enumeration of status codes.\n */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts new file mode 100644 index 0000000..11288ba --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts @@ -0,0 +1,7 @@ +export declare enum TraceFlags { + /** Represents no flag set. */ + NONE = 0, + /** Bit to represent whether trace is sampled in trace flags. */ + SAMPLED = 1 +} +//# sourceMappingURL=trace_flags.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js new file mode 100644 index 0000000..f8d4dd8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceFlags = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. 
*/ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map new file mode 100644 index 0000000..965ce79 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_flags.js","sourceRoot":"","sources":["../../../src/trace/trace_flags.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,IAAY,UAKX;AALD,WAAY,UAAU;IACpB,8BAA8B;IAC9B,2CAAU,CAAA;IACV,gEAAgE;IAChE,iDAAkB,CAAA;AACpB,CAAC,EALW,UAAU,GAAV,kBAAU,KAAV,kBAAU,QAKrB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum TraceFlags {\n /** Represents no flag set. */\n NONE = 0x0,\n /** Bit to represent whether trace is sampled in trace flags. */\n SAMPLED = 0x1 << 0,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts b/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts new file mode 100644 index 0000000..f275b8b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts @@ -0,0 +1,38 @@ +export interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key key of the TraceState entry. + * @param value value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. 
+ */ + serialize(): string; +} +//# sourceMappingURL=trace_state.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.js b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js new file mode 100644 index 0000000..1397038 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map new file mode 100644 index 0000000..267f2e8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_state.js","sourceRoot":"","sources":["../../../src/trace/trace_state.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key key of the TraceState entry.\n * @param value value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n\n // TODO: Consider to add support for merging an object as well by also\n // accepting a single internalTraceState argument similar to the constructor.\n\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. 
Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts new file mode 100644 index 0000000..2509089 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts @@ -0,0 +1,71 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +/** + * Tracer provides an interface for creating {@link Span}s. + */ +export interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method do NOT modify the current Context. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @returns Span The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally the new span gets set in context and this context is activated + * for the duration of the function call. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.startActiveSpan('op', span => { + * try { + * do some work + * span.setStatus({code: SpanStatusCode.OK}); + * return something; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } finally { + * span.end(); + * } + * }); + * + * @example + * const span = tracer.startActiveSpan('op', span => { + * try { + * do some work + * return span; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } + * }); + * do some more work + * span.end(); + */ + startActiveSpan unknown>(name: string, fn: F): ReturnType; + startActiveSpan unknown>(name: string, options: SpanOptions, fn: F): ReturnType; + startActiveSpan unknown>(name: string, options: SpanOptions, context: Context, fn: F): ReturnType; +} +//# sourceMappingURL=tracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.js b/node_modules/@opentelemetry/api/build/src/trace/tracer.js new file mode 100644 index 0000000..d710ef9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map new file mode 100644 index 0000000..3c3c591 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer.js","sourceRoot":"","sources":["../../../src/trace/tracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\n\n/**\n * Tracer provides an interface for creating {@link Span}s.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. Start the span without setting it on context.\n *\n * This method do NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @returns Span The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally the new span gets set in context and this context is activated\n * for the duration of the function call.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * span.setStatus({code: SpanStatusCode.OK});\n * return something;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * } finally {\n * span.end();\n * }\n * });\n *\n * @example\n * const span = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * return span;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * }\n * });\n * do some more work\n * span.end();\n */\n startActiveSpan unknown>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts
new file mode 100644
index 0000000..f3bbccf
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts
@@ -0,0 +1,10 @@
+/**
+ * An interface describes additional metadata of a tracer.
+ */
+export interface TracerOptions {
+    /**
+     * The schemaUrl of the tracer or instrumentation library
+     */
+    schemaUrl?: string;
+}
+//# sourceMappingURL=tracer_options.d.ts.map
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js
new file mode 100644
index 0000000..3547251
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js
@@ -0,0 +1,18 @@
+"use strict";
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=tracer_options.js.map
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map
new file mode 100644
index 0000000..a743252
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tracer_options.js","sourceRoot":"","sources":["../../../src/trace/tracer_options.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * An interface describes additional metadata of a tracer.\n */\nexport interface TracerOptions {\n /**\n * The schemaUrl of the tracer or instrumentation library\n */\n schemaUrl?: string;\n}\n"]}
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts
new file mode 100644
index 0000000..9b2f7a9
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts
@@ -0,0 +1,21 @@
+import { Tracer } from './tracer';
+import { TracerOptions } from './tracer_options';
+/**
+ * A registry for creating named {@link Tracer}s.
+ */
+export interface TracerProvider {
+    /**
+     * Returns a Tracer, creating one if one with the given name and version is
+     * not already created.
+     *
+     * This function may return different Tracer types (e.g.
+     * {@link NoopTracerProvider} vs. a functional tracer).
+     *
+     * @param name The name of the tracer or instrumentation library.
+     * @param version The version of the tracer or instrumentation library.
+     * @param options The options of the tracer or instrumentation library.
+     * @returns Tracer A Tracer with the given name and version
+     */
+    getTracer(name: string, version?: string, options?: TracerOptions): Tracer;
+}
+//# sourceMappingURL=tracer_provider.d.ts.map
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js
new file mode 100644
index 0000000..4c511db
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js
@@ -0,0 +1,18 @@
+"use strict";
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=tracer_provider.js.map
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map
new file mode 100644
index 0000000..31f334a
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tracer_provider.js","sourceRoot":"","sources":["../../../src/trace/tracer_provider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\n/**\n * A registry for creating named {@link Tracer}s.\n */\nexport interface TracerProvider {\n /**\n * Returns a Tracer, creating one if one with the given name and version is\n * not already created.\n *\n * This function may return different Tracer types (e.g.\n * {@link NoopTracerProvider} vs. a functional tracer).\n *\n * @param name The name of the tracer or instrumentation library.\n * @param version The version of the tracer or instrumentation library.\n * @param options The options of the tracer or instrumentation library.\n * @returns Tracer A Tracer with the given name and version\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer;\n}\n"]}
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/build/src/version.d.ts b/node_modules/@opentelemetry/api/build/src/version.d.ts
new file mode 100644
index 0000000..5088f1b
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/version.d.ts
@@ -0,0 +1,2 @@
+export declare const VERSION = "1.8.0";
+//# sourceMappingURL=version.d.ts.map
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/build/src/version.js b/node_modules/@opentelemetry/api/build/src/version.js
new file mode 100644
index 0000000..c360774
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/version.js
@@ -0,0 +1,21 @@
+"use strict";
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.VERSION = void 0;
+// this is autogenerated file, see scripts/version-update.js
+exports.VERSION = '1.8.0';
+//# sourceMappingURL=version.js.map
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/build/src/version.js.map b/node_modules/@opentelemetry/api/build/src/version.js.map
new file mode 100644
index 0000000..fcaa99f
--- /dev/null
+++ b/node_modules/@opentelemetry/api/build/src/version.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,4DAA4D;AAC/C,QAAA,OAAO,GAAG,OAAO,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// this is autogenerated file, see scripts/version-update.js\nexport const VERSION = '1.8.0';\n"]}
\ No newline at end of file
diff --git a/node_modules/@opentelemetry/api/package.json b/node_modules/@opentelemetry/api/package.json
new file mode 100644
index 0000000..081d2e0
--- /dev/null
+++ b/node_modules/@opentelemetry/api/package.json
@@ -0,0 +1,113 @@
+{
+  "name": "@opentelemetry/api",
+  "version": "1.8.0",
+  "description": "Public API for OpenTelemetry",
+  "main": "build/src/index.js",
+  "module": "build/esm/index.js",
+  "esnext":
"build/esnext/index.js", + "types": "build/src/index.d.ts", + "browser": { + "./src/platform/index.ts": "./src/platform/browser/index.ts", + "./build/esm/platform/index.js": "./build/esm/platform/browser/index.js", + "./build/esnext/platform/index.js": "./build/esnext/platform/browser/index.js", + "./build/src/platform/index.js": "./build/src/platform/browser/index.js" + }, + "exports": { + ".": { + "module": "./build/esm/index.js", + "esnext": "./build/esnext/index.js", + "types": "./build/src/index.d.ts", + "default": "./build/src/index.js" + }, + "./experimental": { + "module": "./build/esm/experimental/index.js", + "esnext": "./build/esnext/experimental/index.js", + "types": "./build/src/experimental/index.d.ts", + "default": "./build/src/experimental/index.js" + } + }, + "repository": "open-telemetry/opentelemetry-js", + "scripts": { + "clean": "tsc --build --clean tsconfig.json tsconfig.esm.json tsconfig.esnext.json", + "codecov:browser": "nyc report --reporter=json && codecov -f coverage/*.json -p ../", + "codecov:webworker": "nyc report --reporter=json && codecov -f coverage/*.json -p ../", + "codecov": "nyc report --reporter=json && codecov -f coverage/*.json -p ../", + "precompile": "cross-var lerna run version --scope $npm_package_name --include-dependencies", + "compile": "tsc --build tsconfig.json tsconfig.esm.json tsconfig.esnext.json", + "docs": "typedoc", + "docs:deploy": "gh-pages --dist docs/out", + "docs:test": "linkinator docs/out --silent && linkinator docs/*.md *.md --markdown --silent", + "lint:fix": "eslint . --ext .ts --fix", + "lint": "eslint . --ext .ts", + "test:browser": "karma start --single-run", + "test": "nyc ts-mocha -p tsconfig.json 'test/**/*.test.ts'", + "test:eol": "ts-mocha -p tsconfig.json 'test/**/*.test.ts'", + "test:webworker": "karma start karma.worker.js --single-run", + "cycle-check": "dpdm --exit-code circular:1 src/index.ts", + "version": "node ../scripts/version-update.js", + "prewatch": "npm run precompile", + "watch": "tsc --build --watch tsconfig.json tsconfig.esm.json tsconfig.esnext.json", + "peer-api-check": "node ../scripts/peer-api-check.js" + }, + "keywords": [ + "opentelemetry", + "nodejs", + "browser", + "tracing", + "profiling", + "stats", + "monitoring" + ], + "author": "OpenTelemetry Authors", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + }, + "files": [ + "build/esm/**/*.js", + "build/esm/**/*.js.map", + "build/esm/**/*.d.ts", + "build/esnext/**/*.js", + "build/esnext/**/*.js.map", + "build/esnext/**/*.d.ts", + "build/src/**/*.js", + "build/src/**/*.js.map", + "build/src/**/*.d.ts", + "LICENSE", + "README.md" + ], + "publishConfig": { + "access": "public" + }, + "devDependencies": { + "@types/mocha": "10.0.6", + "@types/node": "18.6.5", + "@types/sinon": "10.0.20", + "@types/webpack": "5.28.5", + "@types/webpack-env": "1.16.3", + "babel-plugin-istanbul": "6.1.1", + "codecov": "3.8.3", + "cross-var": "1.1.0", + "dpdm": "3.13.1", + "karma": "6.4.2", + "karma-chrome-launcher": "3.1.0", + "karma-coverage": "2.2.1", + "karma-mocha": "2.0.1", + "karma-mocha-webworker": "1.3.0", + "karma-spec-reporter": "0.0.36", + "karma-webpack": "4.0.2", + "lerna": "6.6.2", + "memfs": "3.5.3", + "mocha": "10.2.0", + "nyc": "15.1.0", + "sinon": "15.1.2", + "ts-loader": "8.4.0", + "ts-mocha": "10.0.0", + "typescript": "4.4.4", + "unionfs": "4.5.1", + "webpack": "5.89.0" + }, + "homepage": "https://github.com/open-telemetry/opentelemetry-js/tree/main/api", + "sideEffects": false, + "gitHead": 
"7be35c7845e206b27b682e8ce1cee850b09cec04" +} diff --git a/node_modules/@types/node-fetch/LICENSE b/node_modules/@types/node-fetch/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/node-fetch/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/node-fetch/README.md b/node_modules/@types/node-fetch/README.md new file mode 100644 index 0000000..13425ed --- /dev/null +++ b/node_modules/@types/node-fetch/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/node-fetch` + +# Summary +This package contains type definitions for node-fetch (https://github.com/bitinn/node-fetch). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch. + +### Additional Details + * Last updated: Tue, 16 Jan 2024 23:07:00 GMT + * Dependencies: [@types/node](https://npmjs.com/package/@types/node), [form-data](https://npmjs.com/package/form-data) + +# Credits +These definitions were written by [Torsten Werner](https://github.com/torstenwerner), [Niklas Lindgren](https://github.com/nikcorg), [Vinay Bedre](https://github.com/vinaybedre), [Antonio Román](https://github.com/kyranet), [Andrew Leedham](https://github.com/AndrewLeedham), [Jason Li](https://github.com/JasonLi914), [Steve Faulkner](https://github.com/southpolesteve), [ExE Boss](https://github.com/ExE-Boss), [Alex Savin](https://github.com/alexandrusavin), [Alexis Tyler](https://github.com/OmgImAlexis), [Jakub Kisielewski](https://github.com/kbkk), and [David Glasser](https://github.com/glasser). diff --git a/node_modules/@types/node-fetch/externals.d.ts b/node_modules/@types/node-fetch/externals.d.ts new file mode 100644 index 0000000..20f1467 --- /dev/null +++ b/node_modules/@types/node-fetch/externals.d.ts @@ -0,0 +1,32 @@ +// `AbortSignal` is defined here to prevent a dependency on a particular +// implementation like the `abort-controller` package, and to avoid requiring +// the `dom` library in `tsconfig.json`. 
+ +export interface AbortSignal { + aborted: boolean; + reason: any; + + addEventListener: ( + type: "abort", + listener: (this: AbortSignal, event: any) => any, + options?: boolean | { + capture?: boolean | undefined; + once?: boolean | undefined; + passive?: boolean | undefined; + }, + ) => void; + + removeEventListener: ( + type: "abort", + listener: (this: AbortSignal, event: any) => any, + options?: boolean | { + capture?: boolean | undefined; + }, + ) => void; + + dispatchEvent: (event: any) => boolean; + + onabort: null | ((this: AbortSignal, event: any) => any); + + throwIfAborted(): void; +} diff --git a/node_modules/@types/node-fetch/index.d.ts b/node_modules/@types/node-fetch/index.d.ts new file mode 100644 index 0000000..9000ab9 --- /dev/null +++ b/node_modules/@types/node-fetch/index.d.ts @@ -0,0 +1,238 @@ +/// + +import FormData = require("form-data"); +import { RequestOptions } from "http"; +import { URL, URLSearchParams } from "url"; +import { AbortSignal } from "./externals"; + +declare class Request extends Body { + constructor(input: RequestInfo, init?: RequestInit); + clone(): Request; + context: RequestContext; + headers: Headers; + method: string; + redirect: RequestRedirect; + referrer: string; + url: string; + + // node-fetch extensions to the whatwg/fetch spec + agent?: RequestOptions["agent"] | ((parsedUrl: URL) => RequestOptions["agent"]); + compress: boolean; + counter: number; + follow: number; + hostname: string; + port?: number | undefined; + protocol: string; + size: number; + timeout: number; +} + +interface RequestInit { + // whatwg/fetch standard options + body?: BodyInit | undefined; + headers?: HeadersInit | undefined; + method?: string | undefined; + redirect?: RequestRedirect | undefined; + signal?: AbortSignal | null | undefined; + + // node-fetch extensions + agent?: RequestOptions["agent"] | ((parsedUrl: URL) => RequestOptions["agent"]); // =null http.Agent instance, allows custom proxy, certificate etc. + compress?: boolean | undefined; // =true support gzip/deflate content encoding. false to disable + follow?: number | undefined; // =20 maximum redirect count. 0 to not follow redirect + size?: number | undefined; // =0 maximum response body size in bytes. 0 to disable + timeout?: number | undefined; // =0 req/res timeout in ms, it resets on redirect. 
0 to disable (OS limit applies) + + // node-fetch does not support mode, cache or credentials options +} + +type RequestContext = + | "audio" + | "beacon" + | "cspreport" + | "download" + | "embed" + | "eventsource" + | "favicon" + | "fetch" + | "font" + | "form" + | "frame" + | "hyperlink" + | "iframe" + | "image" + | "imageset" + | "import" + | "internal" + | "location" + | "manifest" + | "object" + | "ping" + | "plugin" + | "prefetch" + | "script" + | "serviceworker" + | "sharedworker" + | "style" + | "subresource" + | "track" + | "video" + | "worker" + | "xmlhttprequest" + | "xslt"; +type RequestMode = "cors" | "no-cors" | "same-origin"; +type RequestRedirect = "error" | "follow" | "manual"; +type RequestCredentials = "omit" | "include" | "same-origin"; + +type RequestCache = + | "default" + | "force-cache" + | "no-cache" + | "no-store" + | "only-if-cached" + | "reload"; + +declare class Headers implements Iterable<[string, string]> { + constructor(init?: HeadersInit); + forEach(callback: (value: string, name: string) => void): void; + append(name: string, value: string): void; + delete(name: string): void; + get(name: string): string | null; + has(name: string): boolean; + raw(): { [k: string]: string[] }; + set(name: string, value: string): void; + + // Iterable methods + entries(): IterableIterator<[string, string]>; + keys(): IterableIterator; + values(): IterableIterator; + [Symbol.iterator](): Iterator<[string, string]>; +} + +type BlobPart = ArrayBuffer | ArrayBufferView | Blob | string; + +interface BlobOptions { + type?: string | undefined; + endings?: "transparent" | "native" | undefined; +} + +declare class Blob { + constructor(blobParts?: BlobPart[], options?: BlobOptions); + readonly type: string; + readonly size: number; + slice(start?: number, end?: number): Blob; + text(): Promise; +} + +declare class Body { + constructor(body?: any, opts?: { size?: number | undefined; timeout?: number | undefined }); + arrayBuffer(): Promise; + blob(): Promise; + body: NodeJS.ReadableStream; + bodyUsed: boolean; + buffer(): Promise; + json(): Promise; + size: number; + text(): Promise; + textConverted(): Promise; + timeout: number; +} + +interface SystemError extends Error { + code?: string | undefined; +} + +declare class AbortError extends Error { + readonly name: "AbortError"; + constructor(message: string); + readonly type: "aborted"; +} + +declare class FetchError extends Error { + name: "FetchError"; + constructor(message: string, type: string, systemError?: SystemError); + type: string; + code?: string | undefined; + errno?: string | undefined; +} + +declare class Response extends Body { + constructor(body?: BodyInit, init?: ResponseInit); + static error(): Response; + static redirect(url: string, status: number): Response; + clone(): Response; + headers: Headers; + ok: boolean; + redirected: boolean; + status: number; + statusText: string; + type: ResponseType; + url: string; +} + +type ResponseType = + | "basic" + | "cors" + | "default" + | "error" + | "opaque" + | "opaqueredirect"; + +interface ResponseInit { + headers?: HeadersInit | undefined; + size?: number | undefined; + status?: number | undefined; + statusText?: string | undefined; + timeout?: number | undefined; + url?: string | undefined; + counter?: number | undefined; +} + +interface URLLike { + href: string; +} + +type HeadersInit = Headers | string[][] | { [key: string]: string | string[] }; +type BodyInit = + | ArrayBuffer + | ArrayBufferView + | NodeJS.ReadableStream + | string + | URLSearchParams + | FormData; 
+type RequestInfo = string | URLLike | Request; + +declare function fetch( + url: RequestInfo, + init?: RequestInit, +): Promise; + +declare namespace fetch { + export { + AbortError, + Blob, + Body, + BodyInit, + FetchError, + Headers, + HeadersInit, + // HeaderInit is exported to support backwards compatibility. See PR #34382 + HeadersInit as HeaderInit, + Request, + RequestCache, + RequestContext, + RequestCredentials, + RequestInfo, + RequestInit, + RequestMode, + RequestRedirect, + Response, + ResponseInit, + ResponseType, + }; + export function isRedirect(code: number): boolean; + + import _default = fetch; + export { _default as default }; +} + +export = fetch; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/License b/node_modules/@types/node-fetch/node_modules/form-data/License new file mode 100644 index 0000000..c7ff12a --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak b/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak new file mode 100644 index 0000000..298a1a2 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/Readme.md b/node_modules/@types/node-fetch/node_modules/form-data/Readme.md new file mode 100644 index 0000000..298a1a2 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/Readme.md @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts b/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts new file mode 100644 index 0000000..295e9e9 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts @@ -0,0 +1,62 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +// Extracted because @types/node doesn't export interfaces. 
+interface ReadableOptions { + highWaterMark?: number; + encoding?: string; + objectMode?: boolean; + read?(this: stream.Readable, size: number): void; + destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean; +} + +interface Options extends ReadableOptions { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(userHeaders?: FormData.Headers): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + setBoundary(boundary: string): void; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000..09e7c70 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000..18dc819 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js @@ -0,0 +1,501 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var Stream = require('stream').Stream; +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } + + // add content length + if (length) { + request.setHeader('Content-Length', length); + } + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000..4d35738 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/package.json b/node_modules/@types/node-fetch/node_modules/form-data/package.json new file mode 100644 index 0000000..0f20240 --- /dev/null +++ 
b/node_modules/@types/node-fetch/node_modules/form-data/package.json @@ -0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", + "version": "4.0.0", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 6" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "puppeteer": "^1.19.0", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/@types/node-fetch/package.json b/node_modules/@types/node-fetch/package.json new file mode 100644 index 0000000..84884d5 --- /dev/null +++ b/node_modules/@types/node-fetch/package.json @@ -0,0 +1,83 @@ +{ + "name": "@types/node-fetch", + "version": "2.6.11", + "description": "TypeScript definitions for node-fetch", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch", + "license": "MIT", + "contributors": [ + { + "name": "Torsten Werner", + "githubUsername": "torstenwerner", + "url": "https://github.com/torstenwerner" + }, + { + "name": "Niklas Lindgren", + "githubUsername": "nikcorg", + "url": "https://github.com/nikcorg" + }, + { + "name": "Vinay Bedre", + "githubUsername": "vinaybedre", + "url": "https://github.com/vinaybedre" + }, + { + "name": "Antonio Román", + "githubUsername": "kyranet", + "url": "https://github.com/kyranet" + }, + { + "name": "Andrew Leedham", + "githubUsername": "AndrewLeedham", + "url": "https://github.com/AndrewLeedham" + }, + { + "name": "Jason Li", + "githubUsername": "JasonLi914", + "url": "https://github.com/JasonLi914" + }, + { + "name": "Steve Faulkner", + "githubUsername": "southpolesteve", + "url": "https://github.com/southpolesteve" + }, + { + "name": 
"ExE Boss", + "githubUsername": "ExE-Boss", + "url": "https://github.com/ExE-Boss" + }, + { + "name": "Alex Savin", + "githubUsername": "alexandrusavin", + "url": "https://github.com/alexandrusavin" + }, + { + "name": "Alexis Tyler", + "githubUsername": "OmgImAlexis", + "url": "https://github.com/OmgImAlexis" + }, + { + "name": "Jakub Kisielewski", + "githubUsername": "kbkk", + "url": "https://github.com/kbkk" + }, + { + "name": "David Glasser", + "githubUsername": "glasser", + "url": "https://github.com/glasser" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/node-fetch" + }, + "scripts": {}, + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + }, + "typesPublisherContentHash": "912e2c03935d0f960f529de6f688e52c77e78b7ca935198cd500804e69ea371f", + "typeScriptVersion": "4.6" +} \ No newline at end of file diff --git a/node_modules/@types/tunnel/LICENSE b/node_modules/@types/tunnel/LICENSE new file mode 100755 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/tunnel/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/tunnel/README.md b/node_modules/@types/tunnel/README.md new file mode 100755 index 0000000..a2d8b61 --- /dev/null +++ b/node_modules/@types/tunnel/README.md @@ -0,0 +1,67 @@ +# Installation +> `npm install --save @types/tunnel` + +# Summary +This package contains type definitions for tunnel (https://github.com/koichik/node-tunnel/). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel. 
+## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel/index.d.ts) +````ts +// Type definitions for tunnel 0.0 +// Project: https://github.com/koichik/node-tunnel/ +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// +import { Agent } from 'http'; +import { Agent as HttpsAgent } from 'https'; + +export function httpOverHttp(options?: HttpOptions): Agent; +export function httpsOverHttp(options?: HttpsOverHttpOptions): Agent; +export function httpOverHttps(options?: HttpOverHttpsOptions): HttpsAgent; +export function httpsOverHttps(options?: HttpsOverHttpsOptions): HttpsAgent; + +export interface HttpOptions { + maxSockets?: number | undefined; + proxy?: ProxyOptions | undefined; +} + +export interface HttpsOverHttpOptions extends HttpOptions { + ca?: Buffer[] | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +export interface HttpOverHttpsOptions extends HttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface HttpsOverHttpsOptions extends HttpsOverHttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface ProxyOptions { + host: string; + port: number; + localAddress?: string | undefined; + proxyAuth?: string | undefined; + headers?: { [key: string]: any } | undefined; +} + +export interface HttpsProxyOptions extends ProxyOptions { + ca?: Buffer[] | undefined; + servername?: string | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +```` + +### Additional Details + * Last updated: Fri, 02 Jul 2021 19:37:21 GMT + * Dependencies: [@types/node](https://npmjs.com/package/@types/node) + * Global values: none + +# Credits +These definitions were written by [BendingBender](https://github.com/BendingBender). 
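Editor's note (not part of the vendored `@types/tunnel` package): a minimal sketch of how these typings are consumed. The agent returned by `httpsOverHttp` is a plain `http.Agent`, so it slots straight into Node's standard `https` request options. The proxy host and port below are placeholders.

```ts
import * as tunnel from "tunnel";
import * as https from "https";

// Build an agent that tunnels HTTPS traffic through a plain-HTTP proxy.
// The options object matches the HttpsOverHttpOptions interface typed above.
const agent = tunnel.httpsOverHttp({
  proxy: {
    host: "proxy.example.com", // hypothetical proxy host
    port: 3128,                // hypothetical proxy port
  },
});

// The agent can be passed to https.get/https.request like any other Agent.
https.get({ host: "registry.npmjs.org", path: "/", agent }, (res) => {
  console.log(res.statusCode);
});
```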
diff --git a/node_modules/@types/tunnel/index.d.ts b/node_modules/@types/tunnel/index.d.ts new file mode 100755 index 0000000..591eb21 --- /dev/null +++ b/node_modules/@types/tunnel/index.d.ts @@ -0,0 +1,47 @@ +// Type definitions for tunnel 0.0 +// Project: https://github.com/koichik/node-tunnel/ +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// +import { Agent } from 'http'; +import { Agent as HttpsAgent } from 'https'; + +export function httpOverHttp(options?: HttpOptions): Agent; +export function httpsOverHttp(options?: HttpsOverHttpOptions): Agent; +export function httpOverHttps(options?: HttpOverHttpsOptions): HttpsAgent; +export function httpsOverHttps(options?: HttpsOverHttpsOptions): HttpsAgent; + +export interface HttpOptions { + maxSockets?: number | undefined; + proxy?: ProxyOptions | undefined; +} + +export interface HttpsOverHttpOptions extends HttpOptions { + ca?: Buffer[] | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +export interface HttpOverHttpsOptions extends HttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface HttpsOverHttpsOptions extends HttpsOverHttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface ProxyOptions { + host: string; + port: number; + localAddress?: string | undefined; + proxyAuth?: string | undefined; + headers?: { [key: string]: any } | undefined; +} + +export interface HttpsProxyOptions extends ProxyOptions { + ca?: Buffer[] | undefined; + servername?: string | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} diff --git a/node_modules/@types/tunnel/package.json b/node_modules/@types/tunnel/package.json new file mode 100755 index 0000000..5d3c8bb --- /dev/null +++ b/node_modules/@types/tunnel/package.json @@ -0,0 +1,27 @@ +{ + "name": "@types/tunnel", + "version": "0.0.3", + "description": "TypeScript definitions for tunnel", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel", + "license": "MIT", + "contributors": [ + { + "name": "BendingBender", + "url": "https://github.com/BendingBender", + "githubUsername": "BendingBender" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/tunnel" + }, + "scripts": {}, + "dependencies": { + "@types/node": "*" + }, + "typesPublisherContentHash": "049a52929c6badcca87fc0ebc590bc49744a75f883bf18df77393ba05ddb4e7b", + "typeScriptVersion": "3.6" +} \ No newline at end of file diff --git a/node_modules/abort-controller/LICENSE b/node_modules/abort-controller/LICENSE new file mode 100644 index 0000000..c914149 --- /dev/null +++ b/node_modules/abort-controller/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Toru Nagashima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/abort-controller/README.md b/node_modules/abort-controller/README.md new file mode 100644 index 0000000..9de3e45 --- /dev/null +++ b/node_modules/abort-controller/README.md @@ -0,0 +1,98 @@ +# abort-controller + +[![npm version](https://img.shields.io/npm/v/abort-controller.svg)](https://www.npmjs.com/package/abort-controller) +[![Downloads/month](https://img.shields.io/npm/dm/abort-controller.svg)](http://www.npmtrends.com/abort-controller) +[![Build Status](https://travis-ci.org/mysticatea/abort-controller.svg?branch=master)](https://travis-ci.org/mysticatea/abort-controller) +[![Coverage Status](https://codecov.io/gh/mysticatea/abort-controller/branch/master/graph/badge.svg)](https://codecov.io/gh/mysticatea/abort-controller) +[![Dependency Status](https://david-dm.org/mysticatea/abort-controller.svg)](https://david-dm.org/mysticatea/abort-controller) + +An implementation of [WHATWG AbortController interface](https://dom.spec.whatwg.org/#interface-abortcontroller). + +```js +import AbortController from "abort-controller" + +const controller = new AbortController() +const signal = controller.signal + +signal.addEventListener("abort", () => { + console.log("aborted!") +}) + +controller.abort() +``` + +> https://jsfiddle.net/1r2994qp/1/ + +## 💿 Installation + +Use [npm](https://www.npmjs.com/) to install then use a bundler. + +``` +npm install abort-controller +``` + +Or download from [`dist` directory](./dist). + +- [dist/abort-controller.mjs](dist/abort-controller.mjs) ... ES modules version. +- [dist/abort-controller.js](dist/abort-controller.js) ... Common JS version. +- [dist/abort-controller.umd.js](dist/abort-controller.umd.js) ... UMD (Universal Module Definition) version. This is transpiled by [Babel](https://babeljs.io/) for IE 11. + +## 📖 Usage + +### Basic + +```js +import AbortController from "abort-controller" +// or +const AbortController = require("abort-controller") + +// or UMD version defines a global variable: +const AbortController = window.AbortControllerShim +``` + +If your bundler recognizes `browser` field of `package.json`, the imported `AbortController` is the native one and it doesn't contain shim (even if the native implementation was nothing). +If you wanted to polyfill `AbortController` for IE, use `abort-controller/polyfill`. + +### Polyfilling + +Importing `abort-controller/polyfill` assigns the `AbortController` shim to the `AbortController` global variable if the native implementation was nothing. + +```js +import "abort-controller/polyfill" +// or +require("abort-controller/polyfill") +``` + +### API + +#### AbortController + +> https://dom.spec.whatwg.org/#interface-abortcontroller + +##### controller.signal + +The [AbortSignal](https://dom.spec.whatwg.org/#interface-AbortSignal) object which is associated to this controller. + +##### controller.abort() + +Notify `abort` event to listeners that the `signal` has. + +## 📰 Changelog + +- See [GitHub releases](https://github.com/mysticatea/abort-controller/releases). 
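Editor's note (not part of the vendored README): the API section above only describes `controller.signal` and `controller.abort()`, so here is a small sketch, under the assumption that the shim's signal is wired to an ordinary timeout; the helper name and the timing values are illustrative.

```ts
import AbortController, { AbortSignal } from "abort-controller";

// Reject as soon as the given signal fires its "abort" event.
function rejectOnAbort(signal: AbortSignal): Promise<never> {
  return new Promise((_, reject) => {
    signal.addEventListener("abort", () => reject(new Error("aborted")));
  });
}

async function main() {
  const controller = new AbortController();

  // Abort the pending work after one second (illustrative timeout).
  const timer = setTimeout(() => controller.abort(), 1000);

  try {
    await Promise.race([
      new Promise((resolve) => setTimeout(resolve, 5000)), // stand-in for slow work
      rejectOnAbort(controller.signal),
    ]);
  } catch (err) {
    console.log((err as Error).message); // "aborted"
  } finally {
    clearTimeout(timer);
  }
}

main();
```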
+ +## 🍻 Contributing + +Contributing is welcome ❤️ + +Please use GitHub issues/PRs. + +### Development tools + +- `npm install` installs dependencies for development. +- `npm test` runs tests and measures code coverage. +- `npm run clean` removes temporary files of tests. +- `npm run coverage` opens code coverage of the previous test with your default browser. +- `npm run lint` runs ESLint. +- `npm run build` generates `dist` codes. +- `npm run watch` runs tests on each file change. diff --git a/node_modules/abort-controller/browser.js b/node_modules/abort-controller/browser.js new file mode 100644 index 0000000..b0c5ec3 --- /dev/null +++ b/node_modules/abort-controller/browser.js @@ -0,0 +1,13 @@ +/*globals self, window */ +"use strict" + +/*eslint-disable @mysticatea/prettier */ +const { AbortController, AbortSignal } = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +module.exports = AbortController +module.exports.AbortSignal = AbortSignal +module.exports.default = AbortController diff --git a/node_modules/abort-controller/browser.mjs b/node_modules/abort-controller/browser.mjs new file mode 100644 index 0000000..a8f321a --- /dev/null +++ b/node_modules/abort-controller/browser.mjs @@ -0,0 +1,11 @@ +/*globals self, window */ + +/*eslint-disable @mysticatea/prettier */ +const { AbortController, AbortSignal } = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +export default AbortController +export { AbortController, AbortSignal } diff --git a/node_modules/abort-controller/dist/abort-controller.d.ts b/node_modules/abort-controller/dist/abort-controller.d.ts new file mode 100644 index 0000000..75852fb --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.d.ts @@ -0,0 +1,43 @@ +import { EventTarget } from "event-target-shim" + +type Events = { + abort: any +} +type EventAttributes = { + onabort: any +} +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +declare class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() + /** + * Returns `true` if this `AbortSignal`"s `AbortController` has signaled to abort, and `false` otherwise. + */ + readonly aborted: boolean +} +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +declare class AbortController { + /** + * Initialize this controller. + */ + constructor() + /** + * Returns the `AbortSignal` object associated with this object. + */ + readonly signal: AbortSignal + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort(): void +} + +export default AbortController +export { AbortController, AbortSignal } diff --git a/node_modules/abort-controller/dist/abort-controller.js b/node_modules/abort-controller/dist/abort-controller.js new file mode 100644 index 0000000..49af739 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.js @@ -0,0 +1,127 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var eventTargetShim = require('event-target-shim'); + +/** + * The signal class. 
+ * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports.default = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map diff --git a/node_modules/abort-controller/dist/abort-controller.js.map b/node_modules/abort-controller/dist/abort-controller.js.map new file mode 100644 index 0000000..cfdcafd --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"abort-controller.js","sources":["../src/abort-signal.ts","../src/abort-controller.ts"],"sourcesContent":["import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":["EventTarget","defineEventAttribute"],"mappings":";;;;;;;;;;AAkBA;;;;AAIA,MAAqB,WAAY,SAAQA,2BAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACDC,oCAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnDD,2BAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/abort-controller/dist/abort-controller.mjs b/node_modules/abort-controller/dist/abort-controller.mjs new file mode 100644 index 0000000..88ba22d --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.mjs @@ -0,0 +1,118 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ +import { EventTarget, defineEventAttribute } from 'event-target-shim'; + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. 
+ */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +export default AbortController; +export { AbortController, AbortSignal }; +//# sourceMappingURL=abort-controller.mjs.map diff --git a/node_modules/abort-controller/dist/abort-controller.mjs.map b/node_modules/abort-controller/dist/abort-controller.mjs.map new file mode 100644 index 0000000..1e8fa6b --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"abort-controller.mjs","sources":["../src/abort-signal.ts","../src/abort-controller.ts"],"sourcesContent":["import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":[],"mappings":";;;;;;AAkBA;;;;AAIA,MAAqB,WAAY,SAAQ,WAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACD,oBAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;;;;"} \ No newline at end of file diff --git a/node_modules/abort-controller/dist/abort-controller.umd.js b/node_modules/abort-controller/dist/abort-controller.umd.js new file mode 100644 index 0000000..f643cfd --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.umd.js @@ -0,0 +1,5 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.AbortControllerShim={}))})(this,function(a){'use strict';function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function d(a,b){for(var c,d=0;d\n * @copyright 2015 Toru Nagashima. 
All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n 
return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexport default EventTarget;\nexport { defineEventAttribute, EventTarget };\n//# sourceMappingURL=event-target-shim.mjs.map\n","import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":["pd","event","retv","privateData","get","console","assert","setCancelFlag","data","passiveListener","cancelable","canceled","preventDefault","error","Event","eventTarget","set","eventPhase","currentTarget","stopped","immediateStopped","timeStamp","Date","now","Object","defineProperty","value","enumerable","key","keys","i","length","defineRedirectDescriptor","configurable","defineCallDescriptor","apply","arguments","defineWrapper","BaseEvent","proto","CustomEvent","call","prototype","create","constructor","writable","descriptor","getOwnPropertyDescriptor","isFunc","getWrapper","wrapper","wrappers","getPrototypeOf","wrapEvent","Wrapper","isStopped","setEventPhase","setCurrentTarget","setPassiveListener","createAbortSignal","signal","AbortSignal","EventTarget","abortedFlags","abortSignal","dispatchEvent","type","getSignal","controller","signals","TypeError","WeakMap","target","composedPath","NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","stopPropagation","stopImmediatePropagation","bubbles","defaultPrevented","composed","srcElement","cancelBubble","returnValue","initEvent","window","setPrototypeOf","aborted","defineEventAttribute","defineProperties","Symbol","_typeof","toStringTag","AbortController","abort"],"mappings":";;;+3CAkCA,QAASA,CAAAA,CAAT,CAAYC,CAAZ,CAAmB,IACTC,CAAAA,CAAI,CAAGC,CAAW,CAACC,GAAZ,CAAgBH,CAAhB,QACbI,CAAAA,OAAO,CAACC,MAAR,CACY,IAAR,EAAAJ,CADJ,CAEI,6CAFJ,CAGID,CAHJ,EAKOC,EAOX,QAASK,CAAAA,CAAT,CAAuBC,CAAvB,CAA6B,OACG,KAAxB,EAAAA,CAAI,CAACC,eADgB,MAarB,CAACD,CAAI,CAACP,KAAL,CAAWS,UAbS,GAiBzBF,CAAI,CAACG,QAAL,GAjByB,CAkBgB,UAArC,QAAOH,CAAAA,CAAI,CAACP,KAAL,CAAWW,cAlBG,EAmBrBJ,CAAI,CAACP,KAAL,CAAWW,cAAX,EAnBqB,QAGE,WAAnB,QAAOP,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAJE,EAMjBR,OAAO,CAACQ,KAAR,CACI,oEADJ,CAEIL,CAAI,CAACC,eAFT,CANiB,EAiC7B,QAASK,CAAAA,CAAT,CAAeC,CAAf,CAA4Bd,CAA5B,CAAmC,CAC/BE,CAAW,CAACa,GAAZ,CAAgB,IAAhB,CAAsB,CAClBD,WAAW,CAAXA,CADkB,CAElBd,KAAK,CAALA,CAFkB,CAGlBgB,UAAU,CAAE,CAHM,CAIlBC,aAAa,CAAEH,CAJG,CAKlBJ,QAAQ,GALU,CAMlBQ,OAAO,GANW,CAOlBC,gBAAgB,GAPE,CAQlBX,eAAe,CAAE,IARC,CASlBY,SAAS,CAAEpB,CAAK,CAACoB,SAAN,EAAmBC,IAAI,CAACC,GAAL,EATZ,CAAtB,CAD+B,CAc/BC,MAAM,CAACC,cAAP,CAAsB,IAAtB,CAA4B,WAA5B,CAAyC,CAAEC,KAAK,GAAP,CAAgBC,UAAU,GAA1B,CAAzC,CAd+B,QAmBrBC,CAAAA,EAFJC,CAAI,CAAGL,MAAM,CAACK,IAAP,CAAY5B,CAAZ,EACJ6B,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,EACzBF,EAAMC,CAAI,CAACC,CAAD,EACVF,CAAG,GAAI,OACTJ,MAAM,CAACC,cAAP,CAAsB,IAAtB,CAA4BG,CAA5B,CAAiCI,CAAwB,CAACJ,CAAD,CAAzD,EAyOZ,QAASI,CAAAA,CAAT,CAAkCJ,CAAlC,CAAuC,OAC5B,CACHxB,GADG,WACG,OACKJ,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe2B,CAAf,CAFR,CAAA,CAIHZ,GAJG,UAICU,EAAO,CACP1B,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe2B,CAAf,EAAsBF,CALvB,CAAA,CAOHO,YAAY,GAPT,CAQHN,UAAU,GARP,EAkBX,QAASO,CAAAA,CAAT,CAA8BN,CAA9B,CAAmC,OACxB,CACHF,KADG,WACK,IACEzB,CAAAA,CAAK,CAAGD,CAAE,CAAC,IAAD,CAAF,CAASC,YAChBA,CAAAA,CAAK,CAAC2B,CAAD,CAAL,CAAWO,KAAX,CAAiBlC,CAAjB,CAAwBmC,SAAxB,CAHR,CAAA,CAKHH,YAAY,GALT,CAMHN,UAAU,GANP,EAiBX,QAASU,CAAAA,CAAT,CAAuBC,CAAvB,CAAkCC,CAAlC,CAAyC,SAO5BC,CAAAA,EAAYzB,EAAad,EAAO,CACrCqC,CAAS,CAACG,IAAV,CAA
e,IAAf,CAAqB1B,CAArB,CAAkCd,CAAlC,KAPE4B,CAAAA,CAAI,CAAGL,MAAM,CAACK,IAAP,CAAYU,CAAZ,KACO,CAAhB,GAAAV,CAAI,CAACE,aACEO,CAAAA,EAQXE,CAAW,CAACE,SAAZ,CAAwBlB,MAAM,CAACmB,MAAP,CAAcL,CAAS,CAACI,SAAxB,CAAmC,CACvDE,WAAW,CAAE,CAAElB,KAAK,CAAEc,CAAT,CAAsBP,YAAY,GAAlC,CAA0CY,QAAQ,GAAlD,CAD0C,CAAnC,CAXa,KAgBhC,GACKjB,CAAAA,CADL,CAAIE,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,KACzBF,EAAMC,CAAI,CAACC,CAAD,EACZ,EAAEF,CAAG,GAAIU,CAAAA,CAAS,CAACI,SAAnB,EAA+B,IACzBI,CAAAA,CAAU,CAAGtB,MAAM,CAACuB,wBAAP,CAAgCR,CAAhC,CAAuCX,CAAvC,CADY,CAEzBoB,CAAM,CAA+B,UAA5B,QAAOF,CAAAA,CAAU,CAACpB,KAFF,CAG/BF,MAAM,CAACC,cAAP,CACIe,CAAW,CAACE,SADhB,CAEId,CAFJ,CAGIoB,CAAM,CACAd,CAAoB,CAACN,CAAD,CADpB,CAEAI,CAAwB,CAACJ,CAAD,CALlC,QAUDY,CAAAA,EASX,QAASS,CAAAA,CAAT,CAAoBV,CAApB,CAA2B,IACV,IAAT,EAAAA,CAAK,EAAYA,CAAK,GAAKf,MAAM,CAACkB,gBAC3B5B,CAAAA,KAGPoC,CAAAA,CAAO,CAAGC,CAAQ,CAAC/C,GAAT,CAAamC,CAAb,QACC,KAAX,EAAAW,IACAA,CAAO,CAAGb,CAAa,CAACY,CAAU,CAACzB,MAAM,CAAC4B,cAAP,CAAsBb,CAAtB,CAAD,CAAX,CAA2CA,CAA3C,EACvBY,CAAQ,CAACnC,GAAT,CAAauB,CAAb,CAAoBW,CAApB,GAEGA,EAUJ,QAASG,CAAAA,CAAT,CAAmBtC,CAAnB,CAAgCd,CAAhC,CAAuC,IACpCqD,CAAAA,CAAO,CAAGL,CAAU,CAACzB,MAAM,CAAC4B,cAAP,CAAsBnD,CAAtB,CAAD,QACnB,IAAIqD,CAAAA,CAAJ,CAAYvC,CAAZ,CAAyBd,CAAzB,EASJ,QAASsD,CAAAA,CAAT,CAAmBtD,CAAnB,CAA0B,OACtBD,CAAAA,CAAE,CAACC,CAAD,CAAF,CAAUmB,iBAUd,QAASoC,CAAAA,CAAT,CAAuBvD,CAAvB,CAA8BgB,CAA9B,CAA0C,CAC7CjB,CAAE,CAACC,CAAD,CAAF,CAAUgB,UAAV,CAAuBA,EAUpB,QAASwC,CAAAA,CAAT,CAA0BxD,CAA1B,CAAiCiB,CAAjC,CAAgD,CACnDlB,CAAE,CAACC,CAAD,CAAF,CAAUiB,aAAV,CAA0BA,EAUvB,QAASwC,CAAAA,CAAT,CAA4BzD,CAA5B,CAAmCQ,CAAnC,CAAoD,CACvDT,CAAE,CAACC,CAAD,CAAF,CAAUQ,eAAV,CAA4BA,ysCC1ahBkD,CAAAA,OACNC,CAAAA,CAAM,CAAGpC,MAAM,CAACmB,MAAPnB,CAAcqC,CAAW,CAACnB,SAA1BlB,QACfsC,CAAAA,CAAW,CAACrB,IAAZqB,CAAiBF,CAAjBE,EACAC,CAAY,CAAC/C,GAAb+C,CAAiBH,CAAjBG,KACOH,UAMKI,CAAAA,EAAYJ,GACpBG,KAAAA,CAAY,CAAC3D,GAAb2D,CAAiBH,CAAjBG,IAIJA,CAAY,CAAC/C,GAAb+C,CAAiBH,CAAjBG,KACAH,CAAM,CAACK,aAAPL,CAA8B,CAAEM,IAAI,CAAE,OAAR,CAA9BN,GC9BJ,QAASO,CAAAA,CAAT,CAAmBC,CAAnB,KACUR,CAAAA,CAAM,CAAGS,CAAO,CAACjE,GAARiE,CAAYD,CAAZC,KACD,IAAVT,EAAAA,OACM,IAAIU,CAAAA,SAAJ,sEAEiB,IAAfF,GAAAA,CAAU,CAAY,MAAZ,GAA4BA,GAFxC,QAMHR,CAAAA,KF3BLzD,CAAAA,CAAW,CAAG,GAAIoE,CAAAA,QAOlBpB,CAAQ,CAAG,GAAIoB,CAAAA,QAkFrBzD,CAAK,CAAC4B,SAAN,CAAkB,IAKVwB,CAAAA,MAAO,OACAlE,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeiE,IANZ,CAAA,IAaVM,CAAAA,QAAS,OACFxE,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASe,WAdN,CAAA,IAqBVG,CAAAA,eAAgB,OACTlB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASkB,aAtBN,CAAA,CA4BduD,YA5Bc,WA4BC,IACLvD,CAAAA,CAAa,CAAGlB,CAAE,CAAC,IAAD,CAAF,CAASkB,cADpB,MAEU,KAAjB,EAAAA,CAFO,CAGA,EAHA,CAKJ,CAACA,CAAD,CAjCG,CAAA,IAwCVwD,CAAAA,MAAO,OACA,EAzCG,CAAA,IAgDVC,CAAAA,iBAAkB,OACX,EAjDG,CAAA,IAwDVC,CAAAA,WAAY,OACL,EAzDG,CAAA,IAgEVC,CAAAA,gBAAiB,OACV,EAjEG,CAAA,IAwEV5D,CAAAA,YAAa,OACNjB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASiB,UAzEN,CAAA,CAgFd6D,eAhFc,WAgFI,IACRtE,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,GAHc,CAI4B,UAAtC,QAAOX,CAAAA,CAAI,CAACP,KAAL,CAAW6E,eAJR,EAKVtE,CAAI,CAACP,KAAL,CAAW6E,eAAX,EArFM,CAAA,CA6FdC,wBA7Fc,WA6Fa,IACjBvE,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,GAHuB,CAIvBX,CAAI,CAACY,gBAAL,GAJuB,CAK4B,UAA/C,QAAOZ,CAAAA,CAAI,CAACP,KAAL,CAAW8E,wBALC,EAMnBvE,CAAI,CAACP,KAAL,CAAW8E,wBAAX,EAnGM,CAAA,IA2GVC,CAAAA,SAAU,SACKhF,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe+E,OA5GpB,CAAA,IAmHVtE,CAAAA,YAAa,SACEV,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeS,UApHpB,CAAA,CA2HdE,cA3Hc,WA2HG,CACbL,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA5HH,CAAA,IAmIViF,CAAAA,kBAAmB,OACZjF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASW,QApIN,CAAA,IA2IVuE,CAAAA,UAAW,SACIlF,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT
,CAAeiF,QA5IpB,CAAA,IAmJV7D,CAAAA,WAAY,OACLrB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASqB,SApJN,CAAA,IA4JV8D,CAAAA,YAAa,OACNnF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASe,WA7JN,CAAA,IAqKVqE,CAAAA,cAAe,OACRpF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASmB,OAtKN,CAAA,IAwKViE,CAAAA,aAAa1D,EAAO,IACfA,MAGClB,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,IACuC,SAAnC,QAAOX,CAAAA,CAAI,CAACP,KAAL,CAAWmF,eAClB5E,CAAI,CAACP,KAAL,CAAWmF,YAAX,KAhLM,CAAA,IAyLVC,CAAAA,aAAc,OACP,CAACrF,CAAE,CAAC,IAAD,CAAF,CAASW,QA1LP,CAAA,IA4LV0E,CAAAA,YAAY3D,EAAO,CACdA,CADc,EAEfnB,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA9LP,CAAA,CAyMdsF,SAzMc,WAyMF,EAzME,EA+MlB9D,MAAM,CAACC,cAAP,CAAsBX,CAAK,CAAC4B,SAA5B,CAAuC,aAAvC,CAAsD,CAClDhB,KAAK,CAAEZ,CAD2C,CAElDmB,YAAY,GAFsC,CAGlDY,QAAQ,GAH0C,CAAtD,EAOsB,WAAlB,QAAO0C,CAAAA,MAAP,EAAyD,WAAxB,QAAOA,CAAAA,MAAM,CAACzE,QAC/CU,MAAM,CAACgE,cAAP,CAAsB1E,CAAK,CAAC4B,SAA5B,CAAuC6C,MAAM,CAACzE,KAAP,CAAa4B,SAApD,EAGAS,CAAQ,CAACnC,GAAT,CAAauE,MAAM,CAACzE,KAAP,CAAa4B,SAA1B,CAAqC5B,CAArC,wiDChTiB+C,CAAAA,2EAMP,GAAIS,CAAAA,SAAJ,CAAc,4CAAd,sDAOAmB,CAAAA,CAAO,CAAG1B,CAAY,CAAC3D,GAAb2D,CAAiB,IAAjBA,KACO,SAAnB,QAAO0B,CAAAA,OACD,IAAInB,CAAAA,SAAJ,kEAEW,IAAT,QAAgB,MAAhB,GAAgC,MAFlC,QAMHmB,CAAAA,SArB0B3B,GAwBzC4B,CAAoB,CAAC7B,CAAW,CAACnB,SAAb,CAAwB,OAAxB,EA2BpB,GAAMqB,CAAAA,CAAY,CAAG,GAAIQ,CAAAA,OAAzB,CAGA/C,MAAM,CAACmE,gBAAPnE,CAAwBqC,CAAW,CAACnB,SAApClB,CAA+C,CAC3CiE,OAAO,CAAE,CAAE9D,UAAU,GAAZ,CADkC,CAA/CH,EAKsB,UAAlB,QAAOoE,CAAAA,MAAP,EAA8D,QAA9B,GAAAC,EAAOD,MAAM,CAACE,cAC9CtE,MAAM,CAACC,cAAPD,CAAsBqC,CAAW,CAACnB,SAAlClB,CAA6CoE,MAAM,CAACE,WAApDtE,CAAiE,CAC7DS,YAAY,GADiD,CAE7DP,KAAK,CAAE,aAFsD,CAAjEF,KC5EiBuE,CAAAA,oCAKb1B,CAAO,CAACrD,GAARqD,CAAY,IAAZA,CAAkBV,CAAiB,EAAnCU,4CAcAL,CAAW,CAACG,CAAS,CAAC,IAAD,CAAV,uCAPJA,CAAAA,CAAS,CAAC,IAAD,WAclBE,CAAO,CAAG,GAAIE,CAAAA,WAkBpB/C,MAAM,CAACmE,gBAAPnE,CAAwBuE,CAAe,CAACrD,SAAxClB,CAAmD,CAC/CoC,MAAM,CAAE,CAAEjC,UAAU,GAAZ,CADuC,CAE/CqE,KAAK,CAAE,CAAErE,UAAU,GAAZ,CAFwC,CAAnDH,EAKsB,UAAlB,QAAOoE,CAAAA,MAAP,EAA8D,QAA9B,GAAAC,EAAOD,MAAM,CAACE,cAC9CtE,MAAM,CAACC,cAAPD,CAAsBuE,CAAe,CAACrD,SAAtClB,CAAiDoE,MAAM,CAACE,WAAxDtE,CAAqE,CACjES,YAAY,GADqD,CAEjEP,KAAK,CAAE,iBAF0D,CAArEF"} \ No newline at end of file diff --git a/node_modules/abort-controller/package.json b/node_modules/abort-controller/package.json new file mode 100644 index 0000000..fc705e0 --- /dev/null +++ b/node_modules/abort-controller/package.json @@ -0,0 +1,97 @@ +{ + "name": "abort-controller", + "version": "3.0.0", + "description": "An implementation of WHATWG AbortController interface.", + "main": "dist/abort-controller", + "files": [ + "dist", + "polyfill.*", + "browser.*" + ], + "engines": { + "node": ">=6.5" + }, + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "browser": "./browser.js", + "devDependencies": { + "@babel/core": "^7.2.2", + "@babel/plugin-transform-modules-commonjs": "^7.2.0", + "@babel/preset-env": "^7.3.0", + "@babel/register": "^7.0.0", + "@mysticatea/eslint-plugin": "^8.0.1", + "@mysticatea/spy": "^0.1.2", + "@types/mocha": "^5.2.5", + "@types/node": "^10.12.18", + "assert": "^1.4.1", + "codecov": "^3.1.0", + "dts-bundle-generator": "^2.0.0", + "eslint": "^5.12.1", + "karma": "^3.1.4", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + "karma-firefox-launcher": "^1.1.0", + "karma-growl-reporter": "^1.0.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0-rc.2", + "mocha": "^5.2.0", + "npm-run-all": "^4.1.5", + "nyc": "^13.1.0", + "opener": "^1.5.1", + "rimraf": "^2.6.3", + "rollup": "^1.1.2", + "rollup-plugin-babel": "^4.3.2", + 
"rollup-plugin-babel-minify": "^7.0.0", + "rollup-plugin-commonjs": "^9.2.0", + "rollup-plugin-node-resolve": "^4.0.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-typescript": "^1.0.0", + "rollup-watch": "^4.3.1", + "ts-node": "^8.0.1", + "type-tester": "^1.0.0", + "typescript": "^3.2.4" + }, + "scripts": { + "preversion": "npm test", + "version": "npm run -s build && git add dist/*", + "postversion": "git push && git push --tags", + "clean": "rimraf .nyc_output coverage", + "coverage": "opener coverage/lcov-report/index.html", + "lint": "eslint . --ext .ts", + "build": "run-s -s build:*", + "build:rollup": "rollup -c", + "build:dts": "dts-bundle-generator -o dist/abort-controller.d.ts src/abort-controller.ts && ts-node scripts/fix-dts", + "test": "run-s -s lint test:*", + "test:mocha": "nyc mocha test/*.ts", + "test:karma": "karma start --single-run", + "watch": "run-p -s watch:*", + "watch:mocha": "mocha test/*.ts --require ts-node/register --watch-extensions ts --watch --growl", + "watch:karma": "karma start --watch", + "codecov": "codecov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/mysticatea/abort-controller.git" + }, + "keywords": [ + "w3c", + "whatwg", + "event", + "events", + "abort", + "cancel", + "abortcontroller", + "abortsignal", + "controller", + "signal", + "shim" + ], + "author": "Toru Nagashima (https://github.com/mysticatea)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mysticatea/abort-controller/issues" + }, + "homepage": "https://github.com/mysticatea/abort-controller#readme" +} diff --git a/node_modules/abort-controller/polyfill.js b/node_modules/abort-controller/polyfill.js new file mode 100644 index 0000000..3ca8923 --- /dev/null +++ b/node_modules/abort-controller/polyfill.js @@ -0,0 +1,21 @@ +/*globals require, self, window */ +"use strict" + +const ac = require("./dist/abort-controller") + +/*eslint-disable @mysticatea/prettier */ +const g = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? global : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +if (g) { + if (typeof g.AbortController === "undefined") { + g.AbortController = ac.AbortController + } + if (typeof g.AbortSignal === "undefined") { + g.AbortSignal = ac.AbortSignal + } +} diff --git a/node_modules/abort-controller/polyfill.mjs b/node_modules/abort-controller/polyfill.mjs new file mode 100644 index 0000000..0602a64 --- /dev/null +++ b/node_modules/abort-controller/polyfill.mjs @@ -0,0 +1,19 @@ +/*globals self, window */ +import * as ac from "./dist/abort-controller" + +/*eslint-disable @mysticatea/prettier */ +const g = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? 
global : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +if (g) { + if (typeof g.AbortController === "undefined") { + g.AbortController = ac.AbortController + } + if (typeof g.AbortSignal === "undefined") { + g.AbortSignal = ac.AbortSignal + } +} diff --git a/node_modules/asynckit/LICENSE b/node_modules/asynckit/LICENSE new file mode 100644 index 0000000..c9eca5d --- /dev/null +++ b/node_modules/asynckit/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Alex Indigo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/asynckit/README.md b/node_modules/asynckit/README.md new file mode 100644 index 0000000..ddcc7e6 --- /dev/null +++ b/node_modules/asynckit/README.md @@ -0,0 +1,233 @@ +# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit) + +Minimal async jobs utility library, with streams support. + +[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit) +[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit) +[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit) + +[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master) +[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit) +[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit) + + + +AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects. +Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method. + +It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators. 
+ +| compression | size | +| :----------------- | -------: | +| asynckit.js | 12.34 kB | +| asynckit.min.js | 4.11 kB | +| asynckit.min.js.gz | 1.47 kB | + + +## Install + +```sh +$ npm install --save asynckit +``` + +## Examples + +### Parallel Jobs + +Runs iterator over provided array in parallel. Stores output in the `result` array, +on the matching positions. In unlikely event of an error from one of the jobs, +will terminate rest of the active jobs (if abort function is provided) +and return error along with salvaged data to the main callback function. + +#### Input Array + +```javascript +var parallel = require('asynckit').parallel + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] + , target = [] + ; + +parallel(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// async job accepts one element from the array +// and a callback function +function asyncJob(item, cb) +{ + // different delays (in ms) per item + var delay = item * 25; + + // pretend different jobs take different time to finish + // and not in consequential order + var timeoutId = setTimeout(function() { + target.push(item); + cb(null, item * 2); + }, delay); + + // allow to cancel "leftover" jobs upon error + // return function, invoking of which will abort this job + return clearTimeout.bind(null, timeoutId); +} +``` + +More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js). + +#### Input Object + +Also it supports named jobs, listed via object. + +```javascript +var parallel = require('asynckit/parallel') + , assert = require('assert') + ; + +var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } + , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } + , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] + , expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ] + , target = [] + , keys = [] + ; + +parallel(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); + assert.deepEqual(keys, expectedKeys); +}); + +// supports full value, key, callback (shortcut) interface +function asyncJob(item, key, cb) +{ + // different delays (in ms) per item + var delay = item * 25; + + // pretend different jobs take different time to finish + // and not in consequential order + var timeoutId = setTimeout(function() { + keys.push(key); + target.push(item); + cb(null, item * 2); + }, delay); + + // allow to cancel "leftover" jobs upon error + // return function, invoking of which will abort this job + return clearTimeout.bind(null, timeoutId); +} +``` + +More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js). + +### Serial Jobs + +Runs iterator over provided array sequentially. Stores output in the `result` array, +on the matching positions. In unlikely event of an error from one of the jobs, +will not proceed to the rest of the items in the list +and return error along with salvaged data to the main callback function. 
+ +#### Input Array + +```javascript +var serial = require('asynckit/serial') + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] + , target = [] + ; + +serial(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// extended interface (item, key, callback) +// also supported for arrays +function asyncJob(item, key, cb) +{ + target.push(key); + + // it will be automatically made async + // even it iterator "returns" in the same event loop + cb(null, item * 2); +} +``` + +More examples could be found in [test/test-serial-array.js](test/test-serial-array.js). + +#### Input Object + +Also it supports named jobs, listed via object. + +```javascript +var serial = require('asynckit').serial + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] + , target = [] + ; + +var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } + , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } + , expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , target = [] + ; + + +serial(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// shortcut interface (item, callback) +// works for object as well as for the arrays +function asyncJob(item, cb) +{ + target.push(item); + + // it will be automatically made async + // even it iterator "returns" in the same event loop + cb(null, item * 2); +} +``` + +More examples could be found in [test/test-serial-object.js](test/test-serial-object.js). + +_Note: Since _object_ is an _unordered_ collection of properties, +it may produce unexpected results with sequential iterations. +Whenever order of the jobs' execution is important please use `serialOrdered` method._ + +### Ordered Serial Iterations + +TBD + +For example [compare-property](compare-property) package. + +### Streaming interface + +TBD + +## Want to Know More? + +More examples can be found in [test folder](test/). + +Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions. + +## License + +AsyncKit is licensed under the MIT license. 
diff --git a/node_modules/asynckit/bench.js b/node_modules/asynckit/bench.js new file mode 100644 index 0000000..c612f1a --- /dev/null +++ b/node_modules/asynckit/bench.js @@ -0,0 +1,76 @@ +/* eslint no-console: "off" */ + +var asynckit = require('./') + , async = require('async') + , assert = require('assert') + , expected = 0 + ; + +var Benchmark = require('benchmark'); +var suite = new Benchmark.Suite; + +var source = []; +for (var z = 1; z < 100; z++) +{ + source.push(z); + expected += z; +} + +suite +// add tests + +.add('async.map', function(deferred) +{ + var total = 0; + + async.map(source, + function(i, cb) + { + setImmediate(function() + { + total += i; + cb(null, total); + }); + }, + function(err, result) + { + assert.ifError(err); + assert.equal(result[result.length - 1], expected); + deferred.resolve(); + }); +}, {'defer': true}) + + +.add('asynckit.parallel', function(deferred) +{ + var total = 0; + + asynckit.parallel(source, + function(i, cb) + { + setImmediate(function() + { + total += i; + cb(null, total); + }); + }, + function(err, result) + { + assert.ifError(err); + assert.equal(result[result.length - 1], expected); + deferred.resolve(); + }); +}, {'defer': true}) + + +// add listeners +.on('cycle', function(ev) +{ + console.log(String(ev.target)); +}) +.on('complete', function() +{ + console.log('Fastest is ' + this.filter('fastest').map('name')); +}) +// run async +.run({ 'async': true }); diff --git a/node_modules/asynckit/index.js b/node_modules/asynckit/index.js new file mode 100644 index 0000000..455f945 --- /dev/null +++ b/node_modules/asynckit/index.js @@ -0,0 +1,6 @@ +module.exports = +{ + parallel : require('./parallel.js'), + serial : require('./serial.js'), + serialOrdered : require('./serialOrdered.js') +}; diff --git a/node_modules/asynckit/lib/abort.js b/node_modules/asynckit/lib/abort.js new file mode 100644 index 0000000..114367e --- /dev/null +++ b/node_modules/asynckit/lib/abort.js @@ -0,0 +1,29 @@ +// API +module.exports = abort; + +/** + * Aborts leftover active jobs + * + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); + + // reset leftover jobs + state.jobs = {}; +} + +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} diff --git a/node_modules/asynckit/lib/async.js b/node_modules/asynckit/lib/async.js new file mode 100644 index 0000000..7f1288a --- /dev/null +++ b/node_modules/asynckit/lib/async.js @@ -0,0 +1,34 @@ +var defer = require('./defer.js'); + +// API +module.exports = async; + +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ +function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; +} diff --git a/node_modules/asynckit/lib/defer.js b/node_modules/asynckit/lib/defer.js new file mode 100644 index 0000000..b67110c --- /dev/null +++ b/node_modules/asynckit/lib/defer.js @@ -0,0 +1,26 @@ +module.exports = defer; + +/** + * Runs provided function on next iteration of the 
event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} diff --git a/node_modules/asynckit/lib/iterate.js b/node_modules/asynckit/lib/iterate.js new file mode 100644 index 0000000..5d2839a --- /dev/null +++ b/node_modules/asynckit/lib/iterate.js @@ -0,0 +1,75 @@ +var async = require('./async.js') + , abort = require('./abort.js') + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); +} + +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } + + return aborter; +} diff --git a/node_modules/asynckit/lib/readable_asynckit.js b/node_modules/asynckit/lib/readable_asynckit.js new file mode 100644 index 0000000..78ad240 --- /dev/null +++ b/node_modules/asynckit/lib/readable_asynckit.js @@ -0,0 +1,91 @@ +var streamify = require('./streamify.js') + , defer = require('./defer.js') + ; + +// API +module.exports = ReadableAsyncKit; + +/** + * Base constructor for all streams + * used to hold properties/methods + */ +function ReadableAsyncKit() +{ + ReadableAsyncKit.super_.apply(this, arguments); + + // list of active jobs + this.jobs = {}; + + // add stream methods + this.destroy = destroy; + this._start = _start; + this._read = _read; +} + +/** + * Destroys readable stream, + * by aborting outstanding jobs + * + * @returns {void} + */ +function destroy() +{ + if (this.destroyed) + { + return; + } + + this.destroyed = true; + + if (typeof this.terminator == 'function') + { + this.terminator(); + } +} + +/** + * Starts provided jobs in async manner + * + * @private + */ +function _start() +{ + // first argument – runner function + var runner = arguments[0] + // take away first argument + , args = Array.prototype.slice.call(arguments, 1) + // second argument - input data + , input = args[0] 
+ // last argument - result callback + , endCb = streamify.callback.call(this, args[args.length - 1]) + ; + + args[args.length - 1] = endCb; + // third argument - iterator + args[1] = streamify.iterator.call(this, args[1]); + + // allow time for proper setup + defer(function() + { + if (!this.destroyed) + { + this.terminator = runner.apply(null, args); + } + else + { + endCb(null, Array.isArray(input) ? [] : {}); + } + }.bind(this)); +} + + +/** + * Implement _read to comply with Readable streams + * Doesn't really make sense for flowing object mode + * + * @private + */ +function _read() +{ + +} diff --git a/node_modules/asynckit/lib/readable_parallel.js b/node_modules/asynckit/lib/readable_parallel.js new file mode 100644 index 0000000..5d2929f --- /dev/null +++ b/node_modules/asynckit/lib/readable_parallel.js @@ -0,0 +1,25 @@ +var parallel = require('../parallel.js'); + +// API +module.exports = ReadableParallel; + +/** + * Streaming wrapper to `asynckit.parallel` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableParallel(list, iterator, callback) +{ + if (!(this instanceof ReadableParallel)) + { + return new ReadableParallel(list, iterator, callback); + } + + // turn on object mode + ReadableParallel.super_.call(this, {objectMode: true}); + + this._start(parallel, list, iterator, callback); +} diff --git a/node_modules/asynckit/lib/readable_serial.js b/node_modules/asynckit/lib/readable_serial.js new file mode 100644 index 0000000..7822698 --- /dev/null +++ b/node_modules/asynckit/lib/readable_serial.js @@ -0,0 +1,25 @@ +var serial = require('../serial.js'); + +// API +module.exports = ReadableSerial; + +/** + * Streaming wrapper to `asynckit.serial` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableSerial(list, iterator, callback) +{ + if (!(this instanceof ReadableSerial)) + { + return new ReadableSerial(list, iterator, callback); + } + + // turn on object mode + ReadableSerial.super_.call(this, {objectMode: true}); + + this._start(serial, list, iterator, callback); +} diff --git a/node_modules/asynckit/lib/readable_serial_ordered.js b/node_modules/asynckit/lib/readable_serial_ordered.js new file mode 100644 index 0000000..3de89c4 --- /dev/null +++ b/node_modules/asynckit/lib/readable_serial_ordered.js @@ -0,0 +1,29 @@ +var serialOrdered = require('../serialOrdered.js'); + +// API +module.exports = ReadableSerialOrdered; +// expose sort helpers +module.exports.ascending = serialOrdered.ascending; +module.exports.descending = serialOrdered.descending; + +/** + * Streaming wrapper to `asynckit.serialOrdered` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableSerialOrdered(list, iterator, sortMethod, callback) +{ + if (!(this instanceof ReadableSerialOrdered)) + { + return new ReadableSerialOrdered(list, iterator, sortMethod, callback); + } + + // turn on object mode + ReadableSerialOrdered.super_.call(this, {objectMode: true}); 
+ + this._start(serialOrdered, list, iterator, sortMethod, callback); +} diff --git a/node_modules/asynckit/lib/state.js b/node_modules/asynckit/lib/state.js new file mode 100644 index 0000000..cbea7ad --- /dev/null +++ b/node_modules/asynckit/lib/state.js @@ -0,0 +1,37 @@ +// API +module.exports = state; + +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; +} diff --git a/node_modules/asynckit/lib/streamify.js b/node_modules/asynckit/lib/streamify.js new file mode 100644 index 0000000..f56a1c9 --- /dev/null +++ b/node_modules/asynckit/lib/streamify.js @@ -0,0 +1,141 @@ +var async = require('./async.js'); + +// API +module.exports = { + iterator: wrapIterator, + callback: wrapCallback +}; + +/** + * Wraps iterators with long signature + * + * @this ReadableAsyncKit# + * @param {function} iterator - function to wrap + * @returns {function} - wrapped function + */ +function wrapIterator(iterator) +{ + var stream = this; + + return function(item, key, cb) + { + var aborter + , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key)) + ; + + stream.jobs[key] = wrappedCb; + + // it's either shortcut (item, cb) + if (iterator.length == 2) + { + aborter = iterator(item, wrappedCb); + } + // or long format (item, key, cb) + else + { + aborter = iterator(item, key, wrappedCb); + } + + return aborter; + }; +} + +/** + * Wraps provided callback function + * allowing to execute snitch function before + * real callback + * + * @this ReadableAsyncKit# + * @param {function} callback - function to wrap + * @returns {function} - wrapped function + */ +function wrapCallback(callback) +{ + var stream = this; + + var wrapped = function(error, result) + { + return finisher.call(stream, error, result, callback); + }; + + return wrapped; +} + +/** + * Wraps provided iterator callback function + * makes sure snitch only called once, + * but passes secondary calls to the original callback + * + * @this ReadableAsyncKit# + * @param {function} callback - callback to wrap + * @param {number|string} key - iteration key + * @returns {function} wrapped callback + */ +function wrapIteratorCallback(callback, key) +{ + var stream = this; + + return function(error, output) + { + // don't repeat yourself + if (!(key in stream.jobs)) + { + callback(error, output); + return; + } + + // clean up jobs + delete stream.jobs[key]; + + return streamer.call(stream, error, {key: key, value: output}, callback); + }; +} + +/** + * Stream wrapper for iterator callback + * + * @this ReadableAsyncKit# + * @param {mixed} error - error response + * @param {mixed} output - iterator output + * @param {function} callback - callback that expects iterator results + */ +function streamer(error, output, callback) +{ + if (error && !this.error) + { + this.error = error; + 
this.pause(); + this.emit('error', error); + // send back value only, as expected + callback(error, output && output.value); + return; + } + + // stream stuff + this.push(output); + + // back to original track + // send back value only, as expected + callback(error, output && output.value); +} + +/** + * Stream wrapper for finishing callback + * + * @this ReadableAsyncKit# + * @param {mixed} error - error response + * @param {mixed} output - iterator output + * @param {function} callback - callback that expects final results + */ +function finisher(error, output, callback) +{ + // signal end of the stream + // only for successfully finished streams + if (!error) + { + this.push(null); + } + + // back to original track + callback(error, output); +} diff --git a/node_modules/asynckit/lib/terminator.js b/node_modules/asynckit/lib/terminator.js new file mode 100644 index 0000000..d6eb992 --- /dev/null +++ b/node_modules/asynckit/lib/terminator.js @@ -0,0 +1,29 @@ +var abort = require('./abort.js') + , async = require('./async.js') + ; + +// API +module.exports = terminator; + +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); +} diff --git a/node_modules/asynckit/package.json b/node_modules/asynckit/package.json new file mode 100644 index 0000000..51147d6 --- /dev/null +++ b/node_modules/asynckit/package.json @@ -0,0 +1,63 @@ +{ + "name": "asynckit", + "version": "0.4.0", + "description": "Minimal async jobs utility library, with streams support", + "main": "index.js", + "scripts": { + "clean": "rimraf coverage", + "lint": "eslint *.js lib/*.js test/*.js", + "test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec", + "win-test": "tape test/test-*.js", + "browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec", + "report": "istanbul report", + "size": "browserify index.js | size-table asynckit", + "debug": "tape test/test-*.js" + }, + "pre-commit": [ + "clean", + "lint", + "test", + "browser", + "report", + "size" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/alexindigo/asynckit.git" + }, + "keywords": [ + "async", + "jobs", + "parallel", + "serial", + "iterator", + "array", + "object", + "stream", + "destroy", + "terminate", + "abort" + ], + "author": "Alex Indigo ", + "license": "MIT", + "bugs": { + "url": "https://github.com/alexindigo/asynckit/issues" + }, + "homepage": "https://github.com/alexindigo/asynckit#readme", + "devDependencies": { + "browserify": "^13.0.0", + "browserify-istanbul": "^2.0.0", + "coveralls": "^2.11.9", + "eslint": "^2.9.0", + "istanbul": "^0.4.3", + "obake": "^0.1.2", + "phantomjs-prebuilt": "^2.1.7", + "pre-commit": "^1.1.3", + "reamde": "^1.1.0", + "rimraf": "^2.5.2", + "size-table": "^0.2.0", + "tap-spec": "^4.1.1", + "tape": "^4.5.1" + }, + "dependencies": {} +} diff --git a/node_modules/asynckit/parallel.js b/node_modules/asynckit/parallel.js new file mode 100644 index 0000000..3c50344 --- /dev/null +++ b/node_modules/asynckit/parallel.js @@ -0,0 +1,43 @@ +var iterate = require('./lib/iterate.js') + , initState = require('./lib/state.js') + , terminator = 
require('./lib/terminator.js') + ; + +// Public API +module.exports = parallel; + +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} diff --git a/node_modules/asynckit/serial.js b/node_modules/asynckit/serial.js new file mode 100644 index 0000000..6cd949a --- /dev/null +++ b/node_modules/asynckit/serial.js @@ -0,0 +1,17 @@ +var serialOrdered = require('./serialOrdered.js'); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} diff --git a/node_modules/asynckit/serialOrdered.js b/node_modules/asynckit/serialOrdered.js new file mode 100644 index 0000000..607eafe --- /dev/null +++ b/node_modules/asynckit/serialOrdered.js @@ -0,0 +1,75 @@ +var iterate = require('./lib/iterate.js') + , initState = require('./lib/state.js') + , terminator = require('./lib/terminator.js') + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); +} + +/* + * -- Sort methods + */ + +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 
1 : 0; +} + +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} diff --git a/node_modules/asynckit/stream.js b/node_modules/asynckit/stream.js new file mode 100644 index 0000000..d43465f --- /dev/null +++ b/node_modules/asynckit/stream.js @@ -0,0 +1,21 @@ +var inherits = require('util').inherits + , Readable = require('stream').Readable + , ReadableAsyncKit = require('./lib/readable_asynckit.js') + , ReadableParallel = require('./lib/readable_parallel.js') + , ReadableSerial = require('./lib/readable_serial.js') + , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js') + ; + +// API +module.exports = +{ + parallel : ReadableParallel, + serial : ReadableSerial, + serialOrdered : ReadableSerialOrdered, +}; + +inherits(ReadableAsyncKit, Readable); + +inherits(ReadableParallel, ReadableAsyncKit); +inherits(ReadableSerial, ReadableAsyncKit); +inherits(ReadableSerialOrdered, ReadableAsyncKit); diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 0000000..cea8b16 --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 0000000..2cdc8e4 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 0000000..d2a48b6 --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! 
+ +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 0000000..c67a646 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 0000000..ce6073e --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000..de32266 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 0000000..6b4e0e1 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. + +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? 
Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..0478be8 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..a18faa8 --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/combined-stream/License b/node_modules/combined-stream/License new file mode 100644 index 0000000..4804b7a --- /dev/null +++ b/node_modules/combined-stream/License @@ -0,0 +1,19 @@ +Copyright (c) 2011 Debuggable Limited + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
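Before moving on to combined-stream, a short illustrative sketch of the brace-expansion corner cases called out in the `expandTop` comment and the README above; the expected outputs are taken from those comments rather than re-derived here.

```js
var expand = require('brace-expansion');

// A leading "{}" is escaped at the top level, so this string is left as-is:
console.log(expand('{},a}b'));  // => [ '{},a}b' ]

// ...but the same "{}" later in the string participates in expansion:
console.log(expand('a{},b}c')); // => [ 'a}c', 'abc' ]

// "${" is never eligible for brace expansion (compatibility rule from the README):
console.log(expand('${a,b}'));  // => [ '${a,b}' ]
```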
diff --git a/node_modules/combined-stream/Readme.md b/node_modules/combined-stream/Readme.md new file mode 100644 index 0000000..9e367b5 --- /dev/null +++ b/node_modules/combined-stream/Readme.md @@ -0,0 +1,138 @@ +# combined-stream + +A stream that emits multiple other streams one after another. + +**NB** Currently `combined-stream` works with streams version 1 only. There is ongoing effort to switch this library to streams version 2. Any help is welcome. :) Meanwhile you can explore other libraries that provide streams2 support with more or less compatibility with `combined-stream`. + +- [combined-stream2](https://www.npmjs.com/package/combined-stream2): A drop-in streams2-compatible replacement for the combined-stream module. + +- [multistream](https://www.npmjs.com/package/multistream): A stream that emits multiple other streams one after another. + +## Installation + +``` bash +npm install combined-stream +``` + +## Usage + +Here is a simple example that shows how you can use combined-stream to combine +two files into one: + +``` javascript +var CombinedStream = require('combined-stream'); +var fs = require('fs'); + +var combinedStream = CombinedStream.create(); +combinedStream.append(fs.createReadStream('file1.txt')); +combinedStream.append(fs.createReadStream('file2.txt')); + +combinedStream.pipe(fs.createWriteStream('combined.txt')); +``` + +While the example above works great, it will pause all source streams until +they are needed. If you don't want that to happen, you can set `pauseStreams` +to `false`: + +``` javascript +var CombinedStream = require('combined-stream'); +var fs = require('fs'); + +var combinedStream = CombinedStream.create({pauseStreams: false}); +combinedStream.append(fs.createReadStream('file1.txt')); +combinedStream.append(fs.createReadStream('file2.txt')); + +combinedStream.pipe(fs.createWriteStream('combined.txt')); +``` + +However, what if you don't have all the source streams yet, or you don't want +to allocate the resources (file descriptors, memory, etc.) for them right away? +Well, in that case you can simply provide a callback that supplies the stream +by calling a `next()` function: + +``` javascript +var CombinedStream = require('combined-stream'); +var fs = require('fs'); + +var combinedStream = CombinedStream.create(); +combinedStream.append(function(next) { + next(fs.createReadStream('file1.txt')); +}); +combinedStream.append(function(next) { + next(fs.createReadStream('file2.txt')); +}); + +combinedStream.pipe(fs.createWriteStream('combined.txt')); +``` + +## API + +### CombinedStream.create([options]) + +Returns a new combined stream object. Available options are: + +* `maxDataSize` +* `pauseStreams` + +The effect of those options is described below. + +### combinedStream.pauseStreams = `true` + +Whether to apply back pressure to the underlaying streams. If set to `false`, +the underlaying streams will never be paused. If set to `true`, the +underlaying streams will be paused right after being appended, as well as when +`delayedStream.pipe()` wants to throttle. + +### combinedStream.maxDataSize = `2 * 1024 * 1024` + +The maximum amount of bytes (or characters) to buffer for all source streams. +If this value is exceeded, `combinedStream` emits an `'error'` event. + +### combinedStream.dataSize = `0` + +The amount of bytes (or characters) currently buffered by `combinedStream`. + +### combinedStream.append(stream) + +Appends the given `stream` to the combinedStream object. 
If `pauseStreams` is +set to `true, this stream will also be paused right away. + +`streams` can also be a function that takes one parameter called `next`. `next` +is a function that must be invoked in order to provide the `next` stream, see +example above. + +Regardless of how the `stream` is appended, combined-stream always attaches an +`'error'` listener to it, so you don't have to do that manually. + +Special case: `stream` can also be a String or Buffer. + +### combinedStream.write(data) + +You should not call this, `combinedStream` takes care of piping the appended +streams into itself for you. + +### combinedStream.resume() + +Causes `combinedStream` to start drain the streams it manages. The function is +idempotent, and also emits a `'resume'` event each time which usually goes to +the stream that is currently being drained. + +### combinedStream.pause(); + +If `combinedStream.pauseStreams` is set to `false`, this does nothing. +Otherwise a `'pause'` event is emitted, this goes to the stream that is +currently being drained, so you can use it to apply back pressure. + +### combinedStream.end(); + +Sets `combinedStream.writable` to false, emits an `'end'` event, and removes +all streams from the queue. + +### combinedStream.destroy(); + +Same as `combinedStream.end()`, except it emits a `'close'` event instead of +`'end'`. + +## License + +combined-stream is licensed under the MIT license. diff --git a/node_modules/combined-stream/lib/combined_stream.js b/node_modules/combined-stream/lib/combined_stream.js new file mode 100644 index 0000000..125f097 --- /dev/null +++ b/node_modules/combined-stream/lib/combined_stream.js @@ -0,0 +1,208 @@ +var util = require('util'); +var Stream = require('stream').Stream; +var DelayedStream = require('delayed-stream'); + +module.exports = CombinedStream; +function CombinedStream() { + this.writable = false; + this.readable = true; + this.dataSize = 0; + this.maxDataSize = 2 * 1024 * 1024; + this.pauseStreams = true; + + this._released = false; + this._streams = []; + this._currentStream = null; + this._insideLoop = false; + this._pendingNext = false; +} +util.inherits(CombinedStream, Stream); + +CombinedStream.create = function(options) { + var combinedStream = new this(); + + options = options || {}; + for (var option in options) { + combinedStream[option] = options[option]; + } + + return combinedStream; +}; + +CombinedStream.isStreamLike = function(stream) { + return (typeof stream !== 'function') + && (typeof stream !== 'string') + && (typeof stream !== 'boolean') + && (typeof stream !== 'number') + && (!Buffer.isBuffer(stream)); +}; + +CombinedStream.prototype.append = function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + + if (isStreamLike) { + if (!(stream instanceof DelayedStream)) { + var newStream = DelayedStream.create(stream, { + maxDataSize: Infinity, + pauseStream: this.pauseStreams, + }); + stream.on('data', this._checkDataSize.bind(this)); + stream = newStream; + } + + this._handleErrors(stream); + + if (this.pauseStreams) { + stream.pause(); + } + } + + this._streams.push(stream); + return this; +}; + +CombinedStream.prototype.pipe = function(dest, options) { + Stream.prototype.pipe.call(this, dest, options); + this.resume(); + return dest; +}; + +CombinedStream.prototype._getNext = function() { + this._currentStream = null; + + if (this._insideLoop) { + this._pendingNext = true; + return; // defer call + } + + this._insideLoop = true; + try { + do { + this._pendingNext = false; + this._realGetNext(); + } 
while (this._pendingNext); + } finally { + this._insideLoop = false; + } +}; + +CombinedStream.prototype._realGetNext = function() { + var stream = this._streams.shift(); + + + if (typeof stream == 'undefined') { + this.end(); + return; + } + + if (typeof stream !== 'function') { + this._pipeNext(stream); + return; + } + + var getStream = stream; + getStream(function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('data', this._checkDataSize.bind(this)); + this._handleErrors(stream); + } + + this._pipeNext(stream); + }.bind(this)); +}; + +CombinedStream.prototype._pipeNext = function(stream) { + this._currentStream = stream; + + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('end', this._getNext.bind(this)); + stream.pipe(this, {end: false}); + return; + } + + var value = stream; + this.write(value); + this._getNext(); +}; + +CombinedStream.prototype._handleErrors = function(stream) { + var self = this; + stream.on('error', function(err) { + self._emitError(err); + }); +}; + +CombinedStream.prototype.write = function(data) { + this.emit('data', data); +}; + +CombinedStream.prototype.pause = function() { + if (!this.pauseStreams) { + return; + } + + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); + this.emit('pause'); +}; + +CombinedStream.prototype.resume = function() { + if (!this._released) { + this._released = true; + this.writable = true; + this._getNext(); + } + + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); + this.emit('resume'); +}; + +CombinedStream.prototype.end = function() { + this._reset(); + this.emit('end'); +}; + +CombinedStream.prototype.destroy = function() { + this._reset(); + this.emit('close'); +}; + +CombinedStream.prototype._reset = function() { + this.writable = false; + this._streams = []; + this._currentStream = null; +}; + +CombinedStream.prototype._checkDataSize = function() { + this._updateDataSize(); + if (this.dataSize <= this.maxDataSize) { + return; + } + + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; + this._emitError(new Error(message)); +}; + +CombinedStream.prototype._updateDataSize = function() { + this.dataSize = 0; + + var self = this; + this._streams.forEach(function(stream) { + if (!stream.dataSize) { + return; + } + + self.dataSize += stream.dataSize; + }); + + if (this._currentStream && this._currentStream.dataSize) { + this.dataSize += this._currentStream.dataSize; + } +}; + +CombinedStream.prototype._emitError = function(err) { + this._reset(); + this.emit('error', err); +}; diff --git a/node_modules/combined-stream/package.json b/node_modules/combined-stream/package.json new file mode 100644 index 0000000..6982b6d --- /dev/null +++ b/node_modules/combined-stream/package.json @@ -0,0 +1,25 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "combined-stream", + "description": "A stream that emits multiple other streams one after another.", + "version": "1.0.8", + "homepage": "https://github.com/felixge/node-combined-stream", + "repository": { + "type": "git", + "url": "git://github.com/felixge/node-combined-stream.git" + }, + "main": "./lib/combined_stream", + "scripts": { + "test": "node test/run.js" + }, + "engines": { + "node": ">= 0.8" + }, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "devDependencies": { + "far": 
"~0.0.7" + }, + "license": "MIT" +} diff --git a/node_modules/combined-stream/yarn.lock b/node_modules/combined-stream/yarn.lock new file mode 100644 index 0000000..7edf418 --- /dev/null +++ b/node_modules/combined-stream/yarn.lock @@ -0,0 +1,17 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + +far@~0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/far/-/far-0.0.7.tgz#01c1fd362bcd26ce9cf161af3938aa34619f79a7" + dependencies: + oop "0.0.3" + +oop@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/oop/-/oop-0.0.3.tgz#70fa405a5650891a194fdc82ca68dad6dabf4401" diff --git a/node_modules/concat-map/.travis.yml b/node_modules/concat-map/.travis.yml new file mode 100644 index 0000000..f1d0f13 --- /dev/null +++ b/node_modules/concat-map/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/node_modules/concat-map/LICENSE b/node_modules/concat-map/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/concat-map/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/concat-map/README.markdown b/node_modules/concat-map/README.markdown new file mode 100644 index 0000000..408f70a --- /dev/null +++ b/node_modules/concat-map/README.markdown @@ -0,0 +1,62 @@ +concat-map +========== + +Concatenative mapdashery. + +[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map) + +[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map) + +example +======= + +``` js +var concatMap = require('concat-map'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); +``` + +*** + +``` +[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ] +``` + +methods +======= + +``` js +var concatMap = require('concat-map') +``` + +concatMap(xs, fn) +----------------- + +Return an array of concatenated elements by calling `fn(x, i)` for each element +`x` and each index `i` in the array `xs`. + +When `fn(x, i)` returns an array, its result will be concatenated with the +result array. 
If `fn(x, i)` returns anything else, that value will be pushed +onto the end of the result array. + +install +======= + +With [npm](http://npmjs.org) do: + +``` +npm install concat-map +``` + +license +======= + +MIT + +notes +===== + +This module was written while sitting high above the ground in a tree. diff --git a/node_modules/concat-map/example/map.js b/node_modules/concat-map/example/map.js new file mode 100644 index 0000000..3365621 --- /dev/null +++ b/node_modules/concat-map/example/map.js @@ -0,0 +1,6 @@ +var concatMap = require('../'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); diff --git a/node_modules/concat-map/index.js b/node_modules/concat-map/index.js new file mode 100644 index 0000000..b29a781 --- /dev/null +++ b/node_modules/concat-map/index.js @@ -0,0 +1,13 @@ +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; diff --git a/node_modules/concat-map/package.json b/node_modules/concat-map/package.json new file mode 100644 index 0000000..d3640e6 --- /dev/null +++ b/node_modules/concat-map/package.json @@ -0,0 +1,43 @@ +{ + "name" : "concat-map", + "description" : "concatenative mapdashery", + "version" : "0.0.1", + "repository" : { + "type" : "git", + "url" : "git://github.com/substack/node-concat-map.git" + }, + "main" : "index.js", + "keywords" : [ + "concat", + "concatMap", + "map", + "functional", + "higher-order" + ], + "directories" : { + "example" : "example", + "test" : "test" + }, + "scripts" : { + "test" : "tape test/*.js" + }, + "devDependencies" : { + "tape" : "~2.4.0" + }, + "license" : "MIT", + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : { + "ie" : [ 6, 7, 8, 9 ], + "ff" : [ 3.5, 10, 15.0 ], + "chrome" : [ 10, 22 ], + "safari" : [ 5.1 ], + "opera" : [ 12 ] + } + } +} diff --git a/node_modules/concat-map/test/map.js b/node_modules/concat-map/test/map.js new file mode 100644 index 0000000..fdbd702 --- /dev/null +++ b/node_modules/concat-map/test/map.js @@ -0,0 +1,39 @@ +var concatMap = require('../'); +var test = require('tape'); + +test('empty or not', function (t) { + var xs = [ 1, 2, 3, 4, 5, 6 ]; + var ixes = []; + var ys = concatMap(xs, function (x, ix) { + ixes.push(ix); + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; + }); + t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]); + t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]); + t.end(); +}); + +test('always something', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ]; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('scalars', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? 
[ 'B', 'B', 'B' ] : x; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('undefs', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function () {}); + t.same(ys, [ undefined, undefined, undefined, undefined ]); + t.end(); +}); diff --git a/node_modules/delayed-stream/.npmignore b/node_modules/delayed-stream/.npmignore new file mode 100644 index 0000000..9daeafb --- /dev/null +++ b/node_modules/delayed-stream/.npmignore @@ -0,0 +1 @@ +test diff --git a/node_modules/delayed-stream/License b/node_modules/delayed-stream/License new file mode 100644 index 0000000..4804b7a --- /dev/null +++ b/node_modules/delayed-stream/License @@ -0,0 +1,19 @@ +Copyright (c) 2011 Debuggable Limited + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/delayed-stream/Makefile b/node_modules/delayed-stream/Makefile new file mode 100644 index 0000000..b4ff85a --- /dev/null +++ b/node_modules/delayed-stream/Makefile @@ -0,0 +1,7 @@ +SHELL := /bin/bash + +test: + @./test/run.js + +.PHONY: test + diff --git a/node_modules/delayed-stream/Readme.md b/node_modules/delayed-stream/Readme.md new file mode 100644 index 0000000..aca36f9 --- /dev/null +++ b/node_modules/delayed-stream/Readme.md @@ -0,0 +1,141 @@ +# delayed-stream + +Buffers events from a stream until you are ready to handle them. + +## Installation + +``` bash +npm install delayed-stream +``` + +## Usage + +The following example shows how to write a http echo server that delays its +response by 1000 ms. + +``` javascript +var DelayedStream = require('delayed-stream'); +var http = require('http'); + +http.createServer(function(req, res) { + var delayed = DelayedStream.create(req); + + setTimeout(function() { + res.writeHead(200); + delayed.pipe(res); + }, 1000); +}); +``` + +If you are not using `Stream#pipe`, you can also manually release the buffered +events by calling `delayedStream.resume()`: + +``` javascript +var delayed = DelayedStream.create(req); + +setTimeout(function() { + // Emit all buffered events and resume underlaying source + delayed.resume(); +}, 1000); +``` + +## Implementation + +In order to use this meta stream properly, here are a few things you should +know about the implementation. + +### Event Buffering / Proxying + +All events of the `source` stream are hijacked by overwriting the `source.emit` +method. Until node implements a catch-all event listener, this is the only way. 
+ +However, delayed-stream still continues to emit all events it captures on the +`source`, regardless of whether you have released the delayed stream yet or +not. + +Upon creation, delayed-stream captures all `source` events and stores them in +an internal event buffer. Once `delayedStream.release()` is called, all +buffered events are emitted on the `delayedStream`, and the event buffer is +cleared. After that, delayed-stream merely acts as a proxy for the underlaying +source. + +### Error handling + +Error events on `source` are buffered / proxied just like any other events. +However, `delayedStream.create` attaches a no-op `'error'` listener to the +`source`. This way you only have to handle errors on the `delayedStream` +object, rather than in two places. + +### Buffer limits + +delayed-stream provides a `maxDataSize` property that can be used to limit +the amount of data being buffered. In order to protect you from bad `source` +streams that don't react to `source.pause()`, this feature is enabled by +default. + +## API + +### DelayedStream.create(source, [options]) + +Returns a new `delayedStream`. Available options are: + +* `pauseStream` +* `maxDataSize` + +The description for those properties can be found below. + +### delayedStream.source + +The `source` stream managed by this object. This is useful if you are +passing your `delayedStream` around, and you still want to access properties +on the `source` object. + +### delayedStream.pauseStream = true + +Whether to pause the underlaying `source` when calling +`DelayedStream.create()`. Modifying this property afterwards has no effect. + +### delayedStream.maxDataSize = 1024 * 1024 + +The amount of data to buffer before emitting an `error`. + +If the underlaying source is emitting `Buffer` objects, the `maxDataSize` +refers to bytes. + +If the underlaying source is emitting JavaScript strings, the size refers to +characters. + +If you know what you are doing, you can set this property to `Infinity` to +disable this feature. You can also modify this property during runtime. + +### delayedStream.dataSize = 0 + +The amount of data buffered so far. + +### delayedStream.readable + +An ECMA5 getter that returns the value of `source.readable`. + +### delayedStream.resume() + +If the `delayedStream` has not been released so far, `delayedStream.release()` +is called. + +In either case, `source.resume()` is called. + +### delayedStream.pause() + +Calls `source.pause()`. + +### delayedStream.pipe(dest) + +Calls `delayedStream.resume()` and then proxies the arguments to `source.pipe`. + +### delayedStream.release() + +Emits and clears all events that have been buffered up so far. This does not +resume the underlaying source, use `delayedStream.resume()` instead. + +## License + +delayed-stream is licensed under the MIT license. 
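As a complement to the "Buffer limits" notes above, a small sketch of `maxDataSize` in the style of the README's echo-server example; the port and 1 KiB threshold are arbitrary, and the error text matches the message built in `_checkIfMaxDataSizeExceeded` in the source that follows.

```js
var DelayedStream = require('delayed-stream');
var http = require('http');

http.createServer(function (req, res) {
  // Cap buffering at 1 KiB instead of the default 1024 * 1024 bytes.
  var delayed = DelayedStream.create(req, { maxDataSize: 1024 });
  var failed = false;

  delayed.on('error', function (err) {
    // Emitted if the request body grows past maxDataSize while still buffered
    // (e.g. a source that does not react to pause()).
    failed = true;
    res.writeHead(413);
    res.end(err.message); // "DelayedStream#maxDataSize of 1024 bytes exceeded."
  });

  setTimeout(function () {
    if (failed) return;
    res.writeHead(200);
    delayed.pipe(res); // pipe() releases the buffered events and resumes the request
  }, 1000);
}).listen(8080);
```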
diff --git a/node_modules/delayed-stream/lib/delayed_stream.js b/node_modules/delayed-stream/lib/delayed_stream.js new file mode 100644 index 0000000..b38fc85 --- /dev/null +++ b/node_modules/delayed-stream/lib/delayed_stream.js @@ -0,0 +1,107 @@ +var Stream = require('stream').Stream; +var util = require('util'); + +module.exports = DelayedStream; +function DelayedStream() { + this.source = null; + this.dataSize = 0; + this.maxDataSize = 1024 * 1024; + this.pauseStream = true; + + this._maxDataSizeExceeded = false; + this._released = false; + this._bufferedEvents = []; +} +util.inherits(DelayedStream, Stream); + +DelayedStream.create = function(source, options) { + var delayedStream = new this(); + + options = options || {}; + for (var option in options) { + delayedStream[option] = options[option]; + } + + delayedStream.source = source; + + var realEmit = source.emit; + source.emit = function() { + delayedStream._handleEmit(arguments); + return realEmit.apply(source, arguments); + }; + + source.on('error', function() {}); + if (delayedStream.pauseStream) { + source.pause(); + } + + return delayedStream; +}; + +Object.defineProperty(DelayedStream.prototype, 'readable', { + configurable: true, + enumerable: true, + get: function() { + return this.source.readable; + } +}); + +DelayedStream.prototype.setEncoding = function() { + return this.source.setEncoding.apply(this.source, arguments); +}; + +DelayedStream.prototype.resume = function() { + if (!this._released) { + this.release(); + } + + this.source.resume(); +}; + +DelayedStream.prototype.pause = function() { + this.source.pause(); +}; + +DelayedStream.prototype.release = function() { + this._released = true; + + this._bufferedEvents.forEach(function(args) { + this.emit.apply(this, args); + }.bind(this)); + this._bufferedEvents = []; +}; + +DelayedStream.prototype.pipe = function() { + var r = Stream.prototype.pipe.apply(this, arguments); + this.resume(); + return r; +}; + +DelayedStream.prototype._handleEmit = function(args) { + if (this._released) { + this.emit.apply(this, args); + return; + } + + if (args[0] === 'data') { + this.dataSize += args[1].length; + this._checkIfMaxDataSizeExceeded(); + } + + this._bufferedEvents.push(args); +}; + +DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { + if (this._maxDataSizeExceeded) { + return; + } + + if (this.dataSize <= this.maxDataSize) { + return; + } + + this._maxDataSizeExceeded = true; + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' 
+ this.emit('error', new Error(message)); +}; diff --git a/node_modules/delayed-stream/package.json b/node_modules/delayed-stream/package.json new file mode 100644 index 0000000..eea3291 --- /dev/null +++ b/node_modules/delayed-stream/package.json @@ -0,0 +1,27 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "contributors": [ + "Mike Atkins " + ], + "name": "delayed-stream", + "description": "Buffers events from a stream until you are ready to handle them.", + "license": "MIT", + "version": "1.0.0", + "homepage": "https://github.com/felixge/node-delayed-stream", + "repository": { + "type": "git", + "url": "git://github.com/felixge/node-delayed-stream.git" + }, + "main": "./lib/delayed_stream", + "engines": { + "node": ">=0.4.0" + }, + "scripts": { + "test": "make test" + }, + "dependencies": {}, + "devDependencies": { + "fake": "0.2.0", + "far": "0.0.1" + } +} diff --git a/node_modules/event-target-shim/LICENSE b/node_modules/event-target-shim/LICENSE new file mode 100644 index 0000000..c39e694 --- /dev/null +++ b/node_modules/event-target-shim/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Toru Nagashima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/event-target-shim/README.md b/node_modules/event-target-shim/README.md new file mode 100644 index 0000000..a4f9c1b --- /dev/null +++ b/node_modules/event-target-shim/README.md @@ -0,0 +1,293 @@ +# event-target-shim + +[![npm version](https://img.shields.io/npm/v/event-target-shim.svg)](https://www.npmjs.com/package/event-target-shim) +[![Downloads/month](https://img.shields.io/npm/dm/event-target-shim.svg)](http://www.npmtrends.com/event-target-shim) +[![Build Status](https://travis-ci.org/mysticatea/event-target-shim.svg?branch=master)](https://travis-ci.org/mysticatea/event-target-shim) +[![Coverage Status](https://codecov.io/gh/mysticatea/event-target-shim/branch/master/graph/badge.svg)](https://codecov.io/gh/mysticatea/event-target-shim) +[![Dependency Status](https://david-dm.org/mysticatea/event-target-shim.svg)](https://david-dm.org/mysticatea/event-target-shim) + +An implementation of [WHATWG EventTarget interface](https://dom.spec.whatwg.org/#interface-eventtarget), plus few extensions. + +- This provides `EventTarget` constructor that can inherit for your custom object. +- This provides an utility that defines properties of attribute listeners (e.g. `obj.onclick`). + +```js +import {EventTarget, defineEventAttribute} from "event-target-shim" + +class Foo extends EventTarget { + // ... 
+} + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Use +const foo = new Foo() +foo.addEventListener("hello", e => console.log("hello", e)) +foo.onhello = e => console.log("onhello:", e) +foo.dispatchEvent(new CustomEvent("hello")) +``` + +## 💿 Installation + +Use [npm](https://www.npmjs.com/) to install then use a bundler. + +``` +npm install event-target-shim +``` + +Or download from [`dist` directory](./dist). + +- [dist/event-target-shim.mjs](dist/event-target-shim.mjs) ... ES modules version. +- [dist/event-target-shim.js](dist/event-target-shim.js) ... Common JS version. +- [dist/event-target-shim.umd.js](dist/event-target-shim.umd.js) ... UMD (Universal Module Definition) version. This is transpiled by [Babel](https://babeljs.io/) for IE 11. + +## 📖 Usage + +```js +import {EventTarget, defineEventAttribute} from "event-target-shim" +// or +const {EventTarget, defineEventAttribute} = require("event-target-shim") + +// or UMD version defines a global variable: +const {EventTarget, defineEventAttribute} = window.EventTargetShim +``` + +### EventTarget + +> https://dom.spec.whatwg.org/#interface-eventtarget + +#### eventTarget.addEventListener(type, callback, options) + +Register an event listener. + +- `type` is a string. This is the event name to register. +- `callback` is a function. This is the event listener to register. +- `options` is a boolean or an object `{ capture?: boolean, passive?: boolean, once?: boolean }`. If this is a boolean, it's same meaning as `{ capture: options }`. + - `capture` is the flag to register the event listener for capture phase. + - `passive` is the flag to ignore `event.preventDefault()` method in the event listener. + - `once` is the flag to remove the event listener automatically after the first call. + +#### eventTarget.removeEventListener(type, callback, options) + +Unregister an event listener. + +- `type` is a string. This is the event name to unregister. +- `callback` is a function. This is the event listener to unregister. +- `options` is a boolean or an object `{ capture?: boolean }`. If this is a boolean, it's same meaning as `{ capture: options }`. + - `capture` is the flag to register the event listener for capture phase. + +#### eventTarget.dispatchEvent(event) + +Dispatch an event. + +- `event` is a [Event](https://dom.spec.whatwg.org/#event) object or an object `{ type: string, [key: string]: any }`. The latter is non-standard but useful. In both cases, listeners receive the event as implementing [Event](https://dom.spec.whatwg.org/#event) interface. + +### defineEventAttribute(proto, type) + +Define an event attribute (e.g. `onclick`) to `proto`. This is non-standard. + +- `proto` is an object (assuming it's a prototype object). This function defines a getter/setter pair for the event attribute. +- `type` is a string. This is the event name to define. + +For example: + +```js +class AbortSignal extends EventTarget { + constructor() { + this.aborted = false + } +} +// Define `onabort` property. +defineEventAttribute(AbortSignal.prototype, "abort") +``` + +### EventTarget(types) + +Define a custom `EventTarget` class with event attributes. This is non-standard. + +- `types` is a string or an array of strings. This is the event name to define. + +For example: + +```js +// This has `onabort` property. 
+class AbortSignal extends EventTarget("abort") { + constructor() { + this.aborted = false + } +} +``` + +## 📚 Examples + +### ES2015 and later + +> https://jsfiddle.net/636vea92/ + +```js +const {EventTarget, defineEventAttribute} = EventTargetShim + +// Define a derived class. +class Foo extends EventTarget { + // ... +} + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +const foo = new Foo() +foo.addEventListener("hello", (e) => { + console.log("hello", e) +}) +foo.onhello = (e) => { + console.log("onhello", e) +} + +// Dispatching events +foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +``` + +### Typescript + +```ts +import { EventTarget, defineEventAttribute } from "event-target-shim"; + +// Define events +type FooEvents = { + hello: CustomEvent +} +type FooEventAttributes = { + onhello: CustomEvent +} + +// Define a derived class. +class Foo extends EventTarget { + // ... +} +// Define `foo.onhello` property's implementation. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +const foo = new Foo() +foo.addEventListener("hello", (e) => { + console.log("hello", e.detail) +}) +foo.onhello = (e) => { + console.log("onhello", e.detail) +} + +// Dispatching events +foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +``` + +Unfortunately, both `FooEvents` and `FooEventAttributes` are needed because TypeScript doesn't allow the mutation of string literal types. If TypeScript allowed us to compute `"onhello"` from `"hello"` in types, `FooEventAttributes` will be optional. + +This `EventTarget` type is compatible with `EventTarget` interface of `lib.dom.d.ts`. + +#### To disallow unknown events + +By default, methods such as `addEventListener` accept unknown events. You can disallow unknown events by the third type parameter `"strict"`. + +```ts +type FooEvents = { + hello: CustomEvent +} +class Foo extends EventTarget { + // ... +} + +// OK because `hello` is defined in FooEvents. +foo.addEventListener("hello", (e) => { +}) +// Error because `unknown` is not defined in FooEvents. +foo.addEventListener("unknown", (e) => { +}) +``` + +However, if you use `"strict"` parameter, it loses compatibility with `EventTarget` interface of `lib.dom.d.ts`. + +#### To infer the type of `dispatchEvent()` method + +TypeScript cannot infer the event type of `dispatchEvent()` method properly from the argument in most cases. You can improve this behavior with the following steps: + +1. Use the third type parameter `"strict"`. This prevents inferring to `dispatchEvent()`. +2. Make the `type` property of event definitions stricter. + +```ts +type FooEvents = { + hello: CustomEvent & { type: "hello" } + hey: Event & { type: "hey" } +} +class Foo extends EventTarget { + // ... +} + +// Error because `detail` property is lacking. +foo.dispatchEvent({ type: "hello" }) +``` + +### ES5 + +> https://jsfiddle.net/522zc9de/ + +```js +// Define a derived class. +function Foo() { + EventTarget.call(this) +} +Foo.prototype = Object.create(EventTarget.prototype, { + constructor: { value: Foo, configurable: true, writable: true } + // ... +}) + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +var foo = new Foo() +foo.addEventListener("hello", function(e) { + console.log("hello", e) +}) +foo.onhello = function(e) { + console.log("onhello", e) +} + +// Dispatching events +function isSupportEventConstrucor() { // IE does not support. 
+ try { + new CusomEvent("hello") + return true + } catch (_err) { + return false + } +} +if (isSupportEventConstrucor()) { + foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +} else { + var e = document.createEvent("CustomEvent") + e.initCustomEvent("hello", false, false, "detail") + foo.dispatchEvent(e) +} +``` + +## 📰 Changelog + +- See [GitHub releases](https://github.com/mysticatea/event-target-shim/releases). + +## 🍻 Contributing + +Contributing is welcome ❤️ + +Please use GitHub issues/PRs. + +### Development tools + +- `npm install` installs dependencies for development. +- `npm test` runs tests and measures code coverage. +- `npm run clean` removes temporary files of tests. +- `npm run coverage` opens code coverage of the previous test with your default browser. +- `npm run lint` runs ESLint. +- `npm run build` generates `dist` codes. +- `npm run watch` runs tests on each file change. diff --git a/node_modules/event-target-shim/dist/event-target-shim.js b/node_modules/event-target-shim/dist/event-target-shim.js new file mode 100644 index 0000000..53ce220 --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.js @@ -0,0 +1,871 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. 
+ */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. 
+ * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. 
+ * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. 
+ * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports.default = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map diff --git a/node_modules/event-target-shim/dist/event-target-shim.js.map b/node_modules/event-target-shim/dist/event-target-shim.js.map new file mode 100644 index 0000000..83c5f62 --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.js.map @@ -0,0 +1 @@ +{"version":3,"file":"event-target-shim.js","sources":["../src/event.mjs","../src/event-target.mjs"],"sourcesContent":["/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event 
bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns 
{Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * 
@returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. 
`eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? 
CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":[],"mappings":";;;;;;;;;AAAA;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB
,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASD,AAAO,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQD,A
AAO,SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASD,AAAO,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASD,AAAO,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAA
G,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/event-target-shim/dist/event-target-shim.mjs b/node_modules/event-target-shim/dist/event-target-shim.mjs new file mode 100644 index 0000000..114f3a1 --- /dev/null +++ 
b/node_modules/event-target-shim/dist/event-target-shim.mjs @@ -0,0 +1,862 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. 
+ */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. 
+ * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. 
+ * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. 
+ * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? 
CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. 
+if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +export default EventTarget; +export { defineEventAttribute, EventTarget }; +//# sourceMappingURL=event-target-shim.mjs.map diff --git a/node_modules/event-target-shim/dist/event-target-shim.mjs.map b/node_modules/event-target-shim/dist/event-target-shim.mjs.map new file mode 100644 index 0000000..57b3e8f --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"event-target-shim.mjs","sources":["../src/event.mjs","../src/event-target.mjs"],"sourcesContent":["/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type 
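Editor's note (not part of the vendored files above): the `event-target-shim.mjs` module added in this diff exports `EventTarget` both as a constructor and as a factory that returns a `CustomEventTarget` with `on<event>` attributes, plus a standalone `defineEventAttribute` helper. The sketch below is a minimal, illustrative use of exactly that API as implemented above; the `"event-target-shim"` import specifier, the `Socket` class, and the logged strings are assumptions for the example only.

// Illustrative sketch only -- exercises the shim's factory form,
// attribute listeners, `once` options, and dispatchEvent's return value.
import { EventTarget, defineEventAttribute } from "event-target-shim";

// Calling EventTarget with event names returns a constructor whose
// prototype already has `onmessage` / `onerror` attribute accessors.
class Socket extends EventTarget("message", "error") {}

const s = new Socket();

// Attribute listener (stored with listenerType ATTRIBUTE above).
s.onmessage = event => console.log("attribute listener:", event.type);

// Regular listener; `once: true` unlinks the node after the first dispatch.
s.addEventListener("message", () => console.log("once listener"), { once: true });

// dispatchEvent wraps the plain object in the Event wrapper defined above
// and returns `true` because nothing called preventDefault.
console.log(s.dispatchEvent({ type: "message" })); // -> true

// defineEventAttribute can add further `on<type>` attributes after the fact.
defineEventAttribute(Socket.prototype, "close");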
{string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property 
descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param 
{boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":[],"mappings":";;;;;AAAA;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAA
I,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASD,AAAO,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQD,AAAO,
SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASD,AAAO,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASD,AAAO,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAAG,YA
AY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;"} \ No newline at end of file diff --git a/node_modules/event-target-shim/dist/event-target-shim.umd.js b/node_modules/event-target-shim/dist/event-target-shim.umd.js new file mode 100644 index 0000000..e7cf5d4 --- /dev/null +++ 
b/node_modules/event-target-shim/dist/event-target-shim.umd.js @@ -0,0 +1,6 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.EventTargetShim={}))})(this,function(a){"use strict";function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a){var b=u.get(a);return console.assert(null!=b,"'this' is expected an Event object, but got",a),b}function d(a){return null==a.passiveListener?void(!a.event.cancelable||(a.canceled=!0,"function"==typeof a.event.preventDefault&&a.event.preventDefault())):void("undefined"!=typeof console&&"function"==typeof console.error&&console.error("Unable to preventDefault inside passive event listener invocation.",a.passiveListener))}function e(a,b){u.set(this,{eventTarget:a,event:b,eventPhase:2,currentTarget:a,canceled:!1,stopped:!1,immediateStopped:!1,passiveListener:null,timeStamp:b.timeStamp||Date.now()}),Object.defineProperty(this,"isTrusted",{value:!1,enumerable:!0});for(var c,d=Object.keys(b),e=0;e}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n 
/**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property 
descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param 
{boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":["pd","event","retv","privateData","get","console","assert","setCancelFlag","data","passiveListener","cancelable","canceled","preventDefault","error","Event","eventTarget","set","eventPhase","currentTarget","stopped","immediateStopped","timeStamp","Date","now","Object","defineProperty","value","enumerable","key","keys","i","length","defineRedirectDescriptor","configurable","defineCallDescriptor","apply","arguments","defineWrapper","BaseEvent","proto","CustomEvent","call","prototype","create","constructor","writable","descriptor","getOwnPropertyDescriptor","isFunc","getWrapper","wrapper","wrappers","getPrototypeOf","wrapEvent","Wrapper","isStopped","setEventPhase","setCurrentTarget","setPassiveListener","isObject","x","_typeof","getListeners","listeners","listenersMap","TypeError","defineEventAttributeDescriptor","eventName","node","listenerType","listener","next","prev","delete","newNode","passive","once","defineEventAttribute","eventTargetPrototype","defineCustomEventTarget","eventNames","CustomEventTarget","EventTarget","Map","Array","isArray","types","WeakMap","type","target","composedPath","NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","stopPropagation","stopImmediatePropagation","bubbles","defaultPrevented","composed","srcElement","cancelBubble","returnValue","initEvent","window","setPrototypeOf","CAPTURE","BUBBLE","addEventListener","options","optionsIsObj","capture","removeEventListener","dispatchEvent","wrappedEvent","err","handleEvent"],"mappings":";;;;wbAkCA,QAASA,CAAAA,CAAT,CAAYC,CAAZ,CAAmB,IACTC,CAAAA,CAAI,CAAGC,CAAW,CAACC,GAAZD,CAAgBF,CAAhBE,QACbE,CAAAA,OAAO,CAACC,MAARD,CACY,IAARH,EAAAA,CADJG,CAEI,6CAFJA,CAGIJ,CAHJI,EAKOH,EAOX,QAASK,CAAAA,CAAT,CAAuBC,CAAvB,CAA6B,OACG,KAAxBA,EAAAA,CAAI,CAACC,eADgB,MAarB,CAACD,CAAI,CAACP,KAALO,CAAWE,UAbS,GAiBzBF,CAAI,CAACG,QAALH,GAjByB,CAkBgB,UAArC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWI,cAlBG,EAmBrBJ,CAAI,CAACP,KAALO,CAAWI,cAAXJ,EAnBqB,QAGE,WAAnB,QAAOH,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAJE,EAMjBR,OAAO,CAACQ,KAARR,CACI,oEADJA,CAEIG,CAAI,CAACC,eAFTJ,CANiB,EAiC7B,QAASS,CAAAA,CAAT,CAAeC,CAAf,CAA4Bd,CAA5B,CAAmC,CAC/BE,CAAW,CAACa,GAAZb,CAAgB,IAAhBA,CAAsB,CAClBY,WAAW,CAAXA,CADkB,CAElBd,KAAK,CAALA,CAFkB,CAGlBgB,UAAU,CAAE,CAHM,CAIlBC,aAAa,CAAEH,CAJG,CAKlBJ,QAAQ,GALU,CAMlBQ,OAAO,GANW,CAOlBC,gBAAgB,GAPE,CAQlBX,eAAe,CAAE,IARC,CASlBY,SAAS,CAAEpB,CAAK,CAACoB,S
AANpB,EAAmBqB,IAAI,CAACC,GAALD,EATZ,CAAtBnB,CAD+B,CAc/BqB,MAAM,CAACC,cAAPD,CAAsB,IAAtBA,CAA4B,WAA5BA,CAAyC,CAAEE,KAAK,GAAP,CAAgBC,UAAU,GAA1B,CAAzCH,CAd+B,QAmBrBI,CAAAA,EAFJC,CAAI,CAAGL,MAAM,CAACK,IAAPL,CAAYvB,CAAZuB,EACJM,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,EACzBF,EAAMC,CAAI,CAACC,CAAD,EACVF,CAAG,GAAI,OACTJ,MAAM,CAACC,cAAPD,CAAsB,IAAtBA,CAA4BI,CAA5BJ,CAAiCQ,CAAwB,CAACJ,CAAD,CAAzDJ,EAyOZ,QAASQ,CAAAA,CAAT,CAAkCJ,CAAlC,CAAuC,OAC5B,CACHxB,GADG,WACG,OACKJ,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAe4B,CAAf5B,CAFR,CAAA,CAIHgB,GAJG,UAICU,EAAO,CACP1B,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAe4B,CAAf5B,EAAsB0B,CALvB,CAAA,CAOHO,YAAY,GAPT,CAQHN,UAAU,GARP,EAkBX,QAASO,CAAAA,CAAT,CAA8BN,CAA9B,CAAmC,OACxB,CACHF,KADG,WACK,IACEzB,CAAAA,CAAK,CAAGD,CAAE,CAAC,IAAD,CAAFA,CAASC,YAChBA,CAAAA,CAAK,CAAC2B,CAAD,CAAL3B,CAAWkC,KAAXlC,CAAiBA,CAAjBA,CAAwBmC,SAAxBnC,CAHR,CAAA,CAKHgC,YAAY,GALT,CAMHN,UAAU,GANP,EAiBX,QAASU,CAAAA,CAAT,CAAuBC,CAAvB,CAAkCC,CAAlC,CAAyC,SAO5BC,CAAAA,EAAYzB,EAAad,EAAO,CACrCqC,CAAS,CAACG,IAAVH,CAAe,IAAfA,CAAqBvB,CAArBuB,CAAkCrC,CAAlCqC,KAPET,CAAAA,CAAI,CAAGL,MAAM,CAACK,IAAPL,CAAYe,CAAZf,KACO,CAAhBK,GAAAA,CAAI,CAACE,aACEO,CAAAA,EAQXE,CAAW,CAACE,SAAZF,CAAwBhB,MAAM,CAACmB,MAAPnB,CAAcc,CAAS,CAACI,SAAxBlB,CAAmC,CACvDoB,WAAW,CAAE,CAAElB,KAAK,CAAEc,CAAT,CAAsBP,YAAY,GAAlC,CAA0CY,QAAQ,GAAlD,CAD0C,CAAnCrB,CAXa,KAgBhC,GACKI,CAAAA,CADL,CAAIE,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,KACzBF,EAAMC,CAAI,CAACC,CAAD,EACZ,EAAEF,CAAG,GAAIU,CAAAA,CAAS,CAACI,SAAnB,EAA+B,IACzBI,CAAAA,CAAU,CAAGtB,MAAM,CAACuB,wBAAPvB,CAAgCe,CAAhCf,CAAuCI,CAAvCJ,CADY,CAEzBwB,CAAM,CAA+B,UAA5B,QAAOF,CAAAA,CAAU,CAACpB,KAFF,CAG/BF,MAAM,CAACC,cAAPD,CACIgB,CAAW,CAACE,SADhBlB,CAEII,CAFJJ,CAGIwB,CAAM,CACAd,CAAoB,CAACN,CAAD,CADpB,CAEAI,CAAwB,CAACJ,CAAD,CALlCJ,QAUDgB,CAAAA,EASX,QAASS,CAAAA,CAAT,CAAoBV,CAApB,CAA2B,IACV,IAATA,EAAAA,CAAK,EAAYA,CAAK,GAAKf,MAAM,CAACkB,gBAC3B5B,CAAAA,KAGPoC,CAAAA,CAAO,CAAGC,CAAQ,CAAC/C,GAAT+C,CAAaZ,CAAbY,QACC,KAAXD,EAAAA,IACAA,CAAO,CAAGb,CAAa,CAACY,CAAU,CAACzB,MAAM,CAAC4B,cAAP5B,CAAsBe,CAAtBf,CAAD,CAAX,CAA2Ce,CAA3C,EACvBY,CAAQ,CAACnC,GAATmC,CAAaZ,CAAbY,CAAoBD,CAApBC,GAEGD,EAUJ,QAASG,CAAAA,CAAT,CAAmBtC,CAAnB,CAAgCd,CAAhC,CAAuC,IACpCqD,CAAAA,CAAO,CAAGL,CAAU,CAACzB,MAAM,CAAC4B,cAAP5B,CAAsBvB,CAAtBuB,CAAD,QACnB,IAAI8B,CAAAA,CAAJ,CAAYvC,CAAZ,CAAyBd,CAAzB,EASJ,QAASsD,CAAAA,CAAT,CAAmBtD,CAAnB,CAA0B,OACtBD,CAAAA,CAAE,CAACC,CAAD,CAAFD,CAAUoB,iBAUd,QAASoC,CAAAA,CAAT,CAAuBvD,CAAvB,CAA8BgB,CAA9B,CAA0C,CAC7CjB,CAAE,CAACC,CAAD,CAAFD,CAAUiB,UAAVjB,CAAuBiB,EAUpB,QAASwC,CAAAA,CAAT,CAA0BxD,CAA1B,CAAiCiB,CAAjC,CAAgD,CACnDlB,CAAE,CAACC,CAAD,CAAFD,CAAUkB,aAAVlB,CAA0BkB,EAUvB,QAASwC,CAAAA,CAAT,CAA4BzD,CAA5B,CAAmCQ,CAAnC,CAAoD,CACvDT,CAAE,CAACC,CAAD,CAAFD,CAAUS,eAAVT,CAA4BS,EC3bhC,QAASkD,CAAAA,CAAT,CAAkBC,CAAlB,CAAqB,OACJ,KAANA,GAAAA,CAAC,EAA0B,QAAb,GAAAC,EAAOD,GAShC,QAASE,CAAAA,CAAT,CAAsB/C,CAAtB,CAAmC,IACzBgD,CAAAA,CAAS,CAAGC,CAAY,CAAC5D,GAAb4D,CAAiBjD,CAAjBiD,KACD,IAAbD,EAAAA,OACM,IAAIE,CAAAA,SAAJ,CACF,kEADE,QAIHF,CAAAA,EASX,QAASG,CAAAA,CAAT,CAAwCC,CAAxC,CAAmD,OACxC,CACH/D,GADG,WACG,QACI2D,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CAD5B,CAEEM,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CAFT,CAGa,IAARK,EAAAA,CAHL,EAGmB,IACbA,IAAAA,CAAI,CAACC,mBACED,CAAAA,CAAI,CAACE,SAEhBF,CAAI,CAAGA,CAAI,CAACG,WAET,KAVR,CAAA,CAaHvD,GAbG,UAaCsD,EAAU,CACc,UAApB,QAAOA,CAAAA,CAAP,EAAmCX,CAAQ,CAACW,CAAD,CADrC,GAENA,CAAQ,CAAG,IAFL,SAIJP,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CAJpB,CAONU,CAAI,CAAG,IAPD,CAQNJ,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CARD,CASK,IAARK,EAAAA,CATG,EAUFA,IAAAA,CAAI,CAACC,YAVH,CAYW,IAATG,GAAAA,CAZF,CAcuB,IAAdJ,GAAAA,CAAI,CAACG,IAdd,CAiBER,CAAS
,CAACU,MAAVV,CAAiBI,CAAjBJ,CAjBF,CAeEA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,CAfF,CAaES,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,IAbnB,CAoBFC,CAAI,CAAGJ,CApBL,CAuBNA,CAAI,CAAGA,CAAI,CAACG,IAvBN,IA2BO,IAAbD,GAAAA,EAAmB,IACbI,CAAAA,CAAO,CAAG,CACZJ,QAAQ,CAARA,CADY,CAEZD,YAAY,EAFA,CAGZM,OAAO,GAHK,CAIZC,IAAI,GAJQ,CAKZL,IAAI,CAAE,IALM,EAOH,IAATC,GAAAA,CARe,CASfT,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBW,CAAzBX,CATe,CAWfS,CAAI,CAACD,IAALC,CAAYE,EAnDrB,CAAA,CAuDHzC,YAAY,GAvDT,CAwDHN,UAAU,GAxDP,EAkEX,QAASkD,CAAAA,CAAT,CAA8BC,CAA9B,CAAoDX,CAApD,CAA+D,CAC3D3C,MAAM,CAACC,cAAPD,CACIsD,CADJtD,aAES2C,EAFT3C,CAGI0C,CAA8B,CAACC,CAAD,CAHlC3C,EAaJ,QAASuD,CAAAA,CAAT,CAAiCC,CAAjC,CAA6C,SAEhCC,CAAAA,GAAoB,CACzBC,CAAW,CAACzC,IAAZyC,CAAiB,IAAjBA,EAGJD,CAAiB,CAACvC,SAAlBuC,CAA8BzD,MAAM,CAACmB,MAAPnB,CAAc0D,CAAW,CAACxC,SAA1BlB,CAAqC,CAC/DoB,WAAW,CAAE,CACTlB,KAAK,CAAEuD,CADE,CAEThD,YAAY,GAFH,CAGTY,QAAQ,GAHC,CADkD,CAArCrB,CANW,KAcpC,GAAIM,CAAAA,CAAC,CAAG,EAAGA,CAAC,CAAGkD,CAAU,CAACjD,OAAQ,EAAED,EACrC+C,CAAoB,CAACI,CAAiB,CAACvC,SAAnB,CAA8BsC,CAAU,CAAClD,CAAD,CAAxC,CAApB+C,OAGGI,CAAAA,EAgBX,QAASC,CAAAA,CAAT,EAAuB,IAEf,eAAgBA,CAAAA,aAChBlB,CAAAA,CAAY,CAAChD,GAAbgD,CAAiB,IAAjBA,CAAuB,GAAImB,CAAAA,GAA3BnB,KAGqB,CAArB5B,GAAAA,SAAS,CAACL,MAAVK,EAA0BgD,KAAK,CAACC,OAAND,CAAchD,SAAS,CAAC,CAAD,CAAvBgD,QACnBL,CAAAA,CAAuB,CAAC3C,SAAS,CAAC,CAAD,CAAV,KAEX,CAAnBA,CAAAA,SAAS,CAACL,OAAY,QAChBuD,CAAAA,CAAK,CAAOF,KAAP,CAAahD,SAAS,CAACL,MAAvB,EACFD,CAAC,CAAG,EAAGA,CAAC,CAAGM,SAAS,CAACL,OAAQ,EAAED,EACpCwD,CAAK,CAACxD,CAAD,CAALwD,CAAWlD,SAAS,CAACN,CAAD,CAApBwD,OAEGP,CAAAA,CAAuB,CAACO,CAAD,OAE5B,IAAIrB,CAAAA,SAAJ,CAAc,mCAAd,KD5KJ9D,CAAAA,CAAW,CAAG,GAAIoF,CAAAA,QAOlBpC,CAAQ,CAAG,GAAIoC,CAAAA,QAkFrBzE,CAAK,CAAC4B,SAAN5B,CAAkB,IAKV0E,CAAAA,MAAO,OACAxF,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAewF,IANZ,CAAA,IAaVC,CAAAA,QAAS,OACFzF,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASe,WAdN,CAAA,IAqBVG,CAAAA,eAAgB,OACTlB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASkB,aAtBN,CAAA,CA4BdwE,YA5Bc,WA4BC,IACLxE,CAAAA,CAAa,CAAGlB,CAAE,CAAC,IAAD,CAAFA,CAASkB,cADpB,MAEU,KAAjBA,EAAAA,CAFO,CAGA,EAHA,CAKJ,CAACA,CAAD,CAjCG,CAAA,IAwCVyE,CAAAA,MAAO,OACA,EAzCG,CAAA,IAgDVC,CAAAA,iBAAkB,OACX,EAjDG,CAAA,IAwDVC,CAAAA,WAAY,OACL,EAzDG,CAAA,IAgEVC,CAAAA,gBAAiB,OACV,EAjEG,CAAA,IAwEV7E,CAAAA,YAAa,OACNjB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASiB,UAzEN,CAAA,CAgFd8E,eAhFc,WAgFI,IACRvF,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,GAHc,CAI4B,UAAtC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWuF,eAJR,EAKVvF,CAAI,CAACP,KAALO,CAAWuF,eAAXvF,EArFM,CAAA,CA6FdwF,wBA7Fc,WA6Fa,IACjBxF,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,GAHuB,CAIvBA,CAAI,CAACY,gBAALZ,GAJuB,CAK4B,UAA/C,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWwF,wBALC,EAMnBxF,CAAI,CAACP,KAALO,CAAWwF,wBAAXxF,EAnGM,CAAA,IA2GVyF,CAAAA,SAAU,SACKjG,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAeiG,OA5GpB,CAAA,IAmHVvF,CAAAA,YAAa,SACEV,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAeU,UApHpB,CAAA,CA2HdE,cA3Hc,WA2HG,CACbL,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA5HH,CAAA,IAmIVkG,CAAAA,kBAAmB,OACZlG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASW,QApIN,CAAA,IA2IVwF,CAAAA,UAAW,SACInG,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAemG,QA5IpB,CAAA,IAmJV9E,CAAAA,WAAY,OACLrB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASqB,SApJN,CAAA,IA4JV+E,CAAAA,YAAa,OACNpG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASe,WA7JN,CAAA,IAqKVsF,CAAAA,cAAe,OACRrG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASmB,OAtKN,CAAA,IAwKVkF,CAAAA,aAAa3E,EAAO,IACfA,MAGClB,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,IACuC,SAAnC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAW6F,eAClB7F,CAAI,CAACP,KAALO,CAAW6F,YAAX7F,KAhLM,CAAA,IAyLV8F,CAAAA,aAAc,OACP,CAACtG,CAAE,CAAC,IAAD,CAAFA,CAASW,QA1LP,CAAA,IA4LV2F,CAAAA,YAA
Y5E,EAAO,CACdA,CADc,EAEfnB,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA9LP,CAAA,CAyMduG,SAzMc,WAyMF,EAzME,EA+MlB/E,MAAM,CAACC,cAAPD,CAAsBV,CAAK,CAAC4B,SAA5BlB,CAAuC,aAAvCA,CAAsD,CAClDE,KAAK,CAAEZ,CAD2C,CAElDmB,YAAY,GAFsC,CAGlDY,QAAQ,GAH0C,CAAtDrB,EAOsB,WAAlB,QAAOgF,CAAAA,MAAP,EAAyD,WAAxB,QAAOA,CAAAA,MAAM,CAAC1F,QAC/CU,MAAM,CAACiF,cAAPjF,CAAsBV,CAAK,CAAC4B,SAA5BlB,CAAuCgF,MAAM,CAAC1F,KAAP0F,CAAa9D,SAApDlB,EAGA2B,CAAQ,CAACnC,GAATmC,CAAaqD,MAAM,CAAC1F,KAAP0F,CAAa9D,SAA1BS,CAAqCrC,CAArCqC,MChTEa,CAAAA,CAAY,CAAG,GAAIuB,CAAAA,QAGnBmB,CAAO,CAAG,EACVC,CAAM,CAAG,KA0KfzB,CAAW,CAACxC,SAAZwC,CAAwB,CAQpB0B,gBARoB,UAQHzC,EAAWG,EAAUuC,EAAS,IAC3B,IAAZvC,EAAAA,MAGoB,UAApB,QAAOA,CAAAA,CAAP,EAAkC,CAACX,CAAQ,CAACW,CAAD,OACrC,IAAIL,CAAAA,SAAJ,CAAc,+CAAd,KAGJF,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,EACxBgD,CAAY,CAAGnD,CAAQ,CAACkD,CAAD,EACvBE,CAAO,CAAGD,CAAY,GACdD,CAAO,CAACE,OADM,GAEdF,EACRxC,CAAY,CAAG0C,CAAO,CAAGL,CAAH,CAAaC,EACnCjC,CAAO,CAAG,CACZJ,QAAQ,CAARA,CADY,CAEZD,YAAY,CAAZA,CAFY,CAGZM,OAAO,CAAEmC,CAAY,IAAYD,CAAO,CAAClC,OAH7B,CAIZC,IAAI,CAAEkC,CAAY,IAAYD,CAAO,CAACjC,IAJ1B,CAKZL,IAAI,CAAE,IALM,EASZH,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,KACPK,SAAAA,aACAL,CAAAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBW,CAAzBX,SAKAS,CAAAA,CAAI,CAAG,KACI,IAARJ,EAAAA,GAAc,IAEbA,CAAI,CAACE,QAALF,GAAkBE,CAAlBF,EACAA,CAAI,CAACC,YAALD,GAAsBC,SAK1BG,CAAI,CAAGJ,CARU,CASjBA,CAAI,CAAGA,CAAI,CAACG,IAxC2B,CA4C3CC,CAAI,CAACD,IAALC,CAAYE,EApDI,CAAA,CA8DpBsC,mBA9DoB,UA8DA7C,EAAWG,EAAUuC,EAAS,IAC9B,IAAZvC,EAAAA,SAIEP,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,EACxBiD,CAAO,CAAGpD,CAAQ,CAACkD,CAAD,CAARlD,GACFkD,CAAO,CAACE,OADNpD,GAEFkD,EACRxC,CAAY,CAAG0C,CAAO,CAAGL,CAAH,CAAaC,EAErCnC,CAAI,CAAG,KACPJ,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,EACI,IAARK,EAAAA,GAAc,IAEbA,CAAI,CAACE,QAALF,GAAkBE,CAAlBF,EACAA,CAAI,CAACC,YAALD,GAAsBC,cAET,IAATG,GAAAA,EAEqB,IAAdJ,GAAAA,CAAI,CAACG,KAGZR,CAAS,CAACU,MAAVV,CAAiBI,CAAjBJ,EAFAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,EAFAS,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,MASzBC,CAAI,CAAGJ,CAfU,CAgBjBA,CAAI,CAAGA,CAAI,CAACG,KA3FA,CAAA,CAoGpB0C,aApGoB,UAoGNhH,EAAO,IACJ,IAATA,EAAAA,CAAK,EAAkC,QAAtB,QAAOA,CAAAA,CAAK,CAACuF,UACxB,IAAIvB,CAAAA,SAAJ,CAAc,oCAAd,EAFO,GAMXF,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CANb,CAOXK,CAAS,CAAGlE,CAAK,CAACuF,IAPP,CAQbpB,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CARM,IASL,IAARK,EAAAA,WATa,OAcX8C,CAAAA,CAAY,CAAG7D,CAAS,CAAC,IAAD,CAAOpD,CAAP,CAdb,CAkBbuE,CAAI,CAAG,IAlBM,CAmBF,IAARJ,EAAAA,CAnBU,EAmBI,IAEbA,CAAI,CAACQ,KACQ,IAATJ,GAAAA,EAEqB,IAAdJ,GAAAA,CAAI,CAACG,KAGZR,CAAS,CAACU,MAAVV,CAAiBI,CAAjBJ,EAFAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,EAFAS,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,KAOrBC,CAAI,CAAGJ,EAIXV,CAAkB,CACdwD,CADc,CAEd9C,CAAI,CAACO,OAALP,CAAeA,CAAI,CAACE,QAApBF,CAA+B,IAFjB,EAIW,UAAzB,QAAOA,CAAAA,CAAI,CAACE,YACR,CACAF,CAAI,CAACE,QAALF,CAAc3B,IAAd2B,CAAmB,IAAnBA,CAAyB8C,CAAzB9C,CADJ,CAEE,MAAO+C,CAAP,CAAY,CAEa,WAAnB,QAAO9G,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAHT,EAKNR,OAAO,CAACQ,KAARR,CAAc8G,CAAd9G,MAIR+D,CAAAA,CAAI,CAACC,YAALD,GA/TE,CA+TFA,EACqC,UAArC,QAAOA,CAAAA,CAAI,CAACE,QAALF,CAAcgD,aAErBhD,CAAI,CAACE,QAALF,CAAcgD,WAAdhD,CAA0B8C,CAA1B9C,KAIAb,CAAS,CAAC2D,CAAD,QAIb9C,CAAI,CAAGA,CAAI,CAACG,WAEhBb,CAAAA,CAAkB,CAACwD,CAAD,CAAe,IAAf,EAClB1D,CAAa,CAAC0D,CAAD,CAAe,CAAf,EACbzD,CAAgB,CAACyD,CAAD,CAAe,IAAf,EAET,CAACA,CAAY,CAAChB,iBAvKL,EA4KxB1E,MAAM,CAACC,cAAPD,CAAsB0D,CAAW,CAACxC,SAAlClB,CAA6C,aAA7CA,CAA4D,CACxDE,KAAK,CAAEwD,CADiD,CAExDjD,YAAY,GAF4C,CAGxDY,QAAQ,GAHgD,CAA5DrB,EAQsB,WAAlB,QAAOgF,CAAAA,MAAP,EAC8B,WAA9B,QAAOA,CAAAA,MAAM,CAACtB,aAEd1D,MAAM,CAACiF,cAAPjF,CAAsB0D,CAAW,CAACxC,S
AAlClB,CAA6CgF,MAAM,CAACtB,WAAPsB,CAAmB9D,SAAhElB"} \ No newline at end of file diff --git a/node_modules/event-target-shim/index.d.ts b/node_modules/event-target-shim/index.d.ts new file mode 100644 index 0000000..a303097 --- /dev/null +++ b/node_modules/event-target-shim/index.d.ts @@ -0,0 +1,399 @@ +export as namespace EventTargetShim + +/** + * `Event` interface. + * @see https://dom.spec.whatwg.org/#event + */ +export interface Event { + /** + * The type of this event. + */ + readonly type: string + + /** + * The target of this event. + */ + readonly target: EventTarget<{}, {}, "standard"> | null + + /** + * The current target of this event. + */ + readonly currentTarget: EventTarget<{}, {}, "standard"> | null + + /** + * The target of this event. + * @deprecated + */ + readonly srcElement: any | null + + /** + * The composed path of this event. + */ + composedPath(): EventTarget<{}, {}, "standard">[] + + /** + * Constant of NONE. + */ + readonly NONE: number + + /** + * Constant of CAPTURING_PHASE. + */ + readonly CAPTURING_PHASE: number + + /** + * Constant of BUBBLING_PHASE. + */ + readonly BUBBLING_PHASE: number + + /** + * Constant of AT_TARGET. + */ + readonly AT_TARGET: number + + /** + * Indicates which phase of the event flow is currently being evaluated. + */ + readonly eventPhase: number + + /** + * Stop event bubbling. + */ + stopPropagation(): void + + /** + * Stop event bubbling. + */ + stopImmediatePropagation(): void + + /** + * Initialize event. + * @deprecated + */ + initEvent(type: string, bubbles?: boolean, cancelable?: boolean): void + + /** + * The flag indicating bubbling. + */ + readonly bubbles: boolean + + /** + * Stop event bubbling. + * @deprecated + */ + cancelBubble: boolean + + /** + * Set or get cancellation flag. + * @deprecated + */ + returnValue: boolean + + /** + * The flag indicating whether the event can be canceled. + */ + readonly cancelable: boolean + + /** + * Cancel this event. + */ + preventDefault(): void + + /** + * The flag to indicating whether the event was canceled. + */ + readonly defaultPrevented: boolean + + /** + * The flag to indicating if event is composed. + */ + readonly composed: boolean + + /** + * Indicates whether the event was dispatched by the user agent. + */ + readonly isTrusted: boolean + + /** + * The unix time of this event. + */ + readonly timeStamp: number +} + +/** + * The constructor of `EventTarget` interface. + */ +export type EventTargetConstructor< + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" +> = { + prototype: EventTarget + new(): EventTarget +} + +/** + * `EventTarget` interface. + * @see https://dom.spec.whatwg.org/#interface-eventtarget + */ +export type EventTarget< + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" +> = EventTarget.EventAttributes & { + /** + * Add a given listener to this event target. + * @param eventName The event name to add. + * @param listener The listener to add. + * @param options The options for this listener. + */ + addEventListener>( + type: TEventType, + listener: + | EventTarget.Listener> + | null, + options?: boolean | EventTarget.AddOptions + ): void + + /** + * Remove a given listener from this event target. + * @param eventName The event name to remove. + * @param listener The listener to remove. + * @param options The options for this listener. 
+ */ + removeEventListener>( + type: TEventType, + listener: + | EventTarget.Listener> + | null, + options?: boolean | EventTarget.RemoveOptions + ): void + + /** + * Dispatch a given event. + * @param event The event to dispatch. + * @returns `false` if canceled. + */ + dispatchEvent>( + event: EventTarget.EventData + ): boolean +} + +export const EventTarget: EventTargetConstructor & { + /** + * Create an `EventTarget` instance with detailed event definition. + * + * The detailed event definition requires to use `defineEventAttribute()` + * function later. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * const signal = new EventTarget<{ abort: Event }, { onabort: Event }>() + * defineEventAttribute(signal, "abort") + */ + new < + TEvents extends EventTarget.EventDefinition, + TEventAttributes extends EventTarget.EventDefinition, + TMode extends EventTarget.Mode = "loose" + >(): EventTarget + + /** + * Define an `EventTarget` constructor with attribute events and detailed event definition. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * class AbortSignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") { + * abort(): void {} + * } + * + * @param events Optional event attributes (e.g. passing in `"click"` adds `onclick` to prototype). + */ + < + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" + >(events: string[]): EventTargetConstructor< + TEvents, + TEventAttributes, + TMode + > + + /** + * Define an `EventTarget` constructor with attribute events and detailed event definition. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * class AbortSignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") { + * abort(): void {} + * } + * + * @param events Optional event attributes (e.g. passing in `"click"` adds `onclick` to prototype). + */ + < + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" + >(event0: string, ...events: string[]): EventTargetConstructor< + TEvents, + TEventAttributes, + TMode + > +} + +export namespace EventTarget { + /** + * Options of `removeEventListener()` method. + */ + export interface RemoveOptions { + /** + * The flag to indicate that the listener is for the capturing phase. + */ + capture?: boolean + } + + /** + * Options of `addEventListener()` method. + */ + export interface AddOptions extends RemoveOptions { + /** + * The flag to indicate that the listener doesn't support + * `event.preventDefault()` operation. + */ + passive?: boolean + /** + * The flag to indicate that the listener will be removed on the first + * event. + */ + once?: boolean + } + + /** + * The type of regular listeners. + */ + export interface FunctionListener { + (event: TEvent): void + } + + /** + * The type of object listeners. + */ + export interface ObjectListener { + handleEvent(event: TEvent): void + } + + /** + * The type of listeners. + */ + export type Listener = + | FunctionListener + | ObjectListener + + /** + * Event definition. 
+ */ + export type EventDefinition = { + readonly [key: string]: Event + } + + /** + * Mapped type for event attributes. + */ + export type EventAttributes = { + [P in keyof TEventAttributes]: + | FunctionListener + | null + } + + /** + * The type of event data for `dispatchEvent()` method. + */ + export type EventData< + TEvents extends EventDefinition, + TEventType extends keyof TEvents | string, + TMode extends Mode + > = + TEventType extends keyof TEvents + ? ( + // Require properties which are not generated automatically. + & Pick< + TEvents[TEventType], + Exclude + > + // Properties which are generated automatically are optional. + & Partial> + ) + : ( + TMode extends "standard" + ? Event + : Event | NonStandardEvent + ) + + /** + * The string literal types of the properties which are generated + * automatically in `dispatchEvent()` method. + */ + export type OmittableEventKeys = Exclude + + /** + * The type of event data. + */ + export type NonStandardEvent = { + [key: string]: any + type: string + } + + /** + * The type of listeners. + */ + export type PickEvent< + TEvents extends EventDefinition, + TEventType extends keyof TEvents | string, + > = + TEventType extends keyof TEvents + ? TEvents[TEventType] + : Event + + /** + * Event type candidates. + */ + export type EventType< + TEvents extends EventDefinition, + TMode extends Mode + > = + TMode extends "strict" + ? keyof TEvents + : keyof TEvents | string + + /** + * - `"strict"` ..... Methods don't accept unknown events. + * `dispatchEvent()` accepts partial objects. + * - `"loose"` ...... Methods accept unknown events. + * `dispatchEvent()` accepts partial objects. + * - `"standard"` ... Methods accept unknown events. + * `dispatchEvent()` doesn't accept partial objects. + */ + export type Mode = "strict" | "standard" | "loose" +} + +/** + * Specialized `type` property. + */ +export type Type = { type: T } + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param prototype The event target prototype to define an event attribute. + * @param eventName The event name to define. 
+ */ +export function defineEventAttribute( + prototype: EventTarget, + eventName: string +): void + +export default EventTarget diff --git a/node_modules/event-target-shim/package.json b/node_modules/event-target-shim/package.json new file mode 100644 index 0000000..40326f3 --- /dev/null +++ b/node_modules/event-target-shim/package.json @@ -0,0 +1,82 @@ +{ + "name": "event-target-shim", + "version": "5.0.1", + "description": "An implementation of WHATWG EventTarget interface.", + "main": "dist/event-target-shim", + "types": "index.d.ts", + "files": [ + "dist", + "index.d.ts" + ], + "engines": { + "node": ">=6" + }, + "scripts": { + "preversion": "npm test", + "version": "npm run build && git add dist/*", + "postversion": "git push && git push --tags", + "clean": "rimraf .nyc_output coverage", + "coverage": "nyc report --reporter lcov && opener coverage/lcov-report/index.html", + "lint": "eslint src test scripts --ext .js,.mjs", + "build": "rollup -c scripts/rollup.config.js", + "pretest": "npm run lint", + "test": "run-s test:*", + "test:mocha": "nyc --require ./scripts/babel-register mocha test/*.mjs", + "test:karma": "karma start scripts/karma.conf.js --single-run", + "watch": "run-p watch:*", + "watch:mocha": "mocha test/*.mjs --require ./scripts/babel-register --watch --watch-extensions js,mjs --growl", + "watch:karma": "karma start scripts/karma.conf.js --watch", + "codecov": "codecov" + }, + "devDependencies": { + "@babel/core": "^7.2.2", + "@babel/plugin-transform-modules-commonjs": "^7.2.0", + "@babel/preset-env": "^7.2.3", + "@babel/register": "^7.0.0", + "@mysticatea/eslint-plugin": "^8.0.1", + "@mysticatea/spy": "^0.1.2", + "assert": "^1.4.1", + "codecov": "^3.1.0", + "eslint": "^5.12.1", + "karma": "^3.1.4", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + "karma-firefox-launcher": "^1.0.0", + "karma-growl-reporter": "^1.0.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0-rc.2", + "mocha": "^5.2.0", + "npm-run-all": "^4.1.5", + "nyc": "^13.1.0", + "opener": "^1.5.1", + "rimraf": "^2.6.3", + "rollup": "^1.1.1", + "rollup-plugin-babel": "^4.3.2", + "rollup-plugin-babel-minify": "^7.0.0", + "rollup-plugin-commonjs": "^9.2.0", + "rollup-plugin-json": "^3.1.0", + "rollup-plugin-node-resolve": "^4.0.0", + "rollup-watch": "^4.3.1", + "type-tester": "^1.0.0", + "typescript": "^3.2.4" + }, + "repository": { + "type": "git", + "url": "https://github.com/mysticatea/event-target-shim.git" + }, + "keywords": [ + "w3c", + "whatwg", + "eventtarget", + "event", + "events", + "shim" + ], + "author": "Toru Nagashima", + "license": "MIT", + "bugs": { + "url": "https://github.com/mysticatea/event-target-shim/issues" + }, + "homepage": "https://github.com/mysticatea/event-target-shim" +} diff --git a/node_modules/events/.airtap.yml b/node_modules/events/.airtap.yml new file mode 100644 index 0000000..c7a8a87 --- /dev/null +++ b/node_modules/events/.airtap.yml @@ -0,0 +1,15 @@ +sauce_connect: true +loopback: airtap.local +browsers: + - name: chrome + version: latest + - name: firefox + version: latest + - name: safari + version: 9..latest + - name: iphone + version: latest + - name: ie + version: 9..latest + - name: microsoftedge + version: 13..latest diff --git a/node_modules/events/.github/FUNDING.yml b/node_modules/events/.github/FUNDING.yml new file mode 100644 index 0000000..8b8cb78 --- /dev/null +++ b/node_modules/events/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: # 
Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/events +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/events/.travis.yml b/node_modules/events/.travis.yml new file mode 100644 index 0000000..486dc3c --- /dev/null +++ b/node_modules/events/.travis.yml @@ -0,0 +1,18 @@ +dist: xenial +os: linux +language: node_js +node_js: + - 'stable' + - 'lts/*' + - '0.12' +script: + - npm test + - if [ "${TRAVIS_PULL_REQUEST}" = "false" ] && [ "${TRAVIS_NODE_VERSION}" = "stable" ]; then npm run test:browsers; fi +addons: + sauce_connect: true + hosts: + - airtap.local +env: + global: + - secure: XcBiD8yReflut9q7leKsigDZ0mI3qTKH+QrNVY8DaqlomJOZw8aOrVuX9Jz12l86ZJ41nbxmKnRNkFzcVr9mbP9YaeTb3DpeOBWmvaoSfud9Wnc16VfXtc1FCcwDhSVcSiM3UtnrmFU5cH+Dw1LPh5PbfylYOS/nJxUvG0FFLqI= + - secure: jNWtEbqhUdQ0xXDHvCYfUbKYeJCi6a7B4LsrcxYCyWWn4NIgncE5x2YbB+FSUUFVYfz0dsn5RKP1oHB99f0laUEo18HBNkrAS/rtyOdVzcpJjbQ6kgSILGjnJD/Ty1B57Rcz3iyev5Y7bLZ6Y1FbDnk/i9/l0faOGz8vTC3Vdkc= diff --git a/node_modules/events/History.md b/node_modules/events/History.md new file mode 100644 index 0000000..f48bf21 --- /dev/null +++ b/node_modules/events/History.md @@ -0,0 +1,118 @@ +# 3.3.0 + + - Support EventTarget emitters in `events.once` from Node.js 12.11.0. + + Now you can use the `events.once` function with objects that implement the EventTarget interface. This interface is used widely in + the DOM and other web APIs. + + ```js + var events = require('events'); + var assert = require('assert'); + + async function connect() { + var ws = new WebSocket('wss://example.com'); + await events.once(ws, 'open'); + assert(ws.readyState === WebSocket.OPEN); + } + + async function onClick() { + await events.once(document.body, 'click'); + alert('you clicked the page!'); + } + ``` + +# 3.2.0 + + - Add `events.once` from Node.js 11.13.0. + + To use this function, Promises must be supported in the environment. Use a polyfill like `es6-promise` if you support older browsers. + +# 3.1.0 (2020-01-08) + +`events` now matches the Node.js 11.12.0 API. + + - pass through return value in wrapped `emitter.once()` listeners + + Now, this works: + ```js + emitter.once('myevent', function () { return 1; }); + var listener = emitter.rawListeners('myevent')[0] + assert(listener() === 1); + ``` + Previously, `listener()` would return undefined regardless of the implementation. + + Ported from https://github.com/nodejs/node/commit/acc506c2d2771dab8d7bba6d3452bc5180dff7cf + + - Reduce code duplication in listener type check ([#67](https://github.com/Gozala/events/pull/67) by [@friederbluemle](https://github.com/friederbluemle)). + - Improve `emitter.once()` performance in some engines + +# 3.0.0 (2018-05-25) + +**This version drops support for IE8.** `events` no longer includes polyfills +for ES5 features. If you need to support older environments, use an ES5 shim +like [es5-shim](https://npmjs.com/package/es5-shim). Both the shim and sham +versions of es5-shim are necessary. 
+ + - Update to events code from Node.js 10.x + - (semver major) Adds `off()` method + - Port more tests from Node.js + - Switch browser tests to airtap, making things more reliable + +# 2.1.0 (2018-05-25) + + - add Emitter#rawListeners from Node.js v9.4 + +# 2.0.0 (2018-02-02) + + - Update to events code from node.js 8.x + - Adds `prependListener()` and `prependOnceListener()` + - Adds `eventNames()` method + - (semver major) Unwrap `once()` listeners in `listeners()` + - copy tests from node.js + +Note that this version doubles the gzipped size, jumping from 1.1KB to 2.1KB, +due to new methods and runtime performance improvements. Be aware of that when +upgrading. + +# 1.1.1 (2016-06-22) + + - add more context to errors if they are not instanceof Error + +# 1.1.0 (2015-09-29) + + - add Emitter#listerCount (to match node v4 api) + +# 1.0.2 (2014-08-28) + + - remove un-reachable code + - update devDeps + +## 1.0.1 / 2014-05-11 + + - check for console.trace before using it + +## 1.0.0 / 2013-12-10 + + - Update to latest events code from node.js 0.10 + - copy tests from node.js + +## 0.4.0 / 2011-07-03 ## + + - Switching to graphquire@0.8.0 + +## 0.3.0 / 2011-07-03 ## + + - Switching to URL based module require. + +## 0.2.0 / 2011-06-10 ## + + - Simplified package structure. + - Graphquire for dependency management. + +## 0.1.1 / 2011-05-16 ## + + - Unhandled errors are logged via console.error + +## 0.1.0 / 2011-04-22 ## + + - Initial release diff --git a/node_modules/events/LICENSE b/node_modules/events/LICENSE new file mode 100644 index 0000000..52ed3b0 --- /dev/null +++ b/node_modules/events/LICENSE @@ -0,0 +1,22 @@ +MIT + +Copyright Joyent, Inc. and other Node contributors. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/events/Readme.md b/node_modules/events/Readme.md new file mode 100644 index 0000000..80849c0 --- /dev/null +++ b/node_modules/events/Readme.md @@ -0,0 +1,50 @@ +# events [![Build Status](https://travis-ci.org/Gozala/events.png?branch=master)](https://travis-ci.org/Gozala/events) + +> Node's event emitter for all engines. + +This implements the Node.js [`events`][node.js docs] module for environments that do not have it, like browsers. + +> `events` currently matches the **Node.js 11.13.0** API. + +Note that the `events` module uses ES5 features. If you need to support very old browsers like IE8, use a shim like [`es5-shim`](https://www.npmjs.com/package/es5-shim). You need both the shim and the sham versions of `es5-shim`. 
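As a quick illustrative sketch (not part of the vendored Readme, and assuming a Promise-capable environment as the history notes above require): the promise-based `once` helper described in the 3.2.0/3.3.0 entries can be used with a plain `EventEmitter` as follows; the emitter variable `ee` and the event name `'ready'` are hypothetical.

```js
var EventEmitter = require('events');
var once = require('events').once;

var ee = new EventEmitter();

// `once` resolves with an array of the arguments passed to `emit`,
// and removes its internal listener after the first occurrence.
once(ee, 'ready').then(function (args) {
  console.log('ready fired with', args); // -> ready fired with [ 42 ]
});

ee.emit('ready', 42);
```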
+ +This module is maintained, but only by very few people. If you'd like to help, let us know in the [Maintainer Needed](https://github.com/Gozala/events/issues/43) issue! + +## Install + +You usually do not have to install `events` yourself! If your code runs in Node.js, `events` is built in. If your code runs in the browser, bundlers like [browserify](https://github.com/browserify/browserify) or [webpack](https://github.com/webpack/webpack) also include the `events` module. + +But if none of those apply, with npm do: + +``` +npm install events +``` + +## Usage + +```javascript +var EventEmitter = require('events') + +var ee = new EventEmitter() +ee.on('message', function (text) { + console.log(text) +}) +ee.emit('message', 'hello world') +``` + +## API + +See the [Node.js EventEmitter docs][node.js docs]. `events` currently matches the Node.js 11.13.0 API. + +## Contributing + +PRs are very welcome! The main way to contribute to `events` is by porting features, bugfixes and tests from Node.js. Ideally, code contributions to this module are copy-pasted from Node.js and transpiled to ES5, rather than reimplemented from scratch. Matching the Node.js code as closely as possible makes maintenance simpler when new changes land in Node.js. +This module intends to provide exactly the same API as Node.js, so features that are not available in the core `events` module will not be accepted. Feature requests should instead be directed at [nodejs/node](https://github.com/nodejs/node) and will be added to this module once they are implemented in Node.js. + +If there is a difference in behaviour between Node.js's `events` module and this module, please open an issue! + +## License + +[MIT](./LICENSE) + +[node.js docs]: https://nodejs.org/dist/v11.13.0/docs/api/events.html diff --git a/node_modules/events/events.js b/node_modules/events/events.js new file mode 100644 index 0000000..34b69a0 --- /dev/null +++ b/node_modules/events/events.js @@ -0,0 +1,497 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +var R = typeof Reflect === 'object' ? Reflect : null +var ReflectApply = R && typeof R.apply === 'function' + ? 
R.apply + : function ReflectApply(target, receiver, args) { + return Function.prototype.apply.call(target, receiver, args); + } + +var ReflectOwnKeys +if (R && typeof R.ownKeys === 'function') { + ReflectOwnKeys = R.ownKeys +} else if (Object.getOwnPropertySymbols) { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target) + .concat(Object.getOwnPropertySymbols(target)); + }; +} else { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target); + }; +} + +function ProcessEmitWarning(warning) { + if (console && console.warn) console.warn(warning); +} + +var NumberIsNaN = Number.isNaN || function NumberIsNaN(value) { + return value !== value; +} + +function EventEmitter() { + EventEmitter.init.call(this); +} +module.exports = EventEmitter; +module.exports.once = once; + +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._eventsCount = 0; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +var defaultMaxListeners = 10; + +function checkListener(listener) { + if (typeof listener !== 'function') { + throw new TypeError('The "listener" argument must be of type Function. Received type ' + typeof listener); + } +} + +Object.defineProperty(EventEmitter, 'defaultMaxListeners', { + enumerable: true, + get: function() { + return defaultMaxListeners; + }, + set: function(arg) { + if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) { + throw new RangeError('The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + arg + '.'); + } + defaultMaxListeners = arg; + } +}); + +EventEmitter.init = function() { + + if (this._events === undefined || + this._events === Object.getPrototypeOf(this)._events) { + this._events = Object.create(null); + this._eventsCount = 0; + } + + this._maxListeners = this._maxListeners || undefined; +}; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) { + throw new RangeError('The value of "n" is out of range. It must be a non-negative number. Received ' + n + '.'); + } + this._maxListeners = n; + return this; +}; + +function _getMaxListeners(that) { + if (that._maxListeners === undefined) + return EventEmitter.defaultMaxListeners; + return that._maxListeners; +} + +EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); +}; + +EventEmitter.prototype.emit = function emit(type) { + var args = []; + for (var i = 1; i < arguments.length; i++) args.push(arguments[i]); + var doError = (type === 'error'); + + var events = this._events; + if (events !== undefined) + doError = (doError && events.error === undefined); + else if (!doError) + return false; + + // If there is no 'error' event listener then throw. + if (doError) { + var er; + if (args.length > 0) + er = args[0]; + if (er instanceof Error) { + // Note: The comments on the `throw` lines are intentional, they show + // up in Node's output if this results in an unhandled exception. 
+ throw er; // Unhandled 'error' event + } + // At least give some kind of context to the user + var err = new Error('Unhandled error.' + (er ? ' (' + er.message + ')' : '')); + err.context = er; + throw err; // Unhandled 'error' event + } + + var handler = events[type]; + + if (handler === undefined) + return false; + + if (typeof handler === 'function') { + ReflectApply(handler, this, args); + } else { + var len = handler.length; + var listeners = arrayClone(handler, len); + for (var i = 0; i < len; ++i) + ReflectApply(listeners[i], this, args); + } + + return true; +}; + +function _addListener(target, type, listener, prepend) { + var m; + var events; + var existing; + + checkListener(listener); + + events = target._events; + if (events === undefined) { + events = target._events = Object.create(null); + target._eventsCount = 0; + } else { + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (events.newListener !== undefined) { + target.emit('newListener', type, + listener.listener ? listener.listener : listener); + + // Re-assign `events` because a newListener handler could have caused the + // this._events to be assigned to a new object + events = target._events; + } + existing = events[type]; + } + + if (existing === undefined) { + // Optimize the case of one listener. Don't need the extra array object. + existing = events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === 'function') { + // Adding the second element, need to change to array. + existing = events[type] = + prepend ? [listener, existing] : [existing, listener]; + // If we've already got an array, just append. + } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + + // Check for listener leak + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + // No error code for this since it is a Warning + // eslint-disable-next-line no-restricted-syntax + var w = new Error('Possible EventEmitter memory leak detected. ' + + existing.length + ' ' + String(type) + ' listeners ' + + 'added. 
Use emitter.setMaxListeners() to ' + + 'increase limit'); + w.name = 'MaxListenersExceededWarning'; + w.emitter = target; + w.type = type; + w.count = existing.length; + ProcessEmitWarning(w); + } + } + + return target; +} + +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.prependListener = + function prependListener(type, listener) { + return _addListener(this, type, listener, true); + }; + +function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) + return this.listener.call(this.target); + return this.listener.apply(this.target, arguments); + } +} + +function _onceWrap(target, type, listener) { + var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener }; + var wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; +} + +EventEmitter.prototype.once = function once(type, listener) { + checkListener(listener); + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +EventEmitter.prototype.prependOnceListener = + function prependOnceListener(type, listener) { + checkListener(listener); + this.prependListener(type, _onceWrap(this, type, listener)); + return this; + }; + +// Emits a 'removeListener' event if and only if the listener was removed. +EventEmitter.prototype.removeListener = + function removeListener(type, listener) { + var list, events, position, i, originalListener; + + checkListener(listener); + + events = this._events; + if (events === undefined) + return this; + + list = events[type]; + if (list === undefined) + return this; + + if (list === listener || list.listener === listener) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else { + delete events[type]; + if (events.removeListener) + this.emit('removeListener', type, list.listener || listener); + } + } else if (typeof list !== 'function') { + position = -1; + + for (i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + originalListener = list[i].listener; + position = i; + break; + } + } + + if (position < 0) + return this; + + if (position === 0) + list.shift(); + else { + spliceOne(list, position); + } + + if (list.length === 1) + events[type] = list[0]; + + if (events.removeListener !== undefined) + this.emit('removeListener', type, originalListener || listener); + } + + return this; + }; + +EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + +EventEmitter.prototype.removeAllListeners = + function removeAllListeners(type) { + var listeners, events, i; + + events = this._events; + if (events === undefined) + return this; + + // not listening for removeListener, no need to emit + if (events.removeListener === undefined) { + if (arguments.length === 0) { + this._events = Object.create(null); + this._eventsCount = 0; + } else if (events[type] !== undefined) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else + delete events[type]; + } + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + var keys = Object.keys(events); + var key; + for (i = 0; i < keys.length; ++i) { + key = keys[i]; + if (key === 'removeListener') continue; + this.removeAllListeners(key); + } + 
this.removeAllListeners('removeListener'); + this._events = Object.create(null); + this._eventsCount = 0; + return this; + } + + listeners = events[type]; + + if (typeof listeners === 'function') { + this.removeListener(type, listeners); + } else if (listeners !== undefined) { + // LIFO order + for (i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + + return this; + }; + +function _listeners(target, type, unwrap) { + var events = target._events; + + if (events === undefined) + return []; + + var evlistener = events[type]; + if (evlistener === undefined) + return []; + + if (typeof evlistener === 'function') + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + + return unwrap ? + unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length); +} + +EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); +}; + +EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); +}; + +EventEmitter.listenerCount = function(emitter, type) { + if (typeof emitter.listenerCount === 'function') { + return emitter.listenerCount(type); + } else { + return listenerCount.call(emitter, type); + } +}; + +EventEmitter.prototype.listenerCount = listenerCount; +function listenerCount(type) { + var events = this._events; + + if (events !== undefined) { + var evlistener = events[type]; + + if (typeof evlistener === 'function') { + return 1; + } else if (evlistener !== undefined) { + return evlistener.length; + } + } + + return 0; +} + +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : []; +}; + +function arrayClone(arr, n) { + var copy = new Array(n); + for (var i = 0; i < n; ++i) + copy[i] = arr[i]; + return copy; +} + +function spliceOne(list, index) { + for (; index + 1 < list.length; index++) + list[index] = list[index + 1]; + list.pop(); +} + +function unwrapListeners(arr) { + var ret = new Array(arr.length); + for (var i = 0; i < ret.length; ++i) { + ret[i] = arr[i].listener || arr[i]; + } + return ret; +} + +function once(emitter, name) { + return new Promise(function (resolve, reject) { + function errorListener(err) { + emitter.removeListener(name, resolver); + reject(err); + } + + function resolver() { + if (typeof emitter.removeListener === 'function') { + emitter.removeListener('error', errorListener); + } + resolve([].slice.call(arguments)); + }; + + eventTargetAgnosticAddListener(emitter, name, resolver, { once: true }); + if (name !== 'error') { + addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true }); + } + }); +} + +function addErrorHandlerIfEventEmitter(emitter, handler, flags) { + if (typeof emitter.on === 'function') { + eventTargetAgnosticAddListener(emitter, 'error', handler, flags); + } +} + +function eventTargetAgnosticAddListener(emitter, name, listener, flags) { + if (typeof emitter.on === 'function') { + if (flags.once) { + emitter.once(name, listener); + } else { + emitter.on(name, listener); + } + } else if (typeof emitter.addEventListener === 'function') { + // EventTarget does not have `error` event semantics like Node + // EventEmitters, we do not listen for `error` events here. + emitter.addEventListener(name, function wrapListener(arg) { + // IE does not have builtin `{ once: true }` support so we + // have to do it manually. 
+ if (flags.once) { + emitter.removeEventListener(name, wrapListener); + } + listener(arg); + }); + } else { + throw new TypeError('The "emitter" argument must be of type EventEmitter. Received type ' + typeof emitter); + } +} diff --git a/node_modules/events/package.json b/node_modules/events/package.json new file mode 100644 index 0000000..b9580d8 --- /dev/null +++ b/node_modules/events/package.json @@ -0,0 +1,37 @@ +{ + "name": "events", + "version": "3.3.0", + "description": "Node's event emitter for all engines.", + "keywords": [ + "events", + "eventEmitter", + "eventDispatcher", + "listeners" + ], + "author": "Irakli Gozalishvili (http://jeditoolkit.com)", + "repository": { + "type": "git", + "url": "git://github.com/Gozala/events.git", + "web": "https://github.com/Gozala/events" + }, + "bugs": { + "url": "http://github.com/Gozala/events/issues/" + }, + "main": "./events.js", + "engines": { + "node": ">=0.8.x" + }, + "devDependencies": { + "airtap": "^1.0.0", + "functions-have-names": "^1.2.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "isarray": "^2.0.5", + "tape": "^5.0.0" + }, + "scripts": { + "test": "node tests/index.js", + "test:browsers": "airtap -- tests/index.js" + }, + "license": "MIT" +} diff --git a/node_modules/events/security.md b/node_modules/events/security.md new file mode 100644 index 0000000..a14ace6 --- /dev/null +++ b/node_modules/events/security.md @@ -0,0 +1,10 @@ +# Security Policy + +## Supported Versions +Only the latest major version is supported at any given time. + +## Reporting a Vulnerability + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. diff --git a/node_modules/events/tests/add-listeners.js b/node_modules/events/tests/add-listeners.js new file mode 100644 index 0000000..9b57827 --- /dev/null +++ b/node_modules/events/tests/add-listeners.js @@ -0,0 +1,111 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var common = require('./common'); +var assert = require('assert'); +var EventEmitter = require('../'); + +{ + var ee = new EventEmitter(); + var events_new_listener_emitted = []; + var listeners_new_listener_emitted = []; + + // Sanity check + assert.strictEqual(ee.addListener, ee.on); + + ee.on('newListener', function(event, listener) { + // Don't track newListener listeners. 
+ if (event === 'newListener') + return; + + events_new_listener_emitted.push(event); + listeners_new_listener_emitted.push(listener); + }); + + var hello = common.mustCall(function(a, b) { + assert.strictEqual('a', a); + assert.strictEqual('b', b); + }); + + ee.once('newListener', function(name, listener) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(listener, hello); + + var listeners = this.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + }); + + ee.on('hello', hello); + ee.once('foo', assert.fail); + + assert.ok(Array.isArray(events_new_listener_emitted)); + assert.strictEqual(events_new_listener_emitted.length, 2); + assert.strictEqual(events_new_listener_emitted[0], 'hello'); + assert.strictEqual(events_new_listener_emitted[1], 'foo'); + + assert.ok(Array.isArray(listeners_new_listener_emitted)); + assert.strictEqual(listeners_new_listener_emitted.length, 2); + assert.strictEqual(listeners_new_listener_emitted[0], hello); + assert.strictEqual(listeners_new_listener_emitted[1], assert.fail); + + ee.emit('hello', 'a', 'b'); +} + +// just make sure that this doesn't throw: +{ + var f = new EventEmitter(); + + f.setMaxListeners(0); +} + +{ + var listen1 = function() {}; + var listen2 = function() {}; + var ee = new EventEmitter(); + + ee.once('newListener', function() { + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + ee.once('newListener', function() { + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + }); + ee.on('hello', listen2); + }); + ee.on('hello', listen1); + // The order of listeners on an event is not always the order in which the + // listeners were added. + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 2); + assert.strictEqual(listeners[0], listen2); + assert.strictEqual(listeners[1], listen1); +} + +// Verify that the listener must be a function +assert.throws(function() { + var ee = new EventEmitter(); + + ee.on('foo', null); +}, /^TypeError: The "listener" argument must be of type Function. Received type object$/); diff --git a/node_modules/events/tests/check-listener-leaks.js b/node_modules/events/tests/check-listener-leaks.js new file mode 100644 index 0000000..7fce48f --- /dev/null +++ b/node_modules/events/tests/check-listener-leaks.js @@ -0,0 +1,101 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var common = require('./common'); +var assert = require('assert'); +var events = require('../'); + +// Redirect warning output to tape. +var consoleWarn = console.warn; +console.warn = common.test.comment; + +common.test.on('end', function () { + console.warn = consoleWarn; +}); + +// default +{ + var e = new events.EventEmitter(); + + for (var i = 0; i < 10; i++) { + e.on('default', common.mustNotCall()); + } + assert.ok(!e._events['default'].hasOwnProperty('warned')); + e.on('default', common.mustNotCall()); + assert.ok(e._events['default'].warned); + + // specific + e.setMaxListeners(5); + for (var i = 0; i < 5; i++) { + e.on('specific', common.mustNotCall()); + } + assert.ok(!e._events['specific'].hasOwnProperty('warned')); + e.on('specific', common.mustNotCall()); + assert.ok(e._events['specific'].warned); + + // only one + e.setMaxListeners(1); + e.on('only one', common.mustNotCall()); + assert.ok(!e._events['only one'].hasOwnProperty('warned')); + e.on('only one', common.mustNotCall()); + assert.ok(e._events['only one'].hasOwnProperty('warned')); + + // unlimited + e.setMaxListeners(0); + for (var i = 0; i < 1000; i++) { + e.on('unlimited', common.mustNotCall()); + } + assert.ok(!e._events['unlimited'].hasOwnProperty('warned')); +} + +// process-wide +{ + events.EventEmitter.defaultMaxListeners = 42; + var e = new events.EventEmitter(); + + for (var i = 0; i < 42; ++i) { + e.on('fortytwo', common.mustNotCall()); + } + assert.ok(!e._events['fortytwo'].hasOwnProperty('warned')); + e.on('fortytwo', common.mustNotCall()); + assert.ok(e._events['fortytwo'].hasOwnProperty('warned')); + delete e._events['fortytwo'].warned; + + events.EventEmitter.defaultMaxListeners = 44; + e.on('fortytwo', common.mustNotCall()); + assert.ok(!e._events['fortytwo'].hasOwnProperty('warned')); + e.on('fortytwo', common.mustNotCall()); + assert.ok(e._events['fortytwo'].hasOwnProperty('warned')); +} + +// but _maxListeners still has precedence over defaultMaxListeners +{ + events.EventEmitter.defaultMaxListeners = 42; + var e = new events.EventEmitter(); + e.setMaxListeners(1); + e.on('uno', common.mustNotCall()); + assert.ok(!e._events['uno'].hasOwnProperty('warned')); + e.on('uno', common.mustNotCall()); + assert.ok(e._events['uno'].hasOwnProperty('warned')); + + // chainable + assert.strictEqual(e, e.setMaxListeners(1)); +} diff --git a/node_modules/events/tests/common.js b/node_modules/events/tests/common.js new file mode 100644 index 0000000..49569b0 --- /dev/null +++ b/node_modules/events/tests/common.js @@ -0,0 +1,104 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var test = require('tape'); +var assert = require('assert'); + +var noop = function() {}; + +var mustCallChecks = []; + +function runCallChecks(exitCode) { + if (exitCode !== 0) return; + + var failed = filter(mustCallChecks, function(context) { + if ('minimum' in context) { + context.messageSegment = 'at least ' + context.minimum; + return context.actual < context.minimum; + } else { + context.messageSegment = 'exactly ' + context.exact; + return context.actual !== context.exact; + } + }); + + for (var i = 0; i < failed.length; i++) { + var context = failed[i]; + console.log('Mismatched %s function calls. Expected %s, actual %d.', + context.name, + context.messageSegment, + context.actual); + // IE8 has no .stack + if (context.stack) console.log(context.stack.split('\n').slice(2).join('\n')); + } + + assert.strictEqual(failed.length, 0); +} + +exports.mustCall = function(fn, exact) { + return _mustCallInner(fn, exact, 'exact'); +}; + +function _mustCallInner(fn, criteria, field) { + if (typeof criteria == 'undefined') criteria = 1; + + if (typeof fn === 'number') { + criteria = fn; + fn = noop; + } else if (fn === undefined) { + fn = noop; + } + + if (typeof criteria !== 'number') + throw new TypeError('Invalid ' + field + ' value: ' + criteria); + + var context = { + actual: 0, + stack: (new Error()).stack, + name: fn.name || '' + }; + + context[field] = criteria; + + // add the exit listener only once to avoid listener leak warnings + if (mustCallChecks.length === 0) test.onFinish(function() { runCallChecks(0); }); + + mustCallChecks.push(context); + + return function() { + context.actual++; + return fn.apply(this, arguments); + }; +} + +exports.mustNotCall = function(msg) { + return function mustNotCall() { + assert.fail(msg || 'function should not have been called'); + }; +}; + +function filter(arr, fn) { + if (arr.filter) return arr.filter(fn); + var filtered = []; + for (var i = 0; i < arr.length; i++) { + if (fn(arr[i], i, arr)) filtered.push(arr[i]); + } + return filtered +} diff --git a/node_modules/events/tests/errors.js b/node_modules/events/tests/errors.js new file mode 100644 index 0000000..a23df43 --- /dev/null +++ b/node_modules/events/tests/errors.js @@ -0,0 +1,13 @@ +'use strict'; +var assert = require('assert'); +var EventEmitter = require('../'); + +var EE = new EventEmitter(); + +assert.throws(function () { + EE.emit('error', 'Accepts a string'); +}, 'Error: Unhandled error. (Accepts a string)'); + +assert.throws(function () { + EE.emit('error', { message: 'Error!' }); +}, 'Unhandled error. 
([object Object])'); diff --git a/node_modules/events/tests/events-list.js b/node_modules/events/tests/events-list.js new file mode 100644 index 0000000..08aa621 --- /dev/null +++ b/node_modules/events/tests/events-list.js @@ -0,0 +1,28 @@ +'use strict'; + +var EventEmitter = require('../'); +var assert = require('assert'); + +var EE = new EventEmitter(); +var m = function() {}; +EE.on('foo', function() {}); +assert.equal(1, EE.eventNames().length); +assert.equal('foo', EE.eventNames()[0]); +EE.on('bar', m); +assert.equal(2, EE.eventNames().length); +assert.equal('foo', EE.eventNames()[0]); +assert.equal('bar', EE.eventNames()[1]); +EE.removeListener('bar', m); +assert.equal(1, EE.eventNames().length); +assert.equal('foo', EE.eventNames()[0]); + +if (typeof Symbol !== 'undefined') { + var s = Symbol('s'); + EE.on(s, m); + assert.equal(2, EE.eventNames().length); + assert.equal('foo', EE.eventNames()[0]); + assert.equal(s, EE.eventNames()[1]); + EE.removeListener(s, m); + assert.equal(1, EE.eventNames().length); + assert.equal('foo', EE.eventNames()[0]); +} diff --git a/node_modules/events/tests/events-once.js b/node_modules/events/tests/events-once.js new file mode 100644 index 0000000..dae8649 --- /dev/null +++ b/node_modules/events/tests/events-once.js @@ -0,0 +1,234 @@ +'use strict'; + +var common = require('./common'); +var EventEmitter = require('../').EventEmitter; +var once = require('../').once; +var has = require('has'); +var assert = require('assert'); + +function Event(type) { + this.type = type; +} + +function EventTargetMock() { + this.events = {}; + + this.addEventListener = common.mustCall(this.addEventListener); + this.removeEventListener = common.mustCall(this.removeEventListener); +} + +EventTargetMock.prototype.addEventListener = function addEventListener(name, listener, options) { + if (!(name in this.events)) { + this.events[name] = { listeners: [], options: options || {} } + } + this.events[name].listeners.push(listener); +}; + +EventTargetMock.prototype.removeEventListener = function removeEventListener(name, callback) { + if (!(name in this.events)) { + return; + } + var event = this.events[name]; + var stack = event.listeners; + + for (var i = 0, l = stack.length; i < l; i++) { + if (stack[i] === callback) { + stack.splice(i, 1); + if (stack.length === 0) { + delete this.events[name]; + } + return; + } + } +}; + +EventTargetMock.prototype.dispatchEvent = function dispatchEvent(arg) { + if (!(arg.type in this.events)) { + return true; + } + + var event = this.events[arg.type]; + var stack = event.listeners.slice(); + + for (var i = 0, l = stack.length; i < l; i++) { + stack[i].call(null, arg); + if (event.options.once) { + this.removeEventListener(arg.type, stack[i]); + } + } + return !arg.defaultPrevented; +}; + +function onceAnEvent() { + var ee = new EventEmitter(); + + process.nextTick(function () { + ee.emit('myevent', 42); + }); + + return once(ee, 'myevent').then(function (args) { + var value = args[0] + assert.strictEqual(value, 42); + assert.strictEqual(ee.listenerCount('error'), 0); + assert.strictEqual(ee.listenerCount('myevent'), 0); + }); +} + +function onceAnEventWithTwoArgs() { + var ee = new EventEmitter(); + + process.nextTick(function () { + ee.emit('myevent', 42, 24); + }); + + return once(ee, 'myevent').then(function (value) { + assert.strictEqual(value.length, 2); + assert.strictEqual(value[0], 42); + assert.strictEqual(value[1], 24); + }); +} + +function catchesErrors() { + var ee = new EventEmitter(); + + var expected = new Error('kaboom'); + 
var err; + process.nextTick(function () { + ee.emit('error', expected); + }); + + return once(ee, 'myevent').then(function () { + throw new Error('should reject') + }, function (err) { + assert.strictEqual(err, expected); + assert.strictEqual(ee.listenerCount('error'), 0); + assert.strictEqual(ee.listenerCount('myevent'), 0); + }); +} + +function stopListeningAfterCatchingError() { + var ee = new EventEmitter(); + + var expected = new Error('kaboom'); + var err; + process.nextTick(function () { + ee.emit('error', expected); + ee.emit('myevent', 42, 24); + }); + + // process.on('multipleResolves', common.mustNotCall()); + + return once(ee, 'myevent').then(common.mustNotCall, function (err) { + // process.removeAllListeners('multipleResolves'); + assert.strictEqual(err, expected); + assert.strictEqual(ee.listenerCount('error'), 0); + assert.strictEqual(ee.listenerCount('myevent'), 0); + }); +} + +function onceError() { + var ee = new EventEmitter(); + + var expected = new Error('kaboom'); + process.nextTick(function () { + ee.emit('error', expected); + }); + + var promise = once(ee, 'error'); + assert.strictEqual(ee.listenerCount('error'), 1); + return promise.then(function (args) { + var err = args[0] + assert.strictEqual(err, expected); + assert.strictEqual(ee.listenerCount('error'), 0); + assert.strictEqual(ee.listenerCount('myevent'), 0); + }); +} + +function onceWithEventTarget() { + var et = new EventTargetMock(); + var event = new Event('myevent'); + process.nextTick(function () { + et.dispatchEvent(event); + }); + return once(et, 'myevent').then(function (args) { + var value = args[0]; + assert.strictEqual(value, event); + assert.strictEqual(has(et.events, 'myevent'), false); + }); +} + +function onceWithEventTargetError() { + var et = new EventTargetMock(); + var error = new Event('error'); + process.nextTick(function () { + et.dispatchEvent(error); + }); + return once(et, 'error').then(function (args) { + var err = args[0]; + assert.strictEqual(err, error); + assert.strictEqual(has(et.events, 'error'), false); + }); +} + +function prioritizesEventEmitter() { + var ee = new EventEmitter(); + ee.addEventListener = assert.fail; + ee.removeAllListeners = assert.fail; + process.nextTick(function () { + ee.emit('foo'); + }); + return once(ee, 'foo'); +} + +var allTests = [ + onceAnEvent(), + onceAnEventWithTwoArgs(), + catchesErrors(), + stopListeningAfterCatchingError(), + onceError(), + onceWithEventTarget(), + onceWithEventTargetError(), + prioritizesEventEmitter() +]; + +var hasBrowserEventTarget = false; +try { + hasBrowserEventTarget = typeof (new window.EventTarget().addEventListener) === 'function' && + new window.Event('xyz').type === 'xyz'; +} catch (err) {} + +if (hasBrowserEventTarget) { + var onceWithBrowserEventTarget = function onceWithBrowserEventTarget() { + var et = new window.EventTarget(); + var event = new window.Event('myevent'); + process.nextTick(function () { + et.dispatchEvent(event); + }); + return once(et, 'myevent').then(function (args) { + var value = args[0]; + assert.strictEqual(value, event); + assert.strictEqual(has(et.events, 'myevent'), false); + }); + } + + var onceWithBrowserEventTargetError = function onceWithBrowserEventTargetError() { + var et = new window.EventTarget(); + var error = new window.Event('error'); + process.nextTick(function () { + et.dispatchEvent(error); + }); + return once(et, 'error').then(function (args) { + var err = args[0]; + assert.strictEqual(err, error); + assert.strictEqual(has(et.events, 'error'), false); + }); + } + + 
common.test.comment('Testing with browser built-in EventTarget'); + allTests.push([ + onceWithBrowserEventTarget(), + onceWithBrowserEventTargetError() + ]); +} + +module.exports = Promise.all(allTests); diff --git a/node_modules/events/tests/index.js b/node_modules/events/tests/index.js new file mode 100644 index 0000000..2d739e6 --- /dev/null +++ b/node_modules/events/tests/index.js @@ -0,0 +1,64 @@ +var test = require('tape'); +var functionsHaveNames = require('functions-have-names'); +var hasSymbols = require('has-symbols'); + +require('./legacy-compat'); +var common = require('./common'); + +// we do this to easily wrap each file in a mocha test +// and also have browserify be able to statically analyze this file +var orig_require = require; +var require = function(file) { + test(file, function(t) { + // Store the tape object so tests can access it. + t.on('end', function () { delete common.test; }); + common.test = t; + + try { + var exp = orig_require(file); + if (exp && exp.then) { + exp.then(function () { t.end(); }, t.fail); + return; + } + } catch (err) { + t.fail(err); + } + t.end(); + }); +}; + +require('./add-listeners.js'); +require('./check-listener-leaks.js'); +require('./errors.js'); +require('./events-list.js'); +if (typeof Promise === 'function') { + require('./events-once.js'); +} else { + // Promise support is not available. + test('./events-once.js', { skip: true }, function () {}); +} +require('./listener-count.js'); +require('./listeners-side-effects.js'); +require('./listeners.js'); +require('./max-listeners.js'); +if (functionsHaveNames()) { + require('./method-names.js'); +} else { + // Function.name is not supported in IE + test('./method-names.js', { skip: true }, function () {}); +} +require('./modify-in-emit.js'); +require('./num-args.js'); +require('./once.js'); +require('./prepend.js'); +require('./set-max-listeners-side-effects.js'); +require('./special-event-names.js'); +require('./subclass.js'); +if (hasSymbols()) { + require('./symbols.js'); +} else { + // Symbol is not available. + test('./symbols.js', { skip: true }, function () {}); +} +require('./remove-all-listeners.js'); +require('./remove-listeners.js'); diff --git a/node_modules/events/tests/legacy-compat.js b/node_modules/events/tests/legacy-compat.js new file mode 100644 index 0000000..a402be6 --- /dev/null +++ b/node_modules/events/tests/legacy-compat.js @@ -0,0 +1,16 @@ +// sigh... life is hard +if (!global.console) { + console = {} +} + +var fns = ['log', 'error', 'trace']; +for (var i=0 ; ifoo should not be emitted'); +} + +e.once('foo', remove); +e.removeListener('foo', remove); +e.emit('foo'); + +e.once('e', common.mustCall(function() { + e.emit('e'); +})); + +e.once('e', common.mustCall()); + +e.emit('e'); + +// Verify that the listener must be a function +assert.throws(function() { + var ee = new EventEmitter(); + + ee.once('foo', null); +}, /^TypeError: The "listener" argument must be of type Function. Received type object$/); + +{ + // once() has different code paths based on the number of arguments being + // emitted. Verify that all of the cases are covered. 
+ var maxArgs = 4; + + for (var i = 0; i <= maxArgs; ++i) { + var ee = new EventEmitter(); + var args = ['foo']; + + for (var j = 0; j < i; ++j) + args.push(j); + + ee.once('foo', common.mustCall(function() { + var params = Array.prototype.slice.call(arguments); + var restArgs = args.slice(1); + assert.ok(Array.isArray(params)); + assert.strictEqual(params.length, restArgs.length); + for (var index = 0; index < params.length; index++) { + var param = params[index]; + assert.strictEqual(param, restArgs[index]); + } + })); + + EventEmitter.prototype.emit.apply(ee, args); + } +} diff --git a/node_modules/events/tests/prepend.js b/node_modules/events/tests/prepend.js new file mode 100644 index 0000000..79afde0 --- /dev/null +++ b/node_modules/events/tests/prepend.js @@ -0,0 +1,31 @@ +'use strict'; + +var common = require('./common'); +var EventEmitter = require('../'); +var assert = require('assert'); + +var myEE = new EventEmitter(); +var m = 0; +// This one comes last. +myEE.on('foo', common.mustCall(function () { + assert.strictEqual(m, 2); +})); + +// This one comes second. +myEE.prependListener('foo', common.mustCall(function () { + assert.strictEqual(m++, 1); +})); + +// This one comes first. +myEE.prependOnceListener('foo', + common.mustCall(function () { + assert.strictEqual(m++, 0); + })); + +myEE.emit('foo'); + +// Verify that the listener must be a function +assert.throws(function () { + var ee = new EventEmitter(); + ee.prependOnceListener('foo', null); +}, 'TypeError: The "listener" argument must be of type Function. Received type object'); diff --git a/node_modules/events/tests/remove-all-listeners.js b/node_modules/events/tests/remove-all-listeners.js new file mode 100644 index 0000000..622941c --- /dev/null +++ b/node_modules/events/tests/remove-all-listeners.js @@ -0,0 +1,133 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var common = require('./common'); +var assert = require('assert'); +var events = require('../'); +var test = require('tape'); + +function expect(expected) { + var actual = []; + test.onFinish(function() { + var sortedActual = actual.sort(); + var sortedExpected = expected.sort(); + assert.strictEqual(sortedActual.length, sortedExpected.length); + for (var index = 0; index < sortedActual.length; index++) { + var value = sortedActual[index]; + assert.strictEqual(value, sortedExpected[index]); + } + }); + function listener(name) { + actual.push(name); + } + return common.mustCall(listener, expected.length); +} + +{ + var ee = new events.EventEmitter(); + var noop = common.mustNotCall(); + ee.on('foo', noop); + ee.on('bar', noop); + ee.on('baz', noop); + ee.on('baz', noop); + var fooListeners = ee.listeners('foo'); + var barListeners = ee.listeners('bar'); + var bazListeners = ee.listeners('baz'); + ee.on('removeListener', expect(['bar', 'baz', 'baz'])); + ee.removeAllListeners('bar'); + ee.removeAllListeners('baz'); + + var listeners = ee.listeners('foo'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], noop); + + listeners = ee.listeners('bar'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + listeners = ee.listeners('baz'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + // After calling removeAllListeners(), + // the old listeners array should stay unchanged. + assert.strictEqual(fooListeners.length, 1); + assert.strictEqual(fooListeners[0], noop); + assert.strictEqual(barListeners.length, 1); + assert.strictEqual(barListeners[0], noop); + assert.strictEqual(bazListeners.length, 2); + assert.strictEqual(bazListeners[0], noop); + assert.strictEqual(bazListeners[1], noop); + // After calling removeAllListeners(), + // new listeners arrays is different from the old. + assert.notStrictEqual(ee.listeners('bar'), barListeners); + assert.notStrictEqual(ee.listeners('baz'), bazListeners); +} + +{ + var ee = new events.EventEmitter(); + ee.on('foo', common.mustNotCall()); + ee.on('bar', common.mustNotCall()); + // Expect LIFO order + ee.on('removeListener', expect(['foo', 'bar', 'removeListener'])); + ee.on('removeListener', expect(['foo', 'bar'])); + ee.removeAllListeners(); + + var listeners = ee.listeners('foo'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + listeners = ee.listeners('bar'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); +} + +{ + var ee = new events.EventEmitter(); + ee.on('removeListener', common.mustNotCall()); + // Check for regression where removeAllListeners() throws when + // there exists a 'removeListener' listener, but there exists + // no listeners for the provided event type. 
+ assert.doesNotThrow(function () { ee.removeAllListeners(ee, 'foo') }); +} + +{ + var ee = new events.EventEmitter(); + var expectLength = 2; + ee.on('removeListener', function() { + assert.strictEqual(expectLength--, this.listeners('baz').length); + }); + ee.on('baz', common.mustNotCall()); + ee.on('baz', common.mustNotCall()); + ee.on('baz', common.mustNotCall()); + assert.strictEqual(ee.listeners('baz').length, expectLength + 1); + ee.removeAllListeners('baz'); + assert.strictEqual(ee.listeners('baz').length, 0); +} + +{ + var ee = new events.EventEmitter(); + assert.strictEqual(ee, ee.removeAllListeners()); +} + +{ + var ee = new events.EventEmitter(); + ee._events = undefined; + assert.strictEqual(ee, ee.removeAllListeners()); +} diff --git a/node_modules/events/tests/remove-listeners.js b/node_modules/events/tests/remove-listeners.js new file mode 100644 index 0000000..18e4d16 --- /dev/null +++ b/node_modules/events/tests/remove-listeners.js @@ -0,0 +1,212 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var common = require('./common'); +var assert = require('assert'); +var EventEmitter = require('../'); + +var listener1 = function listener1() {}; +var listener2 = function listener2() {}; + +{ + var ee = new EventEmitter(); + ee.on('hello', listener1); + ee.on('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener1); + })); + ee.removeListener('hello', listener1); + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); +} + +{ + var ee = new EventEmitter(); + ee.on('hello', listener1); + ee.on('removeListener', common.mustNotCall()); + ee.removeListener('hello', listener2); + + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], listener1); +} + +{ + var ee = new EventEmitter(); + ee.on('hello', listener1); + ee.on('hello', listener2); + + var listeners; + ee.once('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener1); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], listener2); + })); + ee.removeListener('hello', listener1); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], listener2); + ee.once('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener2); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + })); + ee.removeListener('hello', listener2); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); +} + +{ + var ee = new EventEmitter(); + + function remove1() { + assert.fail('remove1 should not have been called'); + } + + function remove2() { + assert.fail('remove2 should not have been called'); + } + + ee.on('removeListener', common.mustCall(function(name, cb) { + if (cb !== remove1) return; + this.removeListener('quux', remove2); + this.emit('quux'); + }, 2)); + ee.on('quux', remove1); + ee.on('quux', remove2); + ee.removeListener('quux', remove1); +} + +{ + var ee = new EventEmitter(); + ee.on('hello', listener1); + ee.on('hello', listener2); + + var listeners; + ee.once('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener1); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], listener2); + ee.once('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener2); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + })); + ee.removeListener('hello', listener2); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + })); + ee.removeListener('hello', listener1); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); +} + +{ + var ee = new EventEmitter(); + var listener3 = common.mustCall(function() { + ee.removeListener('hello', 
listener4); + }, 2); + var listener4 = common.mustCall(); + + ee.on('hello', listener3); + ee.on('hello', listener4); + + // listener4 will still be called although it is removed by listener 3. + ee.emit('hello'); + // This is so because the interal listener array at time of emit + // was [listener3,listener4] + + // Interal listener array [listener3] + ee.emit('hello'); +} + +{ + var ee = new EventEmitter(); + + ee.once('hello', listener1); + ee.on('removeListener', common.mustCall(function(eventName, listener) { + assert.strictEqual(eventName, 'hello'); + assert.strictEqual(listener, listener1); + })); + ee.emit('hello'); +} + +{ + var ee = new EventEmitter(); + + assert.strictEqual(ee, ee.removeListener('foo', function() {})); +} + +// Verify that the removed listener must be a function +assert.throws(function() { + var ee = new EventEmitter(); + + ee.removeListener('foo', null); +}, /^TypeError: The "listener" argument must be of type Function\. Received type object$/); + +{ + var ee = new EventEmitter(); + var listener = function() {}; + ee._events = undefined; + var e = ee.removeListener('foo', listener); + assert.strictEqual(e, ee); +} + +{ + var ee = new EventEmitter(); + + ee.on('foo', listener1); + ee.on('foo', listener2); + var listeners = ee.listeners('foo'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 2); + assert.strictEqual(listeners[0], listener1); + assert.strictEqual(listeners[1], listener2); + + ee.removeListener('foo', listener1); + assert.strictEqual(ee._events.foo, listener2); + + ee.on('foo', listener1); + listeners = ee.listeners('foo'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 2); + assert.strictEqual(listeners[0], listener2); + assert.strictEqual(listeners[1], listener1); + + ee.removeListener('foo', listener1); + assert.strictEqual(ee._events.foo, listener2); +} diff --git a/node_modules/events/tests/set-max-listeners-side-effects.js b/node_modules/events/tests/set-max-listeners-side-effects.js new file mode 100644 index 0000000..13dbb67 --- /dev/null +++ b/node_modules/events/tests/set-max-listeners-side-effects.js @@ -0,0 +1,31 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +require('./common'); +var assert = require('assert'); +var events = require('../'); + +var e = new events.EventEmitter(); + +if (Object.create) assert.ok(!(e._events instanceof Object)); +assert.strictEqual(Object.keys(e._events).length, 0); +e.setMaxListeners(5); +assert.strictEqual(Object.keys(e._events).length, 0); diff --git a/node_modules/events/tests/special-event-names.js b/node_modules/events/tests/special-event-names.js new file mode 100644 index 0000000..a2f0b74 --- /dev/null +++ b/node_modules/events/tests/special-event-names.js @@ -0,0 +1,45 @@ +'use strict'; + +var common = require('./common'); +var EventEmitter = require('../'); +var assert = require('assert'); + +var ee = new EventEmitter(); +var handler = function() {}; + +assert.strictEqual(ee.eventNames().length, 0); + +assert.strictEqual(ee._events.hasOwnProperty, undefined); +assert.strictEqual(ee._events.toString, undefined); + +ee.on('__defineGetter__', handler); +ee.on('toString', handler); +ee.on('__proto__', handler); + +assert.strictEqual(ee.eventNames()[0], '__defineGetter__'); +assert.strictEqual(ee.eventNames()[1], 'toString'); + +assert.strictEqual(ee.listeners('__defineGetter__').length, 1); +assert.strictEqual(ee.listeners('__defineGetter__')[0], handler); +assert.strictEqual(ee.listeners('toString').length, 1); +assert.strictEqual(ee.listeners('toString')[0], handler); + +// Only run __proto__ tests if that property can actually be set +if ({ __proto__: 'ok' }.__proto__ === 'ok') { + assert.strictEqual(ee.eventNames().length, 3); + assert.strictEqual(ee.eventNames()[2], '__proto__'); + assert.strictEqual(ee.listeners('__proto__').length, 1); + assert.strictEqual(ee.listeners('__proto__')[0], handler); + + ee.on('__proto__', common.mustCall(function(val) { + assert.strictEqual(val, 1); + })); + ee.emit('__proto__', 1); + + process.on('__proto__', common.mustCall(function(val) { + assert.strictEqual(val, 1); + })); + process.emit('__proto__', 1); +} else { + console.log('# skipped __proto__') +} diff --git a/node_modules/events/tests/subclass.js b/node_modules/events/tests/subclass.js new file mode 100644 index 0000000..bd033ff --- /dev/null +++ b/node_modules/events/tests/subclass.js @@ -0,0 +1,66 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var common = require('./common'); +var test = require('tape'); +var assert = require('assert'); +var EventEmitter = require('../').EventEmitter; +var util = require('util'); + +util.inherits(MyEE, EventEmitter); + +function MyEE(cb) { + this.once(1, cb); + this.emit(1); + this.removeAllListeners(); + EventEmitter.call(this); +} + +var myee = new MyEE(common.mustCall()); + + +util.inherits(ErrorEE, EventEmitter); +function ErrorEE() { + this.emit('error', new Error('blerg')); +} + +assert.throws(function() { + new ErrorEE(); +}, /blerg/); + +test.onFinish(function() { + assert.ok(!(myee._events instanceof Object)); + assert.strictEqual(Object.keys(myee._events).length, 0); +}); + + +function MyEE2() { + EventEmitter.call(this); +} + +MyEE2.prototype = new EventEmitter(); + +var ee1 = new MyEE2(); +var ee2 = new MyEE2(); + +ee1.on('x', function() {}); + +assert.strictEqual(ee2.listenerCount('x'), 0); diff --git a/node_modules/events/tests/symbols.js b/node_modules/events/tests/symbols.js new file mode 100644 index 0000000..0721f0e --- /dev/null +++ b/node_modules/events/tests/symbols.js @@ -0,0 +1,25 @@ +'use strict'; + +var common = require('./common'); +var EventEmitter = require('../'); +var assert = require('assert'); + +var ee = new EventEmitter(); +var foo = Symbol('foo'); +var listener = common.mustCall(); + +ee.on(foo, listener); +assert.strictEqual(ee.listeners(foo).length, 1); +assert.strictEqual(ee.listeners(foo)[0], listener); + +ee.emit(foo); + +ee.removeAllListeners(); +assert.strictEqual(ee.listeners(foo).length, 0); + +ee.on(foo, listener); +assert.strictEqual(ee.listeners(foo).length, 1); +assert.strictEqual(ee.listeners(foo)[0], listener); + +ee.removeListener(foo, listener); +assert.strictEqual(ee.listeners(foo).length, 0); diff --git a/node_modules/form-data/License b/node_modules/form-data/License new file mode 100644 index 0000000..c7ff12a --- /dev/null +++ b/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
diff --git a/node_modules/form-data/README.md b/node_modules/form-data/README.md new file mode 100644 index 0000000..f6b2ae9 --- /dev/null +++ b/node_modules/form-data/README.md @@ -0,0 +1,350 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. + +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v2.5.1.svg?label=linux:4.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v2.5.1.svg?label=macos:4.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v2.5.1.svg?label=windows:4.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v2.5.1.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Array_ getHeaders( [**Array** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Array_ getHeaders( [**Array** _userHeaders_] ) +This method ads the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. A boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... +}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. 
+```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/form-data/README.md.bak b/node_modules/form-data/README.md.bak new file mode 100644 index 0000000..6077db9 --- /dev/null +++ b/node_modules/form-data/README.md.bak @@ -0,0 +1,350 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=linux:4.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=macos:4.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=windows:4.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/master.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Array_ getHeaders( [**Array** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Array_ getHeaders( [**Array** _userHeaders_] ) +This method ads the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. A boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... +}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. 
+```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. + +## License + +Form-Data is released under the [MIT](License) license. 
diff --git a/node_modules/form-data/index.d.ts b/node_modules/form-data/index.d.ts new file mode 100644 index 0000000..255292d --- /dev/null +++ b/node_modules/form-data/index.d.ts @@ -0,0 +1,51 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +interface Options { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/form-data/lib/browser.js b/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000..09e7c70 --- /dev/null +++ b/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/form-data/lib/form_data.js b/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000..9c07e32 --- /dev/null +++ b/node_modules/form-data/lib/form_data.js @@ -0,0 +1,483 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + this.pipe(request); + if (cb) { + request.on('error', cb); + request.on('response', cb.bind(this, null)); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/form-data/lib/populate.js b/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000..4d35738 --- /dev/null +++ b/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/form-data/package.json b/node_modules/form-data/package.json new file mode 100644 index 0000000..7423f20 --- /dev/null +++ b/node_modules/form-data/package.json @@ -0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. 
Can be used to submit forms and file uploads to other web applications.", + "version": "2.5.1", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 0.12" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "phantomjs-prebuilt": "^2.1.13", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/mime-db/HISTORY.md b/node_modules/mime-db/HISTORY.md new file mode 100644 index 0000000..7436f64 --- /dev/null +++ b/node_modules/mime-db/HISTORY.md @@ -0,0 +1,507 @@ +1.52.0 / 2022-02-21 +=================== + + * Add extensions from IANA for more `image/*` types + * Add extension `.asc` to `application/pgp-keys` + * Add extensions to various XML types + * Add new upstream MIME types + +1.51.0 / 2021-11-08 +=================== + + * Add new upstream MIME types + * Mark `image/vnd.microsoft.icon` as compressible + * Mark `image/vnd.ms-dds` as compressible + +1.50.0 / 2021-09-15 +=================== + + * Add deprecated iWorks mime types and extensions + * Add new upstream MIME types + +1.49.0 / 2021-07-26 +=================== + + * Add extension `.trig` to `application/trig` + * Add new upstream MIME types + +1.48.0 / 2021-05-30 +=================== + + * Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + * Add new upstream MIME types + * Mark `text/yaml` as compressible + +1.47.0 / 2021-04-01 +=================== + + * Add new upstream MIME types + * Remove ambigious extensions from IANA for `application/*+xml` types + * Update primary extension to `.es` for `application/ecmascript` + +1.46.0 / 2021-02-13 +=================== + + * Add extension `.amr` to `audio/amr` + * Add extension `.m4s` to `video/iso.segment` + * Add extension `.opus` to `audio/ogg` + * Add new upstream MIME types + +1.45.0 / 2020-09-22 +=================== + + * Add 
`application/ubjson` with extension `.ubj` + * Add `image/avif` with extension `.avif` + * Add `image/ktx2` with extension `.ktx2` + * Add extension `.dbf` to `application/vnd.dbf` + * Add extension `.rar` to `application/vnd.rar` + * Add extension `.td` to `application/urc-targetdesc+xml` + * Add new upstream MIME types + * Fix extension of `application/vnd.apple.keynote` to be `.key` + +1.44.0 / 2020-04-22 +=================== + + * Add charsets from IANA + * Add extension `.cjs` to `application/node` + * Add new upstream MIME types + +1.43.0 / 2020-01-05 +=================== + + * Add `application/x-keepass2` with extension `.kdbx` + * Add extension `.mxmf` to `audio/mobile-xmf` + * Add extensions from IANA for `application/*+xml` types + * Add new upstream MIME types + +1.42.0 / 2019-09-25 +=================== + + * Add `image/vnd.ms-dds` with extension `.dds` + * Add new upstream MIME types + * Remove compressible from `multipart/mixed` + +1.41.0 / 2019-08-30 +=================== + + * Add new upstream MIME types + * Add `application/toml` with extension `.toml` + * Mark `font/ttf` as compressible + +1.40.0 / 2019-04-20 +=================== + + * Add extensions from IANA for `model/*` types + * Add `text/mdx` with extension `.mdx` + +1.39.0 / 2019-04-04 +=================== + + * Add extensions `.siv` and `.sieve` to `application/sieve` + * Add new upstream MIME types + +1.38.0 / 2019-02-04 +=================== + + * Add extension `.nq` to `application/n-quads` + * Add extension `.nt` to `application/n-triples` + * Add new upstream MIME types + * Mark `text/less` as compressible + +1.37.0 / 2018-10-19 +=================== + + * Add extensions to HEIC image types + * Add new upstream MIME types + +1.36.0 / 2018-08-20 +=================== + + * Add Apple file extensions from IANA + * Add extensions from IANA for `image/*` types + * Add new upstream MIME types + +1.35.0 / 2018-07-15 +=================== + + * Add extension `.owl` to `application/rdf+xml` + * Add new upstream MIME types + - Removes extension `.woff` from `application/font-woff` + +1.34.0 / 2018-06-03 +=================== + + * Add extension `.csl` to `application/vnd.citationstyles.style+xml` + * Add extension `.es` to `application/ecmascript` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/turtle` + * Mark all XML-derived types as compressible + +1.33.0 / 2018-02-15 +=================== + + * Add extensions from IANA for `message/*` types + * Add new upstream MIME types + * Fix some incorrect OOXML types + * Remove `application/font-woff2` + +1.32.0 / 2017-11-29 +=================== + + * Add new upstream MIME types + * Update `text/hjson` to registered `application/hjson` + * Add `text/shex` with extension `.shex` + +1.31.0 / 2017-10-25 +=================== + + * Add `application/raml+yaml` with extension `.raml` + * Add `application/wasm` with extension `.wasm` + * Add new `font` type from IANA + * Add new upstream font extensions + * Add new upstream MIME types + * Add extensions for JPEG-2000 images + +1.30.0 / 2017-08-27 +=================== + + * Add `application/vnd.ms-outlook` + * Add `application/x-arj` + * Add extension `.mjs` to `application/javascript` + * Add glTF types and extensions + * Add new upstream MIME types + * Add `text/x-org` + * Add VirtualBox MIME types + * Fix `source` records for `video/*` types that are IANA + * Update `font/opentype` to registered `font/otf` + +1.29.0 / 2017-07-10 +=================== + + * Add `application/fido.trusted-apps+json` + * Add 
extension `.wadl` to `application/vnd.sun.wadl+xml` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/css` + +1.28.0 / 2017-05-14 +=================== + + * Add new upstream MIME types + * Add extension `.gz` to `application/gzip` + * Update extensions `.md` and `.markdown` to be `text/markdown` + +1.27.0 / 2017-03-16 +=================== + + * Add new upstream MIME types + * Add `image/apng` with extension `.apng` + +1.26.0 / 2017-01-14 +=================== + + * Add new upstream MIME types + * Add extension `.geojson` to `application/geo+json` + +1.25.0 / 2016-11-11 +=================== + + * Add new upstream MIME types + +1.24.0 / 2016-09-18 +=================== + + * Add `audio/mp3` + * Add new upstream MIME types + +1.23.0 / 2016-05-01 +=================== + + * Add new upstream MIME types + * Add extension `.3gpp` to `audio/3gpp` + +1.22.0 / 2016-02-15 +=================== + + * Add `text/slim` + * Add extension `.rng` to `application/xml` + * Add new upstream MIME types + * Fix extension of `application/dash+xml` to be `.mpd` + * Update primary extension to `.m4a` for `audio/mp4` + +1.21.0 / 2016-01-06 +=================== + + * Add Google document types + * Add new upstream MIME types + +1.20.0 / 2015-11-10 +=================== + + * Add `text/x-suse-ymp` + * Add new upstream MIME types + +1.19.0 / 2015-09-17 +=================== + + * Add `application/vnd.apple.pkpass` + * Add new upstream MIME types + +1.18.0 / 2015-09-03 +=================== + + * Add new upstream MIME types + +1.17.0 / 2015-08-13 +=================== + + * Add `application/x-msdos-program` + * Add `audio/g711-0` + * Add `image/vnd.mozilla.apng` + * Add extension `.exe` to `application/x-msdos-program` + +1.16.0 / 2015-07-29 +=================== + + * Add `application/vnd.uri-map` + +1.15.0 / 2015-07-13 +=================== + + * Add `application/x-httpd-php` + +1.14.0 / 2015-06-25 +=================== + + * Add `application/scim+json` + * Add `application/vnd.3gpp.ussd+xml` + * Add `application/vnd.biopax.rdf+xml` + * Add `text/x-processing` + +1.13.0 / 2015-06-07 +=================== + + * Add nginx as a source + * Add `application/x-cocoa` + * Add `application/x-java-archive-diff` + * Add `application/x-makeself` + * Add `application/x-perl` + * Add `application/x-pilot` + * Add `application/x-redhat-package-manager` + * Add `application/x-sea` + * Add `audio/x-m4a` + * Add `audio/x-realaudio` + * Add `image/x-jng` + * Add `text/mathml` + +1.12.0 / 2015-06-05 +=================== + + * Add `application/bdoc` + * Add `application/vnd.hyperdrive+json` + * Add `application/x-bdoc` + * Add extension `.rtf` to `text/rtf` + +1.11.0 / 2015-05-31 +=================== + + * Add `audio/wav` + * Add `audio/wave` + * Add extension `.litcoffee` to `text/coffeescript` + * Add extension `.sfd-hdstx` to `application/vnd.hydrostatix.sof-data` + * Add extension `.n-gage` to `application/vnd.nokia.n-gage.symbian.install` + +1.10.0 / 2015-05-19 +=================== + + * Add `application/vnd.balsamiq.bmpr` + * Add `application/vnd.microsoft.portable-executable` + * Add `application/x-ns-proxy-autoconfig` + +1.9.1 / 2015-04-19 +================== + + * Remove `.json` extension from `application/manifest+json` + - This is causing bugs downstream + +1.9.0 / 2015-04-19 +================== + + * Add `application/manifest+json` + * Add `application/vnd.micro+json` + * Add `image/vnd.zbrush.pcx` + * Add `image/x-ms-bmp` + +1.8.0 / 2015-03-13 +================== + + * Add 
`application/vnd.citationstyles.style+xml` + * Add `application/vnd.fastcopy-disk-image` + * Add `application/vnd.gov.sk.xmldatacontainer+xml` + * Add extension `.jsonld` to `application/ld+json` + +1.7.0 / 2015-02-08 +================== + + * Add `application/vnd.gerber` + * Add `application/vnd.msa-disk-image` + +1.6.1 / 2015-02-05 +================== + + * Community extensions ownership transferred from `node-mime` + +1.6.0 / 2015-01-29 +================== + + * Add `application/jose` + * Add `application/jose+json` + * Add `application/json-seq` + * Add `application/jwk+json` + * Add `application/jwk-set+json` + * Add `application/jwt` + * Add `application/rdap+json` + * Add `application/vnd.gov.sk.e-form+xml` + * Add `application/vnd.ims.imsccv1p3` + +1.5.0 / 2014-12-30 +================== + + * Add `application/vnd.oracle.resource+json` + * Fix various invalid MIME type entries + - `application/mbox+xml` + - `application/oscp-response` + - `application/vwg-multiplexed` + - `audio/g721` + +1.4.0 / 2014-12-21 +================== + + * Add `application/vnd.ims.imsccv1p2` + * Fix various invalid MIME type entries + - `application/vnd-acucobol` + - `application/vnd-curl` + - `application/vnd-dart` + - `application/vnd-dxr` + - `application/vnd-fdf` + - `application/vnd-mif` + - `application/vnd-sema` + - `application/vnd-wap-wmlc` + - `application/vnd.adobe.flash-movie` + - `application/vnd.dece-zip` + - `application/vnd.dvb_service` + - `application/vnd.micrografx-igx` + - `application/vnd.sealed-doc` + - `application/vnd.sealed-eml` + - `application/vnd.sealed-mht` + - `application/vnd.sealed-ppt` + - `application/vnd.sealed-tiff` + - `application/vnd.sealed-xls` + - `application/vnd.sealedmedia.softseal-html` + - `application/vnd.sealedmedia.softseal-pdf` + - `application/vnd.wap-slc` + - `application/vnd.wap-wbxml` + - `audio/vnd.sealedmedia.softseal-mpeg` + - `image/vnd-djvu` + - `image/vnd-svf` + - `image/vnd-wap-wbmp` + - `image/vnd.sealed-png` + - `image/vnd.sealedmedia.softseal-gif` + - `image/vnd.sealedmedia.softseal-jpg` + - `model/vnd-dwf` + - `model/vnd.parasolid.transmit-binary` + - `model/vnd.parasolid.transmit-text` + - `text/vnd-a` + - `text/vnd-curl` + - `text/vnd.wap-wml` + * Remove example template MIME types + - `application/example` + - `audio/example` + - `image/example` + - `message/example` + - `model/example` + - `multipart/example` + - `text/example` + - `video/example` + +1.3.1 / 2014-12-16 +================== + + * Fix missing extensions + - `application/json5` + - `text/hjson` + +1.3.0 / 2014-12-07 +================== + + * Add `application/a2l` + * Add `application/aml` + * Add `application/atfx` + * Add `application/atxml` + * Add `application/cdfx+xml` + * Add `application/dii` + * Add `application/json5` + * Add `application/lxf` + * Add `application/mf4` + * Add `application/vnd.apache.thrift.compact` + * Add `application/vnd.apache.thrift.json` + * Add `application/vnd.coffeescript` + * Add `application/vnd.enphase.envoy` + * Add `application/vnd.ims.imsccv1p1` + * Add `text/csv-schema` + * Add `text/hjson` + * Add `text/markdown` + * Add `text/yaml` + +1.2.0 / 2014-11-09 +================== + + * Add `application/cea` + * Add `application/dit` + * Add `application/vnd.gov.sk.e-form+zip` + * Add `application/vnd.tmd.mediaflex.api+xml` + * Type `application/epub+zip` is now IANA-registered + +1.1.2 / 2014-10-23 +================== + + * Rebuild database for `application/x-www-form-urlencoded` change + +1.1.1 / 2014-10-20 +================== + + * Mark 
`application/x-www-form-urlencoded` as compressible. + +1.1.0 / 2014-09-28 +================== + + * Add `application/font-woff2` + +1.0.3 / 2014-09-25 +================== + + * Fix engine requirement in package + +1.0.2 / 2014-09-25 +================== + + * Add `application/coap-group+json` + * Add `application/dcd` + * Add `application/vnd.apache.thrift.binary` + * Add `image/vnd.tencent.tap` + * Mark all JSON-derived types as compressible + * Update `text/vtt` data + +1.0.1 / 2014-08-30 +================== + + * Fix extension ordering + +1.0.0 / 2014-08-30 +================== + + * Add `application/atf` + * Add `application/merge-patch+json` + * Add `multipart/x-mixed-replace` + * Add `source: 'apache'` metadata + * Add `source: 'iana'` metadata + * Remove badly-assumed charset data diff --git a/node_modules/mime-db/LICENSE b/node_modules/mime-db/LICENSE new file mode 100644 index 0000000..0751cb1 --- /dev/null +++ b/node_modules/mime-db/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015-2022 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-db/README.md b/node_modules/mime-db/README.md new file mode 100644 index 0000000..5a8fcfe --- /dev/null +++ b/node_modules/mime-db/README.md @@ -0,0 +1,100 @@ +# mime-db + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +This is a large database of mime types and information about them. +It consists of a single, public JSON file and does not include any logic, +allowing it to remain as un-opinionated as possible with an API. +It aggregates data from the following sources: + +- http://www.iana.org/assignments/media-types/media-types.xhtml +- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types +- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types + +## Installation + +```bash +npm install mime-db +``` + +### Database Download + +If you're crazy enough to use this in the browser, you can just grab the +JSON file using [jsDelivr](https://www.jsdelivr.com/). It is recommended to +replace `master` with [a release tag](https://github.com/jshttp/mime-db/tags) +as the JSON format may change in the future. 
+ +``` +https://cdn.jsdelivr.net/gh/jshttp/mime-db@master/db.json +``` + +## Usage + +```js +var db = require('mime-db') + +// grab data on .js files +var data = db['application/javascript'] +``` + +## Data Structure + +The JSON file is a map lookup for lowercased mime types. +Each mime type has the following properties: + +- `.source` - where the mime type is defined. + If not set, it's probably a custom media type. + - `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types) + - `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml) + - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types) +- `.extensions[]` - known extensions associated with this mime type. +- `.compressible` - whether a file of this type can be gzipped. +- `.charset` - the default charset associated with this type, if any. + +If unknown, every property could be `undefined`. + +## Contributing + +To edit the database, only make PRs against `src/custom-types.json` or +`src/custom-suffix.json`. + +The `src/custom-types.json` file is a JSON object with the MIME type as the +keys and the values being an object with the following keys: + +- `compressible` - leave out if you don't know, otherwise `true`/`false` to + indicate whether the data represented by the type is typically compressible. +- `extensions` - include an array of file extensions that are associated with + the type. +- `notes` - human-readable notes about the type, typically what the type is. +- `sources` - include an array of URLs of where the MIME type and the associated + extensions are sourced from. This needs to be a [primary source](https://en.wikipedia.org/wiki/Primary_source); + links to type aggregating sites and Wikipedia are _not acceptable_. + +To update the build, run `npm run build`. + +### Adding Custom Media Types + +The best way to get new media types included in this library is to register +them with the IANA. The community registration procedure is outlined in +[RFC 6838 section 5](http://tools.ietf.org/html/rfc6838#section-5). Types +registered with the IANA are automatically pulled into this library. + +If that is not possible / feasible, they can be added directly here as a +"custom" type. To do this, it is required to have a primary source that +definitively lists the media type. If an extension is going to be listed as +associated with this media type, the source must definitively link the +media type and extension as well.
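Editor's note: as a small illustration of the per-type properties described under "Data Structure" above, the sketch below builds a reverse lookup from file extension to MIME type. It is an editorial addition, not part of the upstream README, and assumes `mime-db` is installed; the simple "first type seen wins" rule is a deliberate simplification.

```js
// Editor's illustrative sketch: reverse lookup from extension to MIME type
// using the .extensions[] property described above.
var db = require('mime-db')

var typeForExtension = {}
Object.keys(db).forEach(function (type) {
  var extensions = db[type].extensions || []
  extensions.forEach(function (ext) {
    // keep the first type seen for an extension (a simplification; real
    // consumers such as the mime-types package apply their own precedence)
    if (!typeForExtension[ext]) typeForExtension[ext] = type
  })
})

console.log(typeForExtension['json'])             // 'application/json'
console.log(db['application/json'].compressible)  // true
console.log(db['application/json'].charset)       // 'UTF-8'
```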
+ +[ci-image]: https://badgen.net/github/checks/jshttp/mime-db/master?label=ci +[ci-url]: https://github.com/jshttp/mime-db/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-db/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-db?branch=master +[node-image]: https://badgen.net/npm/node/mime-db +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-db +[npm-url]: https://npmjs.org/package/mime-db +[npm-version-image]: https://badgen.net/npm/v/mime-db diff --git a/node_modules/mime-db/db.json b/node_modules/mime-db/db.json new file mode 100644 index 0000000..eb9c42c --- /dev/null +++ b/node_modules/mime-db/db.json @@ -0,0 +1,8519 @@ +{ + "application/1d-interleaved-parityfec": { + "source": "iana" + }, + "application/3gpdash-qoe-report+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/3gpp-ims+xml": { + "source": "iana", + "compressible": true + }, + "application/3gpphal+json": { + "source": "iana", + "compressible": true + }, + "application/3gpphalforms+json": { + "source": "iana", + "compressible": true + }, + "application/a2l": { + "source": "iana" + }, + "application/ace+cbor": { + "source": "iana" + }, + "application/activemessage": { + "source": "iana" + }, + "application/activity+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-directory+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcost+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcostparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointprop+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointpropparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-error+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-updatestreamcontrol+json": { + "source": "iana", + "compressible": true + }, + "application/alto-updatestreamparams+json": { + "source": "iana", + "compressible": true + }, + "application/aml": { + "source": "iana" + }, + "application/andrew-inset": { + "source": "iana", + "extensions": ["ez"] + }, + "application/applefile": { + "source": "iana" + }, + "application/applixware": { + "source": "apache", + "extensions": ["aw"] + }, + "application/at+jwt": { + "source": "iana" + }, + "application/atf": { + "source": "iana" + }, + "application/atfx": { + "source": "iana" + }, + "application/atom+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atom"] + }, + "application/atomcat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomcat"] + }, + "application/atomdeleted+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomdeleted"] + }, + "application/atomicmail": { + "source": "iana" + }, + "application/atomsvc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomsvc"] + }, + "application/atsc-dwd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dwd"] + }, + "application/atsc-dynamic-event-message": { + "source": 
"iana" + }, + "application/atsc-held+xml": { + "source": "iana", + "compressible": true, + "extensions": ["held"] + }, + "application/atsc-rdt+json": { + "source": "iana", + "compressible": true + }, + "application/atsc-rsat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rsat"] + }, + "application/atxml": { + "source": "iana" + }, + "application/auth-policy+xml": { + "source": "iana", + "compressible": true + }, + "application/bacnet-xdd+zip": { + "source": "iana", + "compressible": false + }, + "application/batch-smtp": { + "source": "iana" + }, + "application/bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/beep+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/calendar+json": { + "source": "iana", + "compressible": true + }, + "application/calendar+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xcs"] + }, + "application/call-completion": { + "source": "iana" + }, + "application/cals-1840": { + "source": "iana" + }, + "application/captive+json": { + "source": "iana", + "compressible": true + }, + "application/cbor": { + "source": "iana" + }, + "application/cbor-seq": { + "source": "iana" + }, + "application/cccex": { + "source": "iana" + }, + "application/ccmp+xml": { + "source": "iana", + "compressible": true + }, + "application/ccxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ccxml"] + }, + "application/cdfx+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdfx"] + }, + "application/cdmi-capability": { + "source": "iana", + "extensions": ["cdmia"] + }, + "application/cdmi-container": { + "source": "iana", + "extensions": ["cdmic"] + }, + "application/cdmi-domain": { + "source": "iana", + "extensions": ["cdmid"] + }, + "application/cdmi-object": { + "source": "iana", + "extensions": ["cdmio"] + }, + "application/cdmi-queue": { + "source": "iana", + "extensions": ["cdmiq"] + }, + "application/cdni": { + "source": "iana" + }, + "application/cea": { + "source": "iana" + }, + "application/cea-2018+xml": { + "source": "iana", + "compressible": true + }, + "application/cellml+xml": { + "source": "iana", + "compressible": true + }, + "application/cfw": { + "source": "iana" + }, + "application/city+json": { + "source": "iana", + "compressible": true + }, + "application/clr": { + "source": "iana" + }, + "application/clue+xml": { + "source": "iana", + "compressible": true + }, + "application/clue_info+xml": { + "source": "iana", + "compressible": true + }, + "application/cms": { + "source": "iana" + }, + "application/cnrp+xml": { + "source": "iana", + "compressible": true + }, + "application/coap-group+json": { + "source": "iana", + "compressible": true + }, + "application/coap-payload": { + "source": "iana" + }, + "application/commonground": { + "source": "iana" + }, + "application/conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/cose": { + "source": "iana" + }, + "application/cose-key": { + "source": "iana" + }, + "application/cose-key-set": { + "source": "iana" + }, + "application/cpl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cpl"] + }, + "application/csrattrs": { + "source": "iana" + }, + "application/csta+xml": { + "source": "iana", + "compressible": true + }, + "application/cstadata+xml": { + "source": "iana", + "compressible": true + }, + "application/csvm+json": { + "source": "iana", + "compressible": true + }, + "application/cu-seeme": { + "source": "apache", 
+ "extensions": ["cu"] + }, + "application/cwt": { + "source": "iana" + }, + "application/cybercash": { + "source": "iana" + }, + "application/dart": { + "compressible": true + }, + "application/dash+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpd"] + }, + "application/dash-patch+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpp"] + }, + "application/dashdelta": { + "source": "iana" + }, + "application/davmount+xml": { + "source": "iana", + "compressible": true, + "extensions": ["davmount"] + }, + "application/dca-rft": { + "source": "iana" + }, + "application/dcd": { + "source": "iana" + }, + "application/dec-dx": { + "source": "iana" + }, + "application/dialog-info+xml": { + "source": "iana", + "compressible": true + }, + "application/dicom": { + "source": "iana" + }, + "application/dicom+json": { + "source": "iana", + "compressible": true + }, + "application/dicom+xml": { + "source": "iana", + "compressible": true + }, + "application/dii": { + "source": "iana" + }, + "application/dit": { + "source": "iana" + }, + "application/dns": { + "source": "iana" + }, + "application/dns+json": { + "source": "iana", + "compressible": true + }, + "application/dns-message": { + "source": "iana" + }, + "application/docbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dbk"] + }, + "application/dots+cbor": { + "source": "iana" + }, + "application/dskpp+xml": { + "source": "iana", + "compressible": true + }, + "application/dssc+der": { + "source": "iana", + "extensions": ["dssc"] + }, + "application/dssc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdssc"] + }, + "application/dvcs": { + "source": "iana" + }, + "application/ecmascript": { + "source": "iana", + "compressible": true, + "extensions": ["es","ecma"] + }, + "application/edi-consent": { + "source": "iana" + }, + "application/edi-x12": { + "source": "iana", + "compressible": false + }, + "application/edifact": { + "source": "iana", + "compressible": false + }, + "application/efi": { + "source": "iana" + }, + "application/elm+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/elm+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.cap+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/emergencycalldata.comment+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.control+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.deviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.ecall.msd": { + "source": "iana" + }, + "application/emergencycalldata.providerinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.serviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.subscriberinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.veds+xml": { + "source": "iana", + "compressible": true + }, + "application/emma+xml": { + "source": "iana", + "compressible": true, + "extensions": ["emma"] + }, + "application/emotionml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["emotionml"] + }, + "application/encaprtp": { + "source": "iana" + }, + "application/epp+xml": { + "source": "iana", + "compressible": true + }, + "application/epub+zip": { + "source": "iana", + "compressible": false, 
+ "extensions": ["epub"] + }, + "application/eshop": { + "source": "iana" + }, + "application/exi": { + "source": "iana", + "extensions": ["exi"] + }, + "application/expect-ct-report+json": { + "source": "iana", + "compressible": true + }, + "application/express": { + "source": "iana", + "extensions": ["exp"] + }, + "application/fastinfoset": { + "source": "iana" + }, + "application/fastsoap": { + "source": "iana" + }, + "application/fdt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["fdt"] + }, + "application/fhir+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/fhir+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/fido.trusted-apps+json": { + "compressible": true + }, + "application/fits": { + "source": "iana" + }, + "application/flexfec": { + "source": "iana" + }, + "application/font-sfnt": { + "source": "iana" + }, + "application/font-tdpfr": { + "source": "iana", + "extensions": ["pfr"] + }, + "application/font-woff": { + "source": "iana", + "compressible": false + }, + "application/framework-attributes+xml": { + "source": "iana", + "compressible": true + }, + "application/geo+json": { + "source": "iana", + "compressible": true, + "extensions": ["geojson"] + }, + "application/geo+json-seq": { + "source": "iana" + }, + "application/geopackage+sqlite3": { + "source": "iana" + }, + "application/geoxacml+xml": { + "source": "iana", + "compressible": true + }, + "application/gltf-buffer": { + "source": "iana" + }, + "application/gml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["gml"] + }, + "application/gpx+xml": { + "source": "apache", + "compressible": true, + "extensions": ["gpx"] + }, + "application/gxf": { + "source": "apache", + "extensions": ["gxf"] + }, + "application/gzip": { + "source": "iana", + "compressible": false, + "extensions": ["gz"] + }, + "application/h224": { + "source": "iana" + }, + "application/held+xml": { + "source": "iana", + "compressible": true + }, + "application/hjson": { + "extensions": ["hjson"] + }, + "application/http": { + "source": "iana" + }, + "application/hyperstudio": { + "source": "iana", + "extensions": ["stk"] + }, + "application/ibe-key-request+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pkg-reply+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pp-data": { + "source": "iana" + }, + "application/iges": { + "source": "iana" + }, + "application/im-iscomposing+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/index": { + "source": "iana" + }, + "application/index.cmd": { + "source": "iana" + }, + "application/index.obj": { + "source": "iana" + }, + "application/index.response": { + "source": "iana" + }, + "application/index.vnd": { + "source": "iana" + }, + "application/inkml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ink","inkml"] + }, + "application/iotp": { + "source": "iana" + }, + "application/ipfix": { + "source": "iana", + "extensions": ["ipfix"] + }, + "application/ipp": { + "source": "iana" + }, + "application/isup": { + "source": "iana" + }, + "application/its+xml": { + "source": "iana", + "compressible": true, + "extensions": ["its"] + }, + "application/java-archive": { + "source": "apache", + "compressible": false, + "extensions": ["jar","war","ear"] + }, + "application/java-serialized-object": { + "source": "apache", + "compressible": false, + "extensions": ["ser"] + }, + 
"application/java-vm": { + "source": "apache", + "compressible": false, + "extensions": ["class"] + }, + "application/javascript": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["js","mjs"] + }, + "application/jf2feed+json": { + "source": "iana", + "compressible": true + }, + "application/jose": { + "source": "iana" + }, + "application/jose+json": { + "source": "iana", + "compressible": true + }, + "application/jrd+json": { + "source": "iana", + "compressible": true + }, + "application/jscalendar+json": { + "source": "iana", + "compressible": true + }, + "application/json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["json","map"] + }, + "application/json-patch+json": { + "source": "iana", + "compressible": true + }, + "application/json-seq": { + "source": "iana" + }, + "application/json5": { + "extensions": ["json5"] + }, + "application/jsonml+json": { + "source": "apache", + "compressible": true, + "extensions": ["jsonml"] + }, + "application/jwk+json": { + "source": "iana", + "compressible": true + }, + "application/jwk-set+json": { + "source": "iana", + "compressible": true + }, + "application/jwt": { + "source": "iana" + }, + "application/kpml-request+xml": { + "source": "iana", + "compressible": true + }, + "application/kpml-response+xml": { + "source": "iana", + "compressible": true + }, + "application/ld+json": { + "source": "iana", + "compressible": true, + "extensions": ["jsonld"] + }, + "application/lgr+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lgr"] + }, + "application/link-format": { + "source": "iana" + }, + "application/load-control+xml": { + "source": "iana", + "compressible": true + }, + "application/lost+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lostxml"] + }, + "application/lostsync+xml": { + "source": "iana", + "compressible": true + }, + "application/lpf+zip": { + "source": "iana", + "compressible": false + }, + "application/lxf": { + "source": "iana" + }, + "application/mac-binhex40": { + "source": "iana", + "extensions": ["hqx"] + }, + "application/mac-compactpro": { + "source": "apache", + "extensions": ["cpt"] + }, + "application/macwriteii": { + "source": "iana" + }, + "application/mads+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mads"] + }, + "application/manifest+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["webmanifest"] + }, + "application/marc": { + "source": "iana", + "extensions": ["mrc"] + }, + "application/marcxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mrcx"] + }, + "application/mathematica": { + "source": "iana", + "extensions": ["ma","nb","mb"] + }, + "application/mathml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mathml"] + }, + "application/mathml-content+xml": { + "source": "iana", + "compressible": true + }, + "application/mathml-presentation+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-associated-procedure-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-deregister+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-envelope+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-protection-description+xml": { 
+ "source": "iana", + "compressible": true + }, + "application/mbms-reception-report+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-schedule+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-user-service-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbox": { + "source": "iana", + "extensions": ["mbox"] + }, + "application/media-policy-dataset+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpf"] + }, + "application/media_control+xml": { + "source": "iana", + "compressible": true + }, + "application/mediaservercontrol+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mscml"] + }, + "application/merge-patch+json": { + "source": "iana", + "compressible": true + }, + "application/metalink+xml": { + "source": "apache", + "compressible": true, + "extensions": ["metalink"] + }, + "application/metalink4+xml": { + "source": "iana", + "compressible": true, + "extensions": ["meta4"] + }, + "application/mets+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mets"] + }, + "application/mf4": { + "source": "iana" + }, + "application/mikey": { + "source": "iana" + }, + "application/mipc": { + "source": "iana" + }, + "application/missing-blocks+cbor-seq": { + "source": "iana" + }, + "application/mmt-aei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["maei"] + }, + "application/mmt-usd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musd"] + }, + "application/mods+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mods"] + }, + "application/moss-keys": { + "source": "iana" + }, + "application/moss-signature": { + "source": "iana" + }, + "application/mosskey-data": { + "source": "iana" + }, + "application/mosskey-request": { + "source": "iana" + }, + "application/mp21": { + "source": "iana", + "extensions": ["m21","mp21"] + }, + "application/mp4": { + "source": "iana", + "extensions": ["mp4s","m4p"] + }, + "application/mpeg4-generic": { + "source": "iana" + }, + "application/mpeg4-iod": { + "source": "iana" + }, + "application/mpeg4-iod-xmt": { + "source": "iana" + }, + "application/mrb-consumer+xml": { + "source": "iana", + "compressible": true + }, + "application/mrb-publish+xml": { + "source": "iana", + "compressible": true + }, + "application/msc-ivr+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/msc-mixer+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/msword": { + "source": "iana", + "compressible": false, + "extensions": ["doc","dot"] + }, + "application/mud+json": { + "source": "iana", + "compressible": true + }, + "application/multipart-core": { + "source": "iana" + }, + "application/mxf": { + "source": "iana", + "extensions": ["mxf"] + }, + "application/n-quads": { + "source": "iana", + "extensions": ["nq"] + }, + "application/n-triples": { + "source": "iana", + "extensions": ["nt"] + }, + "application/nasdata": { + "source": "iana" + }, + "application/news-checkgroups": { + "source": "iana", + "charset": "US-ASCII" + }, + "application/news-groupinfo": { + "source": "iana", + "charset": "US-ASCII" + }, + "application/news-transmission": { + "source": "iana" + }, + "application/nlsml+xml": { + "source": "iana", + 
"compressible": true + }, + "application/node": { + "source": "iana", + "extensions": ["cjs"] + }, + "application/nss": { + "source": "iana" + }, + "application/oauth-authz-req+jwt": { + "source": "iana" + }, + "application/oblivious-dns-message": { + "source": "iana" + }, + "application/ocsp-request": { + "source": "iana" + }, + "application/ocsp-response": { + "source": "iana" + }, + "application/octet-stream": { + "source": "iana", + "compressible": false, + "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"] + }, + "application/oda": { + "source": "iana", + "extensions": ["oda"] + }, + "application/odm+xml": { + "source": "iana", + "compressible": true + }, + "application/odx": { + "source": "iana" + }, + "application/oebps-package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["opf"] + }, + "application/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogx"] + }, + "application/omdoc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["omdoc"] + }, + "application/onenote": { + "source": "apache", + "extensions": ["onetoc","onetoc2","onetmp","onepkg"] + }, + "application/opc-nodeset+xml": { + "source": "iana", + "compressible": true + }, + "application/oscore": { + "source": "iana" + }, + "application/oxps": { + "source": "iana", + "extensions": ["oxps"] + }, + "application/p21": { + "source": "iana" + }, + "application/p21+zip": { + "source": "iana", + "compressible": false + }, + "application/p2p-overlay+xml": { + "source": "iana", + "compressible": true, + "extensions": ["relo"] + }, + "application/parityfec": { + "source": "iana" + }, + "application/passport": { + "source": "iana" + }, + "application/patch-ops-error+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xer"] + }, + "application/pdf": { + "source": "iana", + "compressible": false, + "extensions": ["pdf"] + }, + "application/pdx": { + "source": "iana" + }, + "application/pem-certificate-chain": { + "source": "iana" + }, + "application/pgp-encrypted": { + "source": "iana", + "compressible": false, + "extensions": ["pgp"] + }, + "application/pgp-keys": { + "source": "iana", + "extensions": ["asc"] + }, + "application/pgp-signature": { + "source": "iana", + "extensions": ["asc","sig"] + }, + "application/pics-rules": { + "source": "apache", + "extensions": ["prf"] + }, + "application/pidf+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/pidf-diff+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/pkcs10": { + "source": "iana", + "extensions": ["p10"] + }, + "application/pkcs12": { + "source": "iana" + }, + "application/pkcs7-mime": { + "source": "iana", + "extensions": ["p7m","p7c"] + }, + "application/pkcs7-signature": { + "source": "iana", + "extensions": ["p7s"] + }, + "application/pkcs8": { + "source": "iana", + "extensions": ["p8"] + }, + "application/pkcs8-encrypted": { + "source": "iana" + }, + "application/pkix-attr-cert": { + "source": "iana", + "extensions": ["ac"] + }, + "application/pkix-cert": { + "source": "iana", + "extensions": ["cer"] + }, + "application/pkix-crl": { + "source": "iana", + "extensions": ["crl"] + }, + "application/pkix-pkipath": { + "source": "iana", + "extensions": ["pkipath"] + }, + "application/pkixcmp": { + "source": "iana", + "extensions": ["pki"] + }, + "application/pls+xml": { + "source": "iana", + "compressible": 
true, + "extensions": ["pls"] + }, + "application/poc-settings+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/postscript": { + "source": "iana", + "compressible": true, + "extensions": ["ai","eps","ps"] + }, + "application/ppsp-tracker+json": { + "source": "iana", + "compressible": true + }, + "application/problem+json": { + "source": "iana", + "compressible": true + }, + "application/problem+xml": { + "source": "iana", + "compressible": true + }, + "application/provenance+xml": { + "source": "iana", + "compressible": true, + "extensions": ["provx"] + }, + "application/prs.alvestrand.titrax-sheet": { + "source": "iana" + }, + "application/prs.cww": { + "source": "iana", + "extensions": ["cww"] + }, + "application/prs.cyn": { + "source": "iana", + "charset": "7-BIT" + }, + "application/prs.hpub+zip": { + "source": "iana", + "compressible": false + }, + "application/prs.nprend": { + "source": "iana" + }, + "application/prs.plucker": { + "source": "iana" + }, + "application/prs.rdf-xml-crypt": { + "source": "iana" + }, + "application/prs.xsf+xml": { + "source": "iana", + "compressible": true + }, + "application/pskc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["pskcxml"] + }, + "application/pvd+json": { + "source": "iana", + "compressible": true + }, + "application/qsig": { + "source": "iana" + }, + "application/raml+yaml": { + "compressible": true, + "extensions": ["raml"] + }, + "application/raptorfec": { + "source": "iana" + }, + "application/rdap+json": { + "source": "iana", + "compressible": true + }, + "application/rdf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rdf","owl"] + }, + "application/reginfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rif"] + }, + "application/relax-ng-compact-syntax": { + "source": "iana", + "extensions": ["rnc"] + }, + "application/remote-printing": { + "source": "iana" + }, + "application/reputon+json": { + "source": "iana", + "compressible": true + }, + "application/resource-lists+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rl"] + }, + "application/resource-lists-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rld"] + }, + "application/rfc+xml": { + "source": "iana", + "compressible": true + }, + "application/riscos": { + "source": "iana" + }, + "application/rlmi+xml": { + "source": "iana", + "compressible": true + }, + "application/rls-services+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rs"] + }, + "application/route-apd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rapd"] + }, + "application/route-s-tsid+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sls"] + }, + "application/route-usd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rusd"] + }, + "application/rpki-ghostbusters": { + "source": "iana", + "extensions": ["gbr"] + }, + "application/rpki-manifest": { + "source": "iana", + "extensions": ["mft"] + }, + "application/rpki-publication": { + "source": "iana" + }, + "application/rpki-roa": { + "source": "iana", + "extensions": ["roa"] + }, + "application/rpki-updown": { + "source": "iana" + }, + "application/rsd+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rsd"] + }, + "application/rss+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rss"] + }, + "application/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + 
}, + "application/rtploopback": { + "source": "iana" + }, + "application/rtx": { + "source": "iana" + }, + "application/samlassertion+xml": { + "source": "iana", + "compressible": true + }, + "application/samlmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/sarif+json": { + "source": "iana", + "compressible": true + }, + "application/sarif-external-properties+json": { + "source": "iana", + "compressible": true + }, + "application/sbe": { + "source": "iana" + }, + "application/sbml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sbml"] + }, + "application/scaip+xml": { + "source": "iana", + "compressible": true + }, + "application/scim+json": { + "source": "iana", + "compressible": true + }, + "application/scvp-cv-request": { + "source": "iana", + "extensions": ["scq"] + }, + "application/scvp-cv-response": { + "source": "iana", + "extensions": ["scs"] + }, + "application/scvp-vp-request": { + "source": "iana", + "extensions": ["spq"] + }, + "application/scvp-vp-response": { + "source": "iana", + "extensions": ["spp"] + }, + "application/sdp": { + "source": "iana", + "extensions": ["sdp"] + }, + "application/secevent+jwt": { + "source": "iana" + }, + "application/senml+cbor": { + "source": "iana" + }, + "application/senml+json": { + "source": "iana", + "compressible": true + }, + "application/senml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["senmlx"] + }, + "application/senml-etch+cbor": { + "source": "iana" + }, + "application/senml-etch+json": { + "source": "iana", + "compressible": true + }, + "application/senml-exi": { + "source": "iana" + }, + "application/sensml+cbor": { + "source": "iana" + }, + "application/sensml+json": { + "source": "iana", + "compressible": true + }, + "application/sensml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sensmlx"] + }, + "application/sensml-exi": { + "source": "iana" + }, + "application/sep+xml": { + "source": "iana", + "compressible": true + }, + "application/sep-exi": { + "source": "iana" + }, + "application/session-info": { + "source": "iana" + }, + "application/set-payment": { + "source": "iana" + }, + "application/set-payment-initiation": { + "source": "iana", + "extensions": ["setpay"] + }, + "application/set-registration": { + "source": "iana" + }, + "application/set-registration-initiation": { + "source": "iana", + "extensions": ["setreg"] + }, + "application/sgml": { + "source": "iana" + }, + "application/sgml-open-catalog": { + "source": "iana" + }, + "application/shf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["shf"] + }, + "application/sieve": { + "source": "iana", + "extensions": ["siv","sieve"] + }, + "application/simple-filter+xml": { + "source": "iana", + "compressible": true + }, + "application/simple-message-summary": { + "source": "iana" + }, + "application/simplesymbolcontainer": { + "source": "iana" + }, + "application/sipc": { + "source": "iana" + }, + "application/slate": { + "source": "iana" + }, + "application/smil": { + "source": "iana" + }, + "application/smil+xml": { + "source": "iana", + "compressible": true, + "extensions": ["smi","smil"] + }, + "application/smpte336m": { + "source": "iana" + }, + "application/soap+fastinfoset": { + "source": "iana" + }, + "application/soap+xml": { + "source": "iana", + "compressible": true + }, + "application/sparql-query": { + "source": "iana", + "extensions": ["rq"] + }, + "application/sparql-results+xml": { + "source": "iana", + "compressible": true, + 
"extensions": ["srx"] + }, + "application/spdx+json": { + "source": "iana", + "compressible": true + }, + "application/spirits-event+xml": { + "source": "iana", + "compressible": true + }, + "application/sql": { + "source": "iana" + }, + "application/srgs": { + "source": "iana", + "extensions": ["gram"] + }, + "application/srgs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["grxml"] + }, + "application/sru+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sru"] + }, + "application/ssdl+xml": { + "source": "apache", + "compressible": true, + "extensions": ["ssdl"] + }, + "application/ssml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ssml"] + }, + "application/stix+json": { + "source": "iana", + "compressible": true + }, + "application/swid+xml": { + "source": "iana", + "compressible": true, + "extensions": ["swidtag"] + }, + "application/tamp-apex-update": { + "source": "iana" + }, + "application/tamp-apex-update-confirm": { + "source": "iana" + }, + "application/tamp-community-update": { + "source": "iana" + }, + "application/tamp-community-update-confirm": { + "source": "iana" + }, + "application/tamp-error": { + "source": "iana" + }, + "application/tamp-sequence-adjust": { + "source": "iana" + }, + "application/tamp-sequence-adjust-confirm": { + "source": "iana" + }, + "application/tamp-status-query": { + "source": "iana" + }, + "application/tamp-status-response": { + "source": "iana" + }, + "application/tamp-update": { + "source": "iana" + }, + "application/tamp-update-confirm": { + "source": "iana" + }, + "application/tar": { + "compressible": true + }, + "application/taxii+json": { + "source": "iana", + "compressible": true + }, + "application/td+json": { + "source": "iana", + "compressible": true + }, + "application/tei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tei","teicorpus"] + }, + "application/tetra_isi": { + "source": "iana" + }, + "application/thraud+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tfi"] + }, + "application/timestamp-query": { + "source": "iana" + }, + "application/timestamp-reply": { + "source": "iana" + }, + "application/timestamped-data": { + "source": "iana", + "extensions": ["tsd"] + }, + "application/tlsrpt+gzip": { + "source": "iana" + }, + "application/tlsrpt+json": { + "source": "iana", + "compressible": true + }, + "application/tnauthlist": { + "source": "iana" + }, + "application/token-introspection+jwt": { + "source": "iana" + }, + "application/toml": { + "compressible": true, + "extensions": ["toml"] + }, + "application/trickle-ice-sdpfrag": { + "source": "iana" + }, + "application/trig": { + "source": "iana", + "extensions": ["trig"] + }, + "application/ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ttml"] + }, + "application/tve-trigger": { + "source": "iana" + }, + "application/tzif": { + "source": "iana" + }, + "application/tzif-leap": { + "source": "iana" + }, + "application/ubjson": { + "compressible": false, + "extensions": ["ubj"] + }, + "application/ulpfec": { + "source": "iana" + }, + "application/urc-grpsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/urc-ressheet+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rsheet"] + }, + "application/urc-targetdesc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["td"] + }, + "application/urc-uisocketdesc+xml": { + "source": "iana", + "compressible": true + }, + "application/vcard+json": 
{ + "source": "iana", + "compressible": true + }, + "application/vcard+xml": { + "source": "iana", + "compressible": true + }, + "application/vemmi": { + "source": "iana" + }, + "application/vividence.scriptfile": { + "source": "apache" + }, + "application/vnd.1000minds.decision-model+xml": { + "source": "iana", + "compressible": true, + "extensions": ["1km"] + }, + "application/vnd.3gpp-prose+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-prose-pc3ch+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-v2x-local-service-information": { + "source": "iana" + }, + "application/vnd.3gpp.5gnas": { + "source": "iana" + }, + "application/vnd.3gpp.access-transfer-events+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.bsf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gmop+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gtpc": { + "source": "iana" + }, + "application/vnd.3gpp.interworking-data": { + "source": "iana" + }, + "application/vnd.3gpp.lpp": { + "source": "iana" + }, + "application/vnd.3gpp.mc-signalling-ear": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-payload": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-signalling": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-floor-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-signed+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-init-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-transmission-request+xml": { + "source": "iana", + "compressible": true + 
}, + "application/vnd.3gpp.mcvideo-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mid-call+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ngap": { + "source": "iana" + }, + "application/vnd.3gpp.pfcp": { + "source": "iana" + }, + "application/vnd.3gpp.pic-bw-large": { + "source": "iana", + "extensions": ["plb"] + }, + "application/vnd.3gpp.pic-bw-small": { + "source": "iana", + "extensions": ["psb"] + }, + "application/vnd.3gpp.pic-bw-var": { + "source": "iana", + "extensions": ["pvb"] + }, + "application/vnd.3gpp.s1ap": { + "source": "iana" + }, + "application/vnd.3gpp.sms": { + "source": "iana" + }, + "application/vnd.3gpp.sms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-ext+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.state-and-event-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ussd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.bcmcsinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.sms": { + "source": "iana" + }, + "application/vnd.3gpp2.tcap": { + "source": "iana", + "extensions": ["tcap"] + }, + "application/vnd.3lightssoftware.imagescal": { + "source": "iana" + }, + "application/vnd.3m.post-it-notes": { + "source": "iana", + "extensions": ["pwn"] + }, + "application/vnd.accpac.simply.aso": { + "source": "iana", + "extensions": ["aso"] + }, + "application/vnd.accpac.simply.imp": { + "source": "iana", + "extensions": ["imp"] + }, + "application/vnd.acucobol": { + "source": "iana", + "extensions": ["acu"] + }, + "application/vnd.acucorp": { + "source": "iana", + "extensions": ["atc","acutc"] + }, + "application/vnd.adobe.air-application-installer-package+zip": { + "source": "apache", + "compressible": false, + "extensions": ["air"] + }, + "application/vnd.adobe.flash.movie": { + "source": "iana" + }, + "application/vnd.adobe.formscentral.fcdt": { + "source": "iana", + "extensions": ["fcdt"] + }, + "application/vnd.adobe.fxp": { + "source": "iana", + "extensions": ["fxp","fxpl"] + }, + "application/vnd.adobe.partial-upload": { + "source": "iana" + }, + "application/vnd.adobe.xdp+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdp"] + }, + "application/vnd.adobe.xfdf": { + "source": "iana", + "extensions": ["xfdf"] + }, + "application/vnd.aether.imp": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata-pagedef": { + "source": "iana" + }, + "application/vnd.afpc.cmoca-cmresource": { + "source": "iana" + }, + "application/vnd.afpc.foca-charset": { + "source": "iana" + }, + "application/vnd.afpc.foca-codedfont": { + "source": "iana" + }, + "application/vnd.afpc.foca-codepage": { + "source": "iana" + }, + "application/vnd.afpc.modca": { + "source": "iana" + }, + "application/vnd.afpc.modca-cmtable": { + "source": "iana" + }, + "application/vnd.afpc.modca-formdef": { + "source": "iana" + }, + "application/vnd.afpc.modca-mediummap": { + "source": "iana" + }, + "application/vnd.afpc.modca-objectcontainer": { + "source": "iana" + }, + "application/vnd.afpc.modca-overlay": { + "source": "iana" + }, + "application/vnd.afpc.modca-pagesegment": { + "source": 
"iana" + }, + "application/vnd.age": { + "source": "iana", + "extensions": ["age"] + }, + "application/vnd.ah-barcode": { + "source": "iana" + }, + "application/vnd.ahead.space": { + "source": "iana", + "extensions": ["ahead"] + }, + "application/vnd.airzip.filesecure.azf": { + "source": "iana", + "extensions": ["azf"] + }, + "application/vnd.airzip.filesecure.azs": { + "source": "iana", + "extensions": ["azs"] + }, + "application/vnd.amadeus+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.amazon.ebook": { + "source": "apache", + "extensions": ["azw"] + }, + "application/vnd.amazon.mobi8-ebook": { + "source": "iana" + }, + "application/vnd.americandynamics.acc": { + "source": "iana", + "extensions": ["acc"] + }, + "application/vnd.amiga.ami": { + "source": "iana", + "extensions": ["ami"] + }, + "application/vnd.amundsen.maze+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.android.ota": { + "source": "iana" + }, + "application/vnd.android.package-archive": { + "source": "apache", + "compressible": false, + "extensions": ["apk"] + }, + "application/vnd.anki": { + "source": "iana" + }, + "application/vnd.anser-web-certificate-issue-initiation": { + "source": "iana", + "extensions": ["cii"] + }, + "application/vnd.anser-web-funds-transfer-initiation": { + "source": "apache", + "extensions": ["fti"] + }, + "application/vnd.antix.game-component": { + "source": "iana", + "extensions": ["atx"] + }, + "application/vnd.apache.arrow.file": { + "source": "iana" + }, + "application/vnd.apache.arrow.stream": { + "source": "iana" + }, + "application/vnd.apache.thrift.binary": { + "source": "iana" + }, + "application/vnd.apache.thrift.compact": { + "source": "iana" + }, + "application/vnd.apache.thrift.json": { + "source": "iana" + }, + "application/vnd.api+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.aplextor.warrp+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apothekende.reservation+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apple.installer+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpkg"] + }, + "application/vnd.apple.keynote": { + "source": "iana", + "extensions": ["key"] + }, + "application/vnd.apple.mpegurl": { + "source": "iana", + "extensions": ["m3u8"] + }, + "application/vnd.apple.numbers": { + "source": "iana", + "extensions": ["numbers"] + }, + "application/vnd.apple.pages": { + "source": "iana", + "extensions": ["pages"] + }, + "application/vnd.apple.pkpass": { + "compressible": false, + "extensions": ["pkpass"] + }, + "application/vnd.arastra.swi": { + "source": "iana" + }, + "application/vnd.aristanetworks.swi": { + "source": "iana", + "extensions": ["swi"] + }, + "application/vnd.artisan+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.artsquare": { + "source": "iana" + }, + "application/vnd.astraea-software.iota": { + "source": "iana", + "extensions": ["iota"] + }, + "application/vnd.audiograph": { + "source": "iana", + "extensions": ["aep"] + }, + "application/vnd.autopackage": { + "source": "iana" + }, + "application/vnd.avalon+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.avistar+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.balsamiq.bmml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["bmml"] + }, + "application/vnd.balsamiq.bmpr": { + "source": "iana" + }, + "application/vnd.banana-accounting": { + 
"source": "iana" + }, + "application/vnd.bbf.usp.error": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bekitzur-stech+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bint.med-content": { + "source": "iana" + }, + "application/vnd.biopax.rdf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.blink-idb-value-wrapper": { + "source": "iana" + }, + "application/vnd.blueice.multipass": { + "source": "iana", + "extensions": ["mpm"] + }, + "application/vnd.bluetooth.ep.oob": { + "source": "iana" + }, + "application/vnd.bluetooth.le.oob": { + "source": "iana" + }, + "application/vnd.bmi": { + "source": "iana", + "extensions": ["bmi"] + }, + "application/vnd.bpf": { + "source": "iana" + }, + "application/vnd.bpf3": { + "source": "iana" + }, + "application/vnd.businessobjects": { + "source": "iana", + "extensions": ["rep"] + }, + "application/vnd.byu.uapi+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cab-jscript": { + "source": "iana" + }, + "application/vnd.canon-cpdl": { + "source": "iana" + }, + "application/vnd.canon-lips": { + "source": "iana" + }, + "application/vnd.capasystems-pg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cendio.thinlinc.clientconf": { + "source": "iana" + }, + "application/vnd.century-systems.tcp_stream": { + "source": "iana" + }, + "application/vnd.chemdraw+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdxml"] + }, + "application/vnd.chess-pgn": { + "source": "iana" + }, + "application/vnd.chipnuts.karaoke-mmd": { + "source": "iana", + "extensions": ["mmd"] + }, + "application/vnd.ciedi": { + "source": "iana" + }, + "application/vnd.cinderella": { + "source": "iana", + "extensions": ["cdy"] + }, + "application/vnd.cirpack.isdn-ext": { + "source": "iana" + }, + "application/vnd.citationstyles.style+xml": { + "source": "iana", + "compressible": true, + "extensions": ["csl"] + }, + "application/vnd.claymore": { + "source": "iana", + "extensions": ["cla"] + }, + "application/vnd.cloanto.rp9": { + "source": "iana", + "extensions": ["rp9"] + }, + "application/vnd.clonk.c4group": { + "source": "iana", + "extensions": ["c4g","c4d","c4f","c4p","c4u"] + }, + "application/vnd.cluetrust.cartomobile-config": { + "source": "iana", + "extensions": ["c11amc"] + }, + "application/vnd.cluetrust.cartomobile-config-pkg": { + "source": "iana", + "extensions": ["c11amz"] + }, + "application/vnd.coffeescript": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet-template": { + "source": "iana" + }, + "application/vnd.collection+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.doc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.next+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.comicbook+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.comicbook-rar": { + 
"source": "iana" + }, + "application/vnd.commerce-battelle": { + "source": "iana" + }, + "application/vnd.commonspace": { + "source": "iana", + "extensions": ["csp"] + }, + "application/vnd.contact.cmsg": { + "source": "iana", + "extensions": ["cdbcmsg"] + }, + "application/vnd.coreos.ignition+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cosmocaller": { + "source": "iana", + "extensions": ["cmc"] + }, + "application/vnd.crick.clicker": { + "source": "iana", + "extensions": ["clkx"] + }, + "application/vnd.crick.clicker.keyboard": { + "source": "iana", + "extensions": ["clkk"] + }, + "application/vnd.crick.clicker.palette": { + "source": "iana", + "extensions": ["clkp"] + }, + "application/vnd.crick.clicker.template": { + "source": "iana", + "extensions": ["clkt"] + }, + "application/vnd.crick.clicker.wordbank": { + "source": "iana", + "extensions": ["clkw"] + }, + "application/vnd.criticaltools.wbs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wbs"] + }, + "application/vnd.cryptii.pipe+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.crypto-shade-file": { + "source": "iana" + }, + "application/vnd.cryptomator.encrypted": { + "source": "iana" + }, + "application/vnd.cryptomator.vault": { + "source": "iana" + }, + "application/vnd.ctc-posml": { + "source": "iana", + "extensions": ["pml"] + }, + "application/vnd.ctct.ws+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cups-pdf": { + "source": "iana" + }, + "application/vnd.cups-postscript": { + "source": "iana" + }, + "application/vnd.cups-ppd": { + "source": "iana", + "extensions": ["ppd"] + }, + "application/vnd.cups-raster": { + "source": "iana" + }, + "application/vnd.cups-raw": { + "source": "iana" + }, + "application/vnd.curl": { + "source": "iana" + }, + "application/vnd.curl.car": { + "source": "apache", + "extensions": ["car"] + }, + "application/vnd.curl.pcurl": { + "source": "apache", + "extensions": ["pcurl"] + }, + "application/vnd.cyan.dean.root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cybank": { + "source": "iana" + }, + "application/vnd.cyclonedx+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cyclonedx+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.d2l.coursepackage1p0+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.d3m-dataset": { + "source": "iana" + }, + "application/vnd.d3m-problem": { + "source": "iana" + }, + "application/vnd.dart": { + "source": "iana", + "compressible": true, + "extensions": ["dart"] + }, + "application/vnd.data-vision.rdz": { + "source": "iana", + "extensions": ["rdz"] + }, + "application/vnd.datapackage+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dataresource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dbf": { + "source": "iana", + "extensions": ["dbf"] + }, + "application/vnd.debian.binary-package": { + "source": "iana" + }, + "application/vnd.dece.data": { + "source": "iana", + "extensions": ["uvf","uvvf","uvd","uvvd"] + }, + "application/vnd.dece.ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uvt","uvvt"] + }, + "application/vnd.dece.unspecified": { + "source": "iana", + "extensions": ["uvx","uvvx"] + }, + "application/vnd.dece.zip": { + "source": "iana", + "extensions": ["uvz","uvvz"] + }, + "application/vnd.denovo.fcselayout-link": { + "source": "iana", + "extensions": ["fe_launch"] 
+ }, + "application/vnd.desmume.movie": { + "source": "iana" + }, + "application/vnd.dir-bi.plate-dl-nosuffix": { + "source": "iana" + }, + "application/vnd.dm.delegation+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dna": { + "source": "iana", + "extensions": ["dna"] + }, + "application/vnd.document+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dolby.mlp": { + "source": "apache", + "extensions": ["mlp"] + }, + "application/vnd.dolby.mobile.1": { + "source": "iana" + }, + "application/vnd.dolby.mobile.2": { + "source": "iana" + }, + "application/vnd.doremir.scorecloud-binary-document": { + "source": "iana" + }, + "application/vnd.dpgraph": { + "source": "iana", + "extensions": ["dpg"] + }, + "application/vnd.dreamfactory": { + "source": "iana", + "extensions": ["dfac"] + }, + "application/vnd.drive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ds-keypoint": { + "source": "apache", + "extensions": ["kpxx"] + }, + "application/vnd.dtg.local": { + "source": "iana" + }, + "application/vnd.dtg.local.flash": { + "source": "iana" + }, + "application/vnd.dtg.local.html": { + "source": "iana" + }, + "application/vnd.dvb.ait": { + "source": "iana", + "extensions": ["ait"] + }, + "application/vnd.dvb.dvbisl+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.dvbj": { + "source": "iana" + }, + "application/vnd.dvb.esgcontainer": { + "source": "iana" + }, + "application/vnd.dvb.ipdcdftnotifaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess2": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgpdd": { + "source": "iana" + }, + "application/vnd.dvb.ipdcroaming": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-base": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-enhancement": { + "source": "iana" + }, + "application/vnd.dvb.notif-aggregate-root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-container+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-generic+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-msglist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-response+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-init+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.pfr": { + "source": "iana" + }, + "application/vnd.dvb.service": { + "source": "iana", + "extensions": ["svc"] + }, + "application/vnd.dxr": { + "source": "iana" + }, + "application/vnd.dynageo": { + "source": "iana", + "extensions": ["geo"] + }, + "application/vnd.dzr": { + "source": "iana" + }, + "application/vnd.easykaraoke.cdgdownload": { + "source": "iana" + }, + "application/vnd.ecdis-update": { + "source": "iana" + }, + "application/vnd.ecip.rlp": { + "source": "iana" + }, + "application/vnd.eclipse.ditto+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ecowin.chart": { + "source": "iana", + "extensions": ["mag"] + }, + "application/vnd.ecowin.filerequest": { + "source": "iana" + }, + "application/vnd.ecowin.fileupdate": { + "source": "iana" + }, + "application/vnd.ecowin.series": { + "source": "iana" + }, + 
"application/vnd.ecowin.seriesrequest": { + "source": "iana" + }, + "application/vnd.ecowin.seriesupdate": { + "source": "iana" + }, + "application/vnd.efi.img": { + "source": "iana" + }, + "application/vnd.efi.iso": { + "source": "iana" + }, + "application/vnd.emclient.accessrequest+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.enliven": { + "source": "iana", + "extensions": ["nml"] + }, + "application/vnd.enphase.envoy": { + "source": "iana" + }, + "application/vnd.eprints.data+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.epson.esf": { + "source": "iana", + "extensions": ["esf"] + }, + "application/vnd.epson.msf": { + "source": "iana", + "extensions": ["msf"] + }, + "application/vnd.epson.quickanime": { + "source": "iana", + "extensions": ["qam"] + }, + "application/vnd.epson.salt": { + "source": "iana", + "extensions": ["slt"] + }, + "application/vnd.epson.ssf": { + "source": "iana", + "extensions": ["ssf"] + }, + "application/vnd.ericsson.quickcall": { + "source": "iana" + }, + "application/vnd.espass-espass+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.eszigno3+xml": { + "source": "iana", + "compressible": true, + "extensions": ["es3","et3"] + }, + "application/vnd.etsi.aoc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.asic-e+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.asic-s+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.cug+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvcommand+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-bc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-cod+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-npvr+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvservice+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsync+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mcid+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mheg5": { + "source": "iana" + }, + "application/vnd.etsi.overload-control-policy-dataset+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.pstn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.sci+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.simservs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.timestamp-token": { + "source": "iana" + }, + "application/vnd.etsi.tsl+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.tsl.der": { + "source": "iana" + }, + "application/vnd.eu.kasparian.car+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.eudora.data": { + "source": "iana" + }, + "application/vnd.evolv.ecig.profile": { + "source": "iana" + }, + "application/vnd.evolv.ecig.settings": { + "source": "iana" + }, + "application/vnd.evolv.ecig.theme": { + "source": "iana" + }, + "application/vnd.exstream-empower+zip": { + 
"source": "iana", + "compressible": false + }, + "application/vnd.exstream-package": { + "source": "iana" + }, + "application/vnd.ezpix-album": { + "source": "iana", + "extensions": ["ez2"] + }, + "application/vnd.ezpix-package": { + "source": "iana", + "extensions": ["ez3"] + }, + "application/vnd.f-secure.mobile": { + "source": "iana" + }, + "application/vnd.familysearch.gedcom+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.fastcopy-disk-image": { + "source": "iana" + }, + "application/vnd.fdf": { + "source": "iana", + "extensions": ["fdf"] + }, + "application/vnd.fdsn.mseed": { + "source": "iana", + "extensions": ["mseed"] + }, + "application/vnd.fdsn.seed": { + "source": "iana", + "extensions": ["seed","dataless"] + }, + "application/vnd.ffsns": { + "source": "iana" + }, + "application/vnd.ficlab.flb+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.filmit.zfc": { + "source": "iana" + }, + "application/vnd.fints": { + "source": "iana" + }, + "application/vnd.firemonkeys.cloudcell": { + "source": "iana" + }, + "application/vnd.flographit": { + "source": "iana", + "extensions": ["gph"] + }, + "application/vnd.fluxtime.clip": { + "source": "iana", + "extensions": ["ftc"] + }, + "application/vnd.font-fontforge-sfd": { + "source": "iana" + }, + "application/vnd.framemaker": { + "source": "iana", + "extensions": ["fm","frame","maker","book"] + }, + "application/vnd.frogans.fnc": { + "source": "iana", + "extensions": ["fnc"] + }, + "application/vnd.frogans.ltf": { + "source": "iana", + "extensions": ["ltf"] + }, + "application/vnd.fsc.weblaunch": { + "source": "iana", + "extensions": ["fsc"] + }, + "application/vnd.fujifilm.fb.docuworks": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.binder": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.jfi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.fujitsu.oasys": { + "source": "iana", + "extensions": ["oas"] + }, + "application/vnd.fujitsu.oasys2": { + "source": "iana", + "extensions": ["oa2"] + }, + "application/vnd.fujitsu.oasys3": { + "source": "iana", + "extensions": ["oa3"] + }, + "application/vnd.fujitsu.oasysgp": { + "source": "iana", + "extensions": ["fg5"] + }, + "application/vnd.fujitsu.oasysprs": { + "source": "iana", + "extensions": ["bh2"] + }, + "application/vnd.fujixerox.art-ex": { + "source": "iana" + }, + "application/vnd.fujixerox.art4": { + "source": "iana" + }, + "application/vnd.fujixerox.ddd": { + "source": "iana", + "extensions": ["ddd"] + }, + "application/vnd.fujixerox.docuworks": { + "source": "iana", + "extensions": ["xdw"] + }, + "application/vnd.fujixerox.docuworks.binder": { + "source": "iana", + "extensions": ["xbd"] + }, + "application/vnd.fujixerox.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujixerox.hbpl": { + "source": "iana" + }, + "application/vnd.fut-misnet": { + "source": "iana" + }, + "application/vnd.futoin+cbor": { + "source": "iana" + }, + "application/vnd.futoin+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.fuzzysheet": { + "source": "iana", + "extensions": ["fzs"] + }, + "application/vnd.genomatix.tuxedo": { + "source": "iana", + "extensions": ["txd"] + }, + "application/vnd.gentics.grd+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geo+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geocube+xml": { + 
"source": "iana", + "compressible": true + }, + "application/vnd.geogebra.file": { + "source": "iana", + "extensions": ["ggb"] + }, + "application/vnd.geogebra.slides": { + "source": "iana" + }, + "application/vnd.geogebra.tool": { + "source": "iana", + "extensions": ["ggt"] + }, + "application/vnd.geometry-explorer": { + "source": "iana", + "extensions": ["gex","gre"] + }, + "application/vnd.geonext": { + "source": "iana", + "extensions": ["gxt"] + }, + "application/vnd.geoplan": { + "source": "iana", + "extensions": ["g2w"] + }, + "application/vnd.geospace": { + "source": "iana", + "extensions": ["g3w"] + }, + "application/vnd.gerber": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt-response": { + "source": "iana" + }, + "application/vnd.gmx": { + "source": "iana", + "extensions": ["gmx"] + }, + "application/vnd.google-apps.document": { + "compressible": false, + "extensions": ["gdoc"] + }, + "application/vnd.google-apps.presentation": { + "compressible": false, + "extensions": ["gslides"] + }, + "application/vnd.google-apps.spreadsheet": { + "compressible": false, + "extensions": ["gsheet"] + }, + "application/vnd.google-earth.kml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["kml"] + }, + "application/vnd.google-earth.kmz": { + "source": "iana", + "compressible": false, + "extensions": ["kmz"] + }, + "application/vnd.gov.sk.e-form+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.gov.sk.e-form+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.gov.sk.xmldatacontainer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.grafeq": { + "source": "iana", + "extensions": ["gqf","gqs"] + }, + "application/vnd.gridmp": { + "source": "iana" + }, + "application/vnd.groove-account": { + "source": "iana", + "extensions": ["gac"] + }, + "application/vnd.groove-help": { + "source": "iana", + "extensions": ["ghf"] + }, + "application/vnd.groove-identity-message": { + "source": "iana", + "extensions": ["gim"] + }, + "application/vnd.groove-injector": { + "source": "iana", + "extensions": ["grv"] + }, + "application/vnd.groove-tool-message": { + "source": "iana", + "extensions": ["gtm"] + }, + "application/vnd.groove-tool-template": { + "source": "iana", + "extensions": ["tpl"] + }, + "application/vnd.groove-vcard": { + "source": "iana", + "extensions": ["vcg"] + }, + "application/vnd.hal+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hal+xml": { + "source": "iana", + "compressible": true, + "extensions": ["hal"] + }, + "application/vnd.handheld-entertainment+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zmm"] + }, + "application/vnd.hbci": { + "source": "iana", + "extensions": ["hbci"] + }, + "application/vnd.hc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hcl-bireports": { + "source": "iana" + }, + "application/vnd.hdt": { + "source": "iana" + }, + "application/vnd.heroku+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hhe.lesson-player": { + "source": "iana", + "extensions": ["les"] + }, + "application/vnd.hl7cda+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.hl7v2+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.hp-hpgl": { + "source": "iana", + "extensions": ["hpgl"] + }, + 
"application/vnd.hp-hpid": { + "source": "iana", + "extensions": ["hpid"] + }, + "application/vnd.hp-hps": { + "source": "iana", + "extensions": ["hps"] + }, + "application/vnd.hp-jlyt": { + "source": "iana", + "extensions": ["jlt"] + }, + "application/vnd.hp-pcl": { + "source": "iana", + "extensions": ["pcl"] + }, + "application/vnd.hp-pclxl": { + "source": "iana", + "extensions": ["pclxl"] + }, + "application/vnd.httphone": { + "source": "iana" + }, + "application/vnd.hydrostatix.sof-data": { + "source": "iana", + "extensions": ["sfd-hdstx"] + }, + "application/vnd.hyper+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyper-item+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyperdrive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hzn-3d-crossword": { + "source": "iana" + }, + "application/vnd.ibm.afplinedata": { + "source": "iana" + }, + "application/vnd.ibm.electronic-media": { + "source": "iana" + }, + "application/vnd.ibm.minipay": { + "source": "iana", + "extensions": ["mpy"] + }, + "application/vnd.ibm.modcap": { + "source": "iana", + "extensions": ["afp","listafp","list3820"] + }, + "application/vnd.ibm.rights-management": { + "source": "iana", + "extensions": ["irm"] + }, + "application/vnd.ibm.secure-container": { + "source": "iana", + "extensions": ["sc"] + }, + "application/vnd.iccprofile": { + "source": "iana", + "extensions": ["icc","icm"] + }, + "application/vnd.ieee.1905": { + "source": "iana" + }, + "application/vnd.igloader": { + "source": "iana", + "extensions": ["igl"] + }, + "application/vnd.imagemeter.folder+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.imagemeter.image+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.immervision-ivp": { + "source": "iana", + "extensions": ["ivp"] + }, + "application/vnd.immervision-ivu": { + "source": "iana", + "extensions": ["ivu"] + }, + "application/vnd.ims.imsccv1p1": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p2": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p3": { + "source": "iana" + }, + "application/vnd.ims.lis.v2.result+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolconsumerprofile+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy.id+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings.simple+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.informedcontrol.rms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.informix-visionary": { + "source": "iana" + }, + "application/vnd.infotech.project": { + "source": "iana" + }, + "application/vnd.infotech.project+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.innopath.wamp.notification": { + "source": "iana" + }, + "application/vnd.insors.igm": { + "source": "iana", + "extensions": ["igm"] + }, + "application/vnd.intercon.formnet": { + "source": "iana", + "extensions": ["xpw","xpx"] + }, + "application/vnd.intergeo": { + "source": "iana", + "extensions": ["i2g"] + }, + "application/vnd.intertrust.digibox": { + "source": "iana" + }, + "application/vnd.intertrust.nncp": { + "source": "iana" + }, + 
"application/vnd.intu.qbo": { + "source": "iana", + "extensions": ["qbo"] + }, + "application/vnd.intu.qfx": { + "source": "iana", + "extensions": ["qfx"] + }, + "application/vnd.iptc.g2.catalogitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.conceptitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.knowledgeitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.packageitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.planningitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ipunplugged.rcprofile": { + "source": "iana", + "extensions": ["rcprofile"] + }, + "application/vnd.irepository.package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["irp"] + }, + "application/vnd.is-xpr": { + "source": "iana", + "extensions": ["xpr"] + }, + "application/vnd.isac.fcs": { + "source": "iana", + "extensions": ["fcs"] + }, + "application/vnd.iso11783-10+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.jam": { + "source": "iana", + "extensions": ["jam"] + }, + "application/vnd.japannet-directory-service": { + "source": "iana" + }, + "application/vnd.japannet-jpnstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-payment-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-registration": { + "source": "iana" + }, + "application/vnd.japannet-registration-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-setstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-verification": { + "source": "iana" + }, + "application/vnd.japannet-verification-wakeup": { + "source": "iana" + }, + "application/vnd.jcp.javame.midlet-rms": { + "source": "iana", + "extensions": ["rms"] + }, + "application/vnd.jisp": { + "source": "iana", + "extensions": ["jisp"] + }, + "application/vnd.joost.joda-archive": { + "source": "iana", + "extensions": ["joda"] + }, + "application/vnd.jsk.isdn-ngn": { + "source": "iana" + }, + "application/vnd.kahootz": { + "source": "iana", + "extensions": ["ktz","ktr"] + }, + "application/vnd.kde.karbon": { + "source": "iana", + "extensions": ["karbon"] + }, + "application/vnd.kde.kchart": { + "source": "iana", + "extensions": ["chrt"] + }, + "application/vnd.kde.kformula": { + "source": "iana", + "extensions": ["kfo"] + }, + "application/vnd.kde.kivio": { + "source": "iana", + "extensions": ["flw"] + }, + "application/vnd.kde.kontour": { + "source": "iana", + "extensions": ["kon"] + }, + "application/vnd.kde.kpresenter": { + "source": "iana", + "extensions": ["kpr","kpt"] + }, + "application/vnd.kde.kspread": { + "source": "iana", + "extensions": ["ksp"] + }, + "application/vnd.kde.kword": { + "source": "iana", + "extensions": ["kwd","kwt"] + }, + "application/vnd.kenameaapp": { + "source": "iana", + "extensions": ["htke"] + }, + "application/vnd.kidspiration": { + "source": "iana", + "extensions": ["kia"] + }, + "application/vnd.kinar": { + "source": "iana", + "extensions": ["kne","knp"] + }, + "application/vnd.koan": { + "source": "iana", + "extensions": ["skp","skd","skt","skm"] + }, + "application/vnd.kodak-descriptor": { + "source": "iana", + "extensions": ["sse"] + }, + "application/vnd.las": { + "source": "iana" + }, + 
"application/vnd.las.las+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.las.las+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lasxml"] + }, + "application/vnd.laszip": { + "source": "iana" + }, + "application/vnd.leap+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.liberty-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.llamagraphics.life-balance.desktop": { + "source": "iana", + "extensions": ["lbd"] + }, + "application/vnd.llamagraphics.life-balance.exchange+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lbe"] + }, + "application/vnd.logipipe.circuit+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.loom": { + "source": "iana" + }, + "application/vnd.lotus-1-2-3": { + "source": "iana", + "extensions": ["123"] + }, + "application/vnd.lotus-approach": { + "source": "iana", + "extensions": ["apr"] + }, + "application/vnd.lotus-freelance": { + "source": "iana", + "extensions": ["pre"] + }, + "application/vnd.lotus-notes": { + "source": "iana", + "extensions": ["nsf"] + }, + "application/vnd.lotus-organizer": { + "source": "iana", + "extensions": ["org"] + }, + "application/vnd.lotus-screencam": { + "source": "iana", + "extensions": ["scm"] + }, + "application/vnd.lotus-wordpro": { + "source": "iana", + "extensions": ["lwp"] + }, + "application/vnd.macports.portpkg": { + "source": "iana", + "extensions": ["portpkg"] + }, + "application/vnd.mapbox-vector-tile": { + "source": "iana", + "extensions": ["mvt"] + }, + "application/vnd.marlin.drm.actiontoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.conftoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.license+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.mdcf": { + "source": "iana" + }, + "application/vnd.mason+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.maxar.archive.3tz+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.maxmind.maxmind-db": { + "source": "iana" + }, + "application/vnd.mcd": { + "source": "iana", + "extensions": ["mcd"] + }, + "application/vnd.medcalcdata": { + "source": "iana", + "extensions": ["mc1"] + }, + "application/vnd.mediastation.cdkey": { + "source": "iana", + "extensions": ["cdkey"] + }, + "application/vnd.meridian-slingshot": { + "source": "iana" + }, + "application/vnd.mfer": { + "source": "iana", + "extensions": ["mwf"] + }, + "application/vnd.mfmp": { + "source": "iana", + "extensions": ["mfm"] + }, + "application/vnd.micro+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.micrografx.flo": { + "source": "iana", + "extensions": ["flo"] + }, + "application/vnd.micrografx.igx": { + "source": "iana", + "extensions": ["igx"] + }, + "application/vnd.microsoft.portable-executable": { + "source": "iana" + }, + "application/vnd.microsoft.windows.thumbnail-cache": { + "source": "iana" + }, + "application/vnd.miele+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.mif": { + "source": "iana", + "extensions": ["mif"] + }, + "application/vnd.minisoft-hp3000-save": { + "source": "iana" + }, + "application/vnd.mitsubishi.misty-guard.trustweb": { + "source": "iana" + }, + "application/vnd.mobius.daf": { + "source": "iana", + "extensions": ["daf"] + }, + "application/vnd.mobius.dis": { + "source": "iana", + "extensions": ["dis"] + }, + 
"application/vnd.mobius.mbk": { + "source": "iana", + "extensions": ["mbk"] + }, + "application/vnd.mobius.mqy": { + "source": "iana", + "extensions": ["mqy"] + }, + "application/vnd.mobius.msl": { + "source": "iana", + "extensions": ["msl"] + }, + "application/vnd.mobius.plc": { + "source": "iana", + "extensions": ["plc"] + }, + "application/vnd.mobius.txf": { + "source": "iana", + "extensions": ["txf"] + }, + "application/vnd.mophun.application": { + "source": "iana", + "extensions": ["mpn"] + }, + "application/vnd.mophun.certificate": { + "source": "iana", + "extensions": ["mpc"] + }, + "application/vnd.motorola.flexsuite": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.adsi": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.fis": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.gotap": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.kmr": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.ttc": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.wem": { + "source": "iana" + }, + "application/vnd.motorola.iprm": { + "source": "iana" + }, + "application/vnd.mozilla.xul+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xul"] + }, + "application/vnd.ms-3mfdocument": { + "source": "iana" + }, + "application/vnd.ms-artgalry": { + "source": "iana", + "extensions": ["cil"] + }, + "application/vnd.ms-asf": { + "source": "iana" + }, + "application/vnd.ms-cab-compressed": { + "source": "iana", + "extensions": ["cab"] + }, + "application/vnd.ms-color.iccprofile": { + "source": "apache" + }, + "application/vnd.ms-excel": { + "source": "iana", + "compressible": false, + "extensions": ["xls","xlm","xla","xlc","xlt","xlw"] + }, + "application/vnd.ms-excel.addin.macroenabled.12": { + "source": "iana", + "extensions": ["xlam"] + }, + "application/vnd.ms-excel.sheet.binary.macroenabled.12": { + "source": "iana", + "extensions": ["xlsb"] + }, + "application/vnd.ms-excel.sheet.macroenabled.12": { + "source": "iana", + "extensions": ["xlsm"] + }, + "application/vnd.ms-excel.template.macroenabled.12": { + "source": "iana", + "extensions": ["xltm"] + }, + "application/vnd.ms-fontobject": { + "source": "iana", + "compressible": true, + "extensions": ["eot"] + }, + "application/vnd.ms-htmlhelp": { + "source": "iana", + "extensions": ["chm"] + }, + "application/vnd.ms-ims": { + "source": "iana", + "extensions": ["ims"] + }, + "application/vnd.ms-lrm": { + "source": "iana", + "extensions": ["lrm"] + }, + "application/vnd.ms-office.activex+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-officetheme": { + "source": "iana", + "extensions": ["thmx"] + }, + "application/vnd.ms-opentype": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-outlook": { + "compressible": false, + "extensions": ["msg"] + }, + "application/vnd.ms-package.obfuscated-opentype": { + "source": "apache" + }, + "application/vnd.ms-pki.seccat": { + "source": "apache", + "extensions": ["cat"] + }, + "application/vnd.ms-pki.stl": { + "source": "apache", + "extensions": ["stl"] + }, + "application/vnd.ms-playready.initiator+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-powerpoint": { + "source": "iana", + "compressible": false, + "extensions": ["ppt","pps","pot"] + }, + "application/vnd.ms-powerpoint.addin.macroenabled.12": { + "source": "iana", + "extensions": ["ppam"] + }, + "application/vnd.ms-powerpoint.presentation.macroenabled.12": { + "source": "iana", + 
"extensions": ["pptm"] + }, + "application/vnd.ms-powerpoint.slide.macroenabled.12": { + "source": "iana", + "extensions": ["sldm"] + }, + "application/vnd.ms-powerpoint.slideshow.macroenabled.12": { + "source": "iana", + "extensions": ["ppsm"] + }, + "application/vnd.ms-powerpoint.template.macroenabled.12": { + "source": "iana", + "extensions": ["potm"] + }, + "application/vnd.ms-printdevicecapabilities+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-printing.printticket+xml": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-printschematicket+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-project": { + "source": "iana", + "extensions": ["mpp","mpt"] + }, + "application/vnd.ms-tnef": { + "source": "iana" + }, + "application/vnd.ms-windows.devicepairing": { + "source": "iana" + }, + "application/vnd.ms-windows.nwprinting.oob": { + "source": "iana" + }, + "application/vnd.ms-windows.printerpairing": { + "source": "iana" + }, + "application/vnd.ms-windows.wsd.oob": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-resp": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-resp": { + "source": "iana" + }, + "application/vnd.ms-word.document.macroenabled.12": { + "source": "iana", + "extensions": ["docm"] + }, + "application/vnd.ms-word.template.macroenabled.12": { + "source": "iana", + "extensions": ["dotm"] + }, + "application/vnd.ms-works": { + "source": "iana", + "extensions": ["wps","wks","wcm","wdb"] + }, + "application/vnd.ms-wpl": { + "source": "iana", + "extensions": ["wpl"] + }, + "application/vnd.ms-xpsdocument": { + "source": "iana", + "compressible": false, + "extensions": ["xps"] + }, + "application/vnd.msa-disk-image": { + "source": "iana" + }, + "application/vnd.mseq": { + "source": "iana", + "extensions": ["mseq"] + }, + "application/vnd.msign": { + "source": "iana" + }, + "application/vnd.multiad.creator": { + "source": "iana" + }, + "application/vnd.multiad.creator.cif": { + "source": "iana" + }, + "application/vnd.music-niff": { + "source": "iana" + }, + "application/vnd.musician": { + "source": "iana", + "extensions": ["mus"] + }, + "application/vnd.muvee.style": { + "source": "iana", + "extensions": ["msty"] + }, + "application/vnd.mynfc": { + "source": "iana", + "extensions": ["taglet"] + }, + "application/vnd.nacamar.ybrid+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ncd.control": { + "source": "iana" + }, + "application/vnd.ncd.reference": { + "source": "iana" + }, + "application/vnd.nearst.inv+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.nebumind.line": { + "source": "iana" + }, + "application/vnd.nervana": { + "source": "iana" + }, + "application/vnd.netfpx": { + "source": "iana" + }, + "application/vnd.neurolanguage.nlu": { + "source": "iana", + "extensions": ["nlu"] + }, + "application/vnd.nimn": { + "source": "iana" + }, + "application/vnd.nintendo.nitro.rom": { + "source": "iana" + }, + "application/vnd.nintendo.snes.rom": { + "source": "iana" + }, + "application/vnd.nitf": { + "source": "iana", + "extensions": ["ntf","nitf"] + }, + "application/vnd.noblenet-directory": { + "source": "iana", + "extensions": ["nnd"] + }, + "application/vnd.noblenet-sealer": { + "source": "iana", + "extensions": ["nns"] + }, + "application/vnd.noblenet-web": { + "source": "iana", + 
"extensions": ["nnw"] + }, + "application/vnd.nokia.catalogs": { + "source": "iana" + }, + "application/vnd.nokia.conml+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.conml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.iptv.config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.isds-radio-presets": { + "source": "iana" + }, + "application/vnd.nokia.landmark+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.landmark+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.landmarkcollection+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.n-gage.ac+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ac"] + }, + "application/vnd.nokia.n-gage.data": { + "source": "iana", + "extensions": ["ngdat"] + }, + "application/vnd.nokia.n-gage.symbian.install": { + "source": "iana", + "extensions": ["n-gage"] + }, + "application/vnd.nokia.ncd": { + "source": "iana" + }, + "application/vnd.nokia.pcd+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.pcd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.radio-preset": { + "source": "iana", + "extensions": ["rpst"] + }, + "application/vnd.nokia.radio-presets": { + "source": "iana", + "extensions": ["rpss"] + }, + "application/vnd.novadigm.edm": { + "source": "iana", + "extensions": ["edm"] + }, + "application/vnd.novadigm.edx": { + "source": "iana", + "extensions": ["edx"] + }, + "application/vnd.novadigm.ext": { + "source": "iana", + "extensions": ["ext"] + }, + "application/vnd.ntt-local.content-share": { + "source": "iana" + }, + "application/vnd.ntt-local.file-transfer": { + "source": "iana" + }, + "application/vnd.ntt-local.ogw_remote-access": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_remote": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_tcp_stream": { + "source": "iana" + }, + "application/vnd.oasis.opendocument.chart": { + "source": "iana", + "extensions": ["odc"] + }, + "application/vnd.oasis.opendocument.chart-template": { + "source": "iana", + "extensions": ["otc"] + }, + "application/vnd.oasis.opendocument.database": { + "source": "iana", + "extensions": ["odb"] + }, + "application/vnd.oasis.opendocument.formula": { + "source": "iana", + "extensions": ["odf"] + }, + "application/vnd.oasis.opendocument.formula-template": { + "source": "iana", + "extensions": ["odft"] + }, + "application/vnd.oasis.opendocument.graphics": { + "source": "iana", + "compressible": false, + "extensions": ["odg"] + }, + "application/vnd.oasis.opendocument.graphics-template": { + "source": "iana", + "extensions": ["otg"] + }, + "application/vnd.oasis.opendocument.image": { + "source": "iana", + "extensions": ["odi"] + }, + "application/vnd.oasis.opendocument.image-template": { + "source": "iana", + "extensions": ["oti"] + }, + "application/vnd.oasis.opendocument.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["odp"] + }, + "application/vnd.oasis.opendocument.presentation-template": { + "source": "iana", + "extensions": ["otp"] + }, + "application/vnd.oasis.opendocument.spreadsheet": { + "source": "iana", + "compressible": false, + "extensions": ["ods"] + }, + "application/vnd.oasis.opendocument.spreadsheet-template": { + "source": "iana", + "extensions": ["ots"] + }, + "application/vnd.oasis.opendocument.text": { + "source": "iana", + "compressible": false, + "extensions": ["odt"] + }, + 
"application/vnd.oasis.opendocument.text-master": { + "source": "iana", + "extensions": ["odm"] + }, + "application/vnd.oasis.opendocument.text-template": { + "source": "iana", + "extensions": ["ott"] + }, + "application/vnd.oasis.opendocument.text-web": { + "source": "iana", + "extensions": ["oth"] + }, + "application/vnd.obn": { + "source": "iana" + }, + "application/vnd.ocf+cbor": { + "source": "iana" + }, + "application/vnd.oci.image.manifest.v1+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oftn.l10n+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessdownload+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessstreaming+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.cspg-hexbinary": { + "source": "iana" + }, + "application/vnd.oipf.dae.svg+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.dae.xhtml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.mippvcontrolmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.pae.gem": { + "source": "iana" + }, + "application/vnd.oipf.spdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.spdlist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.ueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.userprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.olpc-sugar": { + "source": "iana", + "extensions": ["xo"] + }, + "application/vnd.oma-scws-config": { + "source": "iana" + }, + "application/vnd.oma-scws-http-request": { + "source": "iana" + }, + "application/vnd.oma-scws-http-response": { + "source": "iana" + }, + "application/vnd.oma.bcast.associated-procedure-parameter+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.drm-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.imd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.ltkm": { + "source": "iana" + }, + "application/vnd.oma.bcast.notification+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.provisioningtrigger": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgboot": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgdd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sgdu": { + "source": "iana" + }, + "application/vnd.oma.bcast.simple-symbol-container": { + "source": "iana" + }, + "application/vnd.oma.bcast.smartcard-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sprov+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.stkm": { + "source": "iana" + }, + "application/vnd.oma.cab-address-book+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-feature-handler+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-pcc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-subs-invite+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-user-prefs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.dcd": { + "source": "iana" + }, + "application/vnd.oma.dcdc": { + "source": "iana" + }, + 
"application/vnd.oma.dd2+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dd2"] + }, + "application/vnd.oma.drm.risd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.group-usage-list+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+cbor": { + "source": "iana" + }, + "application/vnd.oma.lwm2m+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+tlv": { + "source": "iana" + }, + "application/vnd.oma.pal+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.detailed-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.final-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.groups+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.invocation-descriptor+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.optimized-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.push": { + "source": "iana" + }, + "application/vnd.oma.scidm.messages+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.xcap-directory+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.omads-email+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omads-file+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omads-folder+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omaloc-supl-init": { + "source": "iana" + }, + "application/vnd.onepager": { + "source": "iana" + }, + "application/vnd.onepagertamp": { + "source": "iana" + }, + "application/vnd.onepagertamx": { + "source": "iana" + }, + "application/vnd.onepagertat": { + "source": "iana" + }, + "application/vnd.onepagertatp": { + "source": "iana" + }, + "application/vnd.onepagertatx": { + "source": "iana" + }, + "application/vnd.openblox.game+xml": { + "source": "iana", + "compressible": true, + "extensions": ["obgx"] + }, + "application/vnd.openblox.game-binary": { + "source": "iana" + }, + "application/vnd.openeye.oeb": { + "source": "iana" + }, + "application/vnd.openofficeorg.extension": { + "source": "apache", + "extensions": ["oxt"] + }, + "application/vnd.openstreetmap.data+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osm"] + }, + "application/vnd.opentimestamps.ots": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.custom-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.customxmlproperties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawing+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chart+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml": { + 
"source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.extended-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["pptx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide": { + "source": "iana", + "extensions": ["sldx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow": { + "source": "iana", + "extensions": ["ppsx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tags+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.template": { + "source": "iana", + "extensions": ["potx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml": { + 
"source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": { + "source": "iana", + "compressible": false, + "extensions": ["xlsx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template": { + "source": "iana", + "extensions": ["xltx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.theme+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.themeoverride+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.vmldrawing": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document": { + "source": "iana", + "compressible": false, + "extensions": ["docx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml": { + "source": "iana", + 
"compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template": { + "source": "iana", + "extensions": ["dotx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.core-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.relationships+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oracle.resource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.orange.indata": { + "source": "iana" + }, + "application/vnd.osa.netdeploy": { + "source": "iana" + }, + "application/vnd.osgeo.mapguide.package": { + "source": "iana", + "extensions": ["mgp"] + }, + "application/vnd.osgi.bundle": { + "source": "iana" + }, + "application/vnd.osgi.dp": { + "source": "iana", + "extensions": ["dp"] + }, + "application/vnd.osgi.subsystem": { + "source": "iana", + "extensions": ["esa"] + }, + "application/vnd.otps.ct-kip+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oxli.countgraph": { + "source": "iana" + }, + "application/vnd.pagerduty+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.palm": { + "source": "iana", + "extensions": ["pdb","pqa","oprc"] + }, + "application/vnd.panoply": { + "source": "iana" + }, + "application/vnd.paos.xml": { + "source": "iana" + }, + "application/vnd.patentdive": { + "source": "iana" + }, + "application/vnd.patientecommsdoc": { + "source": "iana" + }, + "application/vnd.pawaafile": { + "source": "iana", + "extensions": ["paw"] + }, + "application/vnd.pcos": { + "source": "iana" + }, + "application/vnd.pg.format": { + "source": "iana", + "extensions": ["str"] + }, + "application/vnd.pg.osasli": { + "source": "iana", + "extensions": ["ei6"] + }, + "application/vnd.piaccess.application-licence": { + "source": "iana" + }, + "application/vnd.picsel": { + "source": "iana", + "extensions": ["efif"] + }, + "application/vnd.pmi.widget": { + "source": "iana", + "extensions": ["wg"] + }, + "application/vnd.poc.group-advertisement+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.pocketlearn": { + "source": "iana", + "extensions": ["plf"] + }, + "application/vnd.powerbuilder6": { + "source": "iana", + "extensions": ["pbd"] + }, + "application/vnd.powerbuilder6-s": { + "source": "iana" + }, + 
"application/vnd.powerbuilder7": { + "source": "iana" + }, + "application/vnd.powerbuilder7-s": { + "source": "iana" + }, + "application/vnd.powerbuilder75": { + "source": "iana" + }, + "application/vnd.powerbuilder75-s": { + "source": "iana" + }, + "application/vnd.preminet": { + "source": "iana" + }, + "application/vnd.previewsystems.box": { + "source": "iana", + "extensions": ["box"] + }, + "application/vnd.proteus.magazine": { + "source": "iana", + "extensions": ["mgz"] + }, + "application/vnd.psfs": { + "source": "iana" + }, + "application/vnd.publishare-delta-tree": { + "source": "iana", + "extensions": ["qps"] + }, + "application/vnd.pvi.ptid1": { + "source": "iana", + "extensions": ["ptid"] + }, + "application/vnd.pwg-multiplexed": { + "source": "iana" + }, + "application/vnd.pwg-xhtml-print+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.qualcomm.brew-app-res": { + "source": "iana" + }, + "application/vnd.quarantainenet": { + "source": "iana" + }, + "application/vnd.quark.quarkxpress": { + "source": "iana", + "extensions": ["qxd","qxt","qwd","qwt","qxl","qxb"] + }, + "application/vnd.quobject-quoxdocument": { + "source": "iana" + }, + "application/vnd.radisys.moml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-stream+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-base+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-detect+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-sendrecv+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-group+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-speech+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-transform+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.rainstor.data": { + "source": "iana" + }, + "application/vnd.rapid": { + "source": "iana" + }, + "application/vnd.rar": { + "source": "iana", + "extensions": ["rar"] + }, + "application/vnd.realvnc.bed": { + "source": "iana", + "extensions": ["bed"] + }, + "application/vnd.recordare.musicxml": { + "source": "iana", + "extensions": ["mxl"] + }, + "application/vnd.recordare.musicxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musicxml"] + }, + "application/vnd.renlearn.rlprint": { + "source": "iana" + }, + "application/vnd.resilient.logic": { + "source": "iana" + }, + "application/vnd.restful+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.rig.cryptonote": { + "source": "iana", + "extensions": ["cryptonote"] + }, + "application/vnd.rim.cod": { + "source": "apache", + "extensions": ["cod"] + }, + 
"application/vnd.rn-realmedia": { + "source": "apache", + "extensions": ["rm"] + }, + "application/vnd.rn-realmedia-vbr": { + "source": "apache", + "extensions": ["rmvb"] + }, + "application/vnd.route66.link66+xml": { + "source": "iana", + "compressible": true, + "extensions": ["link66"] + }, + "application/vnd.rs-274x": { + "source": "iana" + }, + "application/vnd.ruckus.download": { + "source": "iana" + }, + "application/vnd.s3sms": { + "source": "iana" + }, + "application/vnd.sailingtracker.track": { + "source": "iana", + "extensions": ["st"] + }, + "application/vnd.sar": { + "source": "iana" + }, + "application/vnd.sbm.cid": { + "source": "iana" + }, + "application/vnd.sbm.mid2": { + "source": "iana" + }, + "application/vnd.scribus": { + "source": "iana" + }, + "application/vnd.sealed.3df": { + "source": "iana" + }, + "application/vnd.sealed.csf": { + "source": "iana" + }, + "application/vnd.sealed.doc": { + "source": "iana" + }, + "application/vnd.sealed.eml": { + "source": "iana" + }, + "application/vnd.sealed.mht": { + "source": "iana" + }, + "application/vnd.sealed.net": { + "source": "iana" + }, + "application/vnd.sealed.ppt": { + "source": "iana" + }, + "application/vnd.sealed.tiff": { + "source": "iana" + }, + "application/vnd.sealed.xls": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.html": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.pdf": { + "source": "iana" + }, + "application/vnd.seemail": { + "source": "iana", + "extensions": ["see"] + }, + "application/vnd.seis+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.sema": { + "source": "iana", + "extensions": ["sema"] + }, + "application/vnd.semd": { + "source": "iana", + "extensions": ["semd"] + }, + "application/vnd.semf": { + "source": "iana", + "extensions": ["semf"] + }, + "application/vnd.shade-save-file": { + "source": "iana" + }, + "application/vnd.shana.informed.formdata": { + "source": "iana", + "extensions": ["ifm"] + }, + "application/vnd.shana.informed.formtemplate": { + "source": "iana", + "extensions": ["itp"] + }, + "application/vnd.shana.informed.interchange": { + "source": "iana", + "extensions": ["iif"] + }, + "application/vnd.shana.informed.package": { + "source": "iana", + "extensions": ["ipk"] + }, + "application/vnd.shootproof+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.shopkick+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.shp": { + "source": "iana" + }, + "application/vnd.shx": { + "source": "iana" + }, + "application/vnd.sigrok.session": { + "source": "iana" + }, + "application/vnd.simtech-mindmapper": { + "source": "iana", + "extensions": ["twd","twds"] + }, + "application/vnd.siren+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.smaf": { + "source": "iana", + "extensions": ["mmf"] + }, + "application/vnd.smart.notebook": { + "source": "iana" + }, + "application/vnd.smart.teacher": { + "source": "iana", + "extensions": ["teacher"] + }, + "application/vnd.snesdev-page-table": { + "source": "iana" + }, + "application/vnd.software602.filler.form+xml": { + "source": "iana", + "compressible": true, + "extensions": ["fo"] + }, + "application/vnd.software602.filler.form-xml-zip": { + "source": "iana" + }, + "application/vnd.solent.sdkm+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sdkm","sdkd"] + }, + "application/vnd.spotfire.dxp": { + "source": "iana", + "extensions": ["dxp"] + }, + "application/vnd.spotfire.sfs": { + 
"source": "iana", + "extensions": ["sfs"] + }, + "application/vnd.sqlite3": { + "source": "iana" + }, + "application/vnd.sss-cod": { + "source": "iana" + }, + "application/vnd.sss-dtf": { + "source": "iana" + }, + "application/vnd.sss-ntf": { + "source": "iana" + }, + "application/vnd.stardivision.calc": { + "source": "apache", + "extensions": ["sdc"] + }, + "application/vnd.stardivision.draw": { + "source": "apache", + "extensions": ["sda"] + }, + "application/vnd.stardivision.impress": { + "source": "apache", + "extensions": ["sdd"] + }, + "application/vnd.stardivision.math": { + "source": "apache", + "extensions": ["smf"] + }, + "application/vnd.stardivision.writer": { + "source": "apache", + "extensions": ["sdw","vor"] + }, + "application/vnd.stardivision.writer-global": { + "source": "apache", + "extensions": ["sgl"] + }, + "application/vnd.stepmania.package": { + "source": "iana", + "extensions": ["smzip"] + }, + "application/vnd.stepmania.stepchart": { + "source": "iana", + "extensions": ["sm"] + }, + "application/vnd.street-stream": { + "source": "iana" + }, + "application/vnd.sun.wadl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wadl"] + }, + "application/vnd.sun.xml.calc": { + "source": "apache", + "extensions": ["sxc"] + }, + "application/vnd.sun.xml.calc.template": { + "source": "apache", + "extensions": ["stc"] + }, + "application/vnd.sun.xml.draw": { + "source": "apache", + "extensions": ["sxd"] + }, + "application/vnd.sun.xml.draw.template": { + "source": "apache", + "extensions": ["std"] + }, + "application/vnd.sun.xml.impress": { + "source": "apache", + "extensions": ["sxi"] + }, + "application/vnd.sun.xml.impress.template": { + "source": "apache", + "extensions": ["sti"] + }, + "application/vnd.sun.xml.math": { + "source": "apache", + "extensions": ["sxm"] + }, + "application/vnd.sun.xml.writer": { + "source": "apache", + "extensions": ["sxw"] + }, + "application/vnd.sun.xml.writer.global": { + "source": "apache", + "extensions": ["sxg"] + }, + "application/vnd.sun.xml.writer.template": { + "source": "apache", + "extensions": ["stw"] + }, + "application/vnd.sus-calendar": { + "source": "iana", + "extensions": ["sus","susp"] + }, + "application/vnd.svd": { + "source": "iana", + "extensions": ["svd"] + }, + "application/vnd.swiftview-ics": { + "source": "iana" + }, + "application/vnd.sycle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.syft+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.symbian.install": { + "source": "apache", + "extensions": ["sis","sisx"] + }, + "application/vnd.syncml+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["xsm"] + }, + "application/vnd.syncml.dm+wbxml": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["bdm"] + }, + "application/vnd.syncml.dm+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["xdm"] + }, + "application/vnd.syncml.dm.notification": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["ddf"] + }, + "application/vnd.syncml.dmtnds+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmtnds+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.syncml.ds.notification": { + "source": "iana" + }, + "application/vnd.tableschema+json": { + "source": 
"iana", + "compressible": true + }, + "application/vnd.tao.intent-module-archive": { + "source": "iana", + "extensions": ["tao"] + }, + "application/vnd.tcpdump.pcap": { + "source": "iana", + "extensions": ["pcap","cap","dmp"] + }, + "application/vnd.think-cell.ppttc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.tmd.mediaflex.api+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.tml": { + "source": "iana" + }, + "application/vnd.tmobile-livetv": { + "source": "iana", + "extensions": ["tmo"] + }, + "application/vnd.tri.onesource": { + "source": "iana" + }, + "application/vnd.trid.tpt": { + "source": "iana", + "extensions": ["tpt"] + }, + "application/vnd.triscape.mxs": { + "source": "iana", + "extensions": ["mxs"] + }, + "application/vnd.trueapp": { + "source": "iana", + "extensions": ["tra"] + }, + "application/vnd.truedoc": { + "source": "iana" + }, + "application/vnd.ubisoft.webplayer": { + "source": "iana" + }, + "application/vnd.ufdl": { + "source": "iana", + "extensions": ["ufd","ufdl"] + }, + "application/vnd.uiq.theme": { + "source": "iana", + "extensions": ["utz"] + }, + "application/vnd.umajin": { + "source": "iana", + "extensions": ["umj"] + }, + "application/vnd.unity": { + "source": "iana", + "extensions": ["unityweb"] + }, + "application/vnd.uoml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uoml"] + }, + "application/vnd.uplanet.alert": { + "source": "iana" + }, + "application/vnd.uplanet.alert-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.channel": { + "source": "iana" + }, + "application/vnd.uplanet.channel-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.list": { + "source": "iana" + }, + "application/vnd.uplanet.list-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.signal": { + "source": "iana" + }, + "application/vnd.uri-map": { + "source": "iana" + }, + "application/vnd.valve.source.material": { + "source": "iana" + }, + "application/vnd.vcx": { + "source": "iana", + "extensions": ["vcx"] + }, + "application/vnd.vd-study": { + "source": "iana" + }, + "application/vnd.vectorworks": { + "source": "iana" + }, + "application/vnd.vel+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.verimatrix.vcas": { + "source": "iana" + }, + "application/vnd.veritone.aion+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.veryant.thin": { + "source": "iana" + }, + "application/vnd.ves.encrypted": { + "source": "iana" + }, + "application/vnd.vidsoft.vidconference": { + "source": "iana" + }, + "application/vnd.visio": { + "source": "iana", + "extensions": ["vsd","vst","vss","vsw"] + }, + "application/vnd.visionary": { + "source": "iana", + "extensions": ["vis"] + }, + "application/vnd.vividence.scriptfile": { + "source": "iana" + }, + "application/vnd.vsf": { + "source": "iana", + "extensions": ["vsf"] + }, + "application/vnd.wap.sic": { + "source": "iana" + }, + "application/vnd.wap.slc": { + "source": "iana" + }, + "application/vnd.wap.wbxml": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["wbxml"] + }, + 
"application/vnd.wap.wmlc": { + "source": "iana", + "extensions": ["wmlc"] + }, + "application/vnd.wap.wmlscriptc": { + "source": "iana", + "extensions": ["wmlsc"] + }, + "application/vnd.webturbo": { + "source": "iana", + "extensions": ["wtb"] + }, + "application/vnd.wfa.dpp": { + "source": "iana" + }, + "application/vnd.wfa.p2p": { + "source": "iana" + }, + "application/vnd.wfa.wsc": { + "source": "iana" + }, + "application/vnd.windows.devicepairing": { + "source": "iana" + }, + "application/vnd.wmc": { + "source": "iana" + }, + "application/vnd.wmf.bootstrap": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica.package": { + "source": "iana" + }, + "application/vnd.wolfram.player": { + "source": "iana", + "extensions": ["nbp"] + }, + "application/vnd.wordperfect": { + "source": "iana", + "extensions": ["wpd"] + }, + "application/vnd.wqd": { + "source": "iana", + "extensions": ["wqd"] + }, + "application/vnd.wrq-hp3000-labelled": { + "source": "iana" + }, + "application/vnd.wt.stf": { + "source": "iana", + "extensions": ["stf"] + }, + "application/vnd.wv.csp+wbxml": { + "source": "iana" + }, + "application/vnd.wv.csp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.wv.ssp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xacml+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.xara": { + "source": "iana", + "extensions": ["xar"] + }, + "application/vnd.xfdl": { + "source": "iana", + "extensions": ["xfdl"] + }, + "application/vnd.xfdl.webform": { + "source": "iana" + }, + "application/vnd.xmi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xmpie.cpkg": { + "source": "iana" + }, + "application/vnd.xmpie.dpkg": { + "source": "iana" + }, + "application/vnd.xmpie.plan": { + "source": "iana" + }, + "application/vnd.xmpie.ppkg": { + "source": "iana" + }, + "application/vnd.xmpie.xlim": { + "source": "iana" + }, + "application/vnd.yamaha.hv-dic": { + "source": "iana", + "extensions": ["hvd"] + }, + "application/vnd.yamaha.hv-script": { + "source": "iana", + "extensions": ["hvs"] + }, + "application/vnd.yamaha.hv-voice": { + "source": "iana", + "extensions": ["hvp"] + }, + "application/vnd.yamaha.openscoreformat": { + "source": "iana", + "extensions": ["osf"] + }, + "application/vnd.yamaha.openscoreformat.osfpvg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osfpvg"] + }, + "application/vnd.yamaha.remote-setup": { + "source": "iana" + }, + "application/vnd.yamaha.smaf-audio": { + "source": "iana", + "extensions": ["saf"] + }, + "application/vnd.yamaha.smaf-phrase": { + "source": "iana", + "extensions": ["spf"] + }, + "application/vnd.yamaha.through-ngn": { + "source": "iana" + }, + "application/vnd.yamaha.tunnel-udpencap": { + "source": "iana" + }, + "application/vnd.yaoweme": { + "source": "iana" + }, + "application/vnd.yellowriver-custom-menu": { + "source": "iana", + "extensions": ["cmp"] + }, + "application/vnd.youtube.yt": { + "source": "iana" + }, + "application/vnd.zul": { + "source": "iana", + "extensions": ["zir","zirz"] + }, + "application/vnd.zzazz.deck+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zaz"] + }, + "application/voicexml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["vxml"] + }, + "application/voucher-cms+json": { + "source": "iana", + "compressible": true + }, + "application/vq-rtcpxr": { + "source": "iana" + }, + 
"application/wasm": { + "source": "iana", + "compressible": true, + "extensions": ["wasm"] + }, + "application/watcherinfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wif"] + }, + "application/webpush-options+json": { + "source": "iana", + "compressible": true + }, + "application/whoispp-query": { + "source": "iana" + }, + "application/whoispp-response": { + "source": "iana" + }, + "application/widget": { + "source": "iana", + "extensions": ["wgt"] + }, + "application/winhlp": { + "source": "apache", + "extensions": ["hlp"] + }, + "application/wita": { + "source": "iana" + }, + "application/wordperfect5.1": { + "source": "iana" + }, + "application/wsdl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wsdl"] + }, + "application/wspolicy+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wspolicy"] + }, + "application/x-7z-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["7z"] + }, + "application/x-abiword": { + "source": "apache", + "extensions": ["abw"] + }, + "application/x-ace-compressed": { + "source": "apache", + "extensions": ["ace"] + }, + "application/x-amf": { + "source": "apache" + }, + "application/x-apple-diskimage": { + "source": "apache", + "extensions": ["dmg"] + }, + "application/x-arj": { + "compressible": false, + "extensions": ["arj"] + }, + "application/x-authorware-bin": { + "source": "apache", + "extensions": ["aab","x32","u32","vox"] + }, + "application/x-authorware-map": { + "source": "apache", + "extensions": ["aam"] + }, + "application/x-authorware-seg": { + "source": "apache", + "extensions": ["aas"] + }, + "application/x-bcpio": { + "source": "apache", + "extensions": ["bcpio"] + }, + "application/x-bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/x-bittorrent": { + "source": "apache", + "extensions": ["torrent"] + }, + "application/x-blorb": { + "source": "apache", + "extensions": ["blb","blorb"] + }, + "application/x-bzip": { + "source": "apache", + "compressible": false, + "extensions": ["bz"] + }, + "application/x-bzip2": { + "source": "apache", + "compressible": false, + "extensions": ["bz2","boz"] + }, + "application/x-cbr": { + "source": "apache", + "extensions": ["cbr","cba","cbt","cbz","cb7"] + }, + "application/x-cdlink": { + "source": "apache", + "extensions": ["vcd"] + }, + "application/x-cfs-compressed": { + "source": "apache", + "extensions": ["cfs"] + }, + "application/x-chat": { + "source": "apache", + "extensions": ["chat"] + }, + "application/x-chess-pgn": { + "source": "apache", + "extensions": ["pgn"] + }, + "application/x-chrome-extension": { + "extensions": ["crx"] + }, + "application/x-cocoa": { + "source": "nginx", + "extensions": ["cco"] + }, + "application/x-compress": { + "source": "apache" + }, + "application/x-conference": { + "source": "apache", + "extensions": ["nsc"] + }, + "application/x-cpio": { + "source": "apache", + "extensions": ["cpio"] + }, + "application/x-csh": { + "source": "apache", + "extensions": ["csh"] + }, + "application/x-deb": { + "compressible": false + }, + "application/x-debian-package": { + "source": "apache", + "extensions": ["deb","udeb"] + }, + "application/x-dgc-compressed": { + "source": "apache", + "extensions": ["dgc"] + }, + "application/x-director": { + "source": "apache", + "extensions": ["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"] + }, + "application/x-doom": { + "source": "apache", + "extensions": ["wad"] + }, + "application/x-dtbncx+xml": { + "source": "apache", 
+ "compressible": true, + "extensions": ["ncx"] + }, + "application/x-dtbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dtb"] + }, + "application/x-dtbresource+xml": { + "source": "apache", + "compressible": true, + "extensions": ["res"] + }, + "application/x-dvi": { + "source": "apache", + "compressible": false, + "extensions": ["dvi"] + }, + "application/x-envoy": { + "source": "apache", + "extensions": ["evy"] + }, + "application/x-eva": { + "source": "apache", + "extensions": ["eva"] + }, + "application/x-font-bdf": { + "source": "apache", + "extensions": ["bdf"] + }, + "application/x-font-dos": { + "source": "apache" + }, + "application/x-font-framemaker": { + "source": "apache" + }, + "application/x-font-ghostscript": { + "source": "apache", + "extensions": ["gsf"] + }, + "application/x-font-libgrx": { + "source": "apache" + }, + "application/x-font-linux-psf": { + "source": "apache", + "extensions": ["psf"] + }, + "application/x-font-pcf": { + "source": "apache", + "extensions": ["pcf"] + }, + "application/x-font-snf": { + "source": "apache", + "extensions": ["snf"] + }, + "application/x-font-speedo": { + "source": "apache" + }, + "application/x-font-sunos-news": { + "source": "apache" + }, + "application/x-font-type1": { + "source": "apache", + "extensions": ["pfa","pfb","pfm","afm"] + }, + "application/x-font-vfont": { + "source": "apache" + }, + "application/x-freearc": { + "source": "apache", + "extensions": ["arc"] + }, + "application/x-futuresplash": { + "source": "apache", + "extensions": ["spl"] + }, + "application/x-gca-compressed": { + "source": "apache", + "extensions": ["gca"] + }, + "application/x-glulx": { + "source": "apache", + "extensions": ["ulx"] + }, + "application/x-gnumeric": { + "source": "apache", + "extensions": ["gnumeric"] + }, + "application/x-gramps-xml": { + "source": "apache", + "extensions": ["gramps"] + }, + "application/x-gtar": { + "source": "apache", + "extensions": ["gtar"] + }, + "application/x-gzip": { + "source": "apache" + }, + "application/x-hdf": { + "source": "apache", + "extensions": ["hdf"] + }, + "application/x-httpd-php": { + "compressible": true, + "extensions": ["php"] + }, + "application/x-install-instructions": { + "source": "apache", + "extensions": ["install"] + }, + "application/x-iso9660-image": { + "source": "apache", + "extensions": ["iso"] + }, + "application/x-iwork-keynote-sffkey": { + "extensions": ["key"] + }, + "application/x-iwork-numbers-sffnumbers": { + "extensions": ["numbers"] + }, + "application/x-iwork-pages-sffpages": { + "extensions": ["pages"] + }, + "application/x-java-archive-diff": { + "source": "nginx", + "extensions": ["jardiff"] + }, + "application/x-java-jnlp-file": { + "source": "apache", + "compressible": false, + "extensions": ["jnlp"] + }, + "application/x-javascript": { + "compressible": true + }, + "application/x-keepass2": { + "extensions": ["kdbx"] + }, + "application/x-latex": { + "source": "apache", + "compressible": false, + "extensions": ["latex"] + }, + "application/x-lua-bytecode": { + "extensions": ["luac"] + }, + "application/x-lzh-compressed": { + "source": "apache", + "extensions": ["lzh","lha"] + }, + "application/x-makeself": { + "source": "nginx", + "extensions": ["run"] + }, + "application/x-mie": { + "source": "apache", + "extensions": ["mie"] + }, + "application/x-mobipocket-ebook": { + "source": "apache", + "extensions": ["prc","mobi"] + }, + "application/x-mpegurl": { + "compressible": false + }, + "application/x-ms-application": { + "source": 
"apache", + "extensions": ["application"] + }, + "application/x-ms-shortcut": { + "source": "apache", + "extensions": ["lnk"] + }, + "application/x-ms-wmd": { + "source": "apache", + "extensions": ["wmd"] + }, + "application/x-ms-wmz": { + "source": "apache", + "extensions": ["wmz"] + }, + "application/x-ms-xbap": { + "source": "apache", + "extensions": ["xbap"] + }, + "application/x-msaccess": { + "source": "apache", + "extensions": ["mdb"] + }, + "application/x-msbinder": { + "source": "apache", + "extensions": ["obd"] + }, + "application/x-mscardfile": { + "source": "apache", + "extensions": ["crd"] + }, + "application/x-msclip": { + "source": "apache", + "extensions": ["clp"] + }, + "application/x-msdos-program": { + "extensions": ["exe"] + }, + "application/x-msdownload": { + "source": "apache", + "extensions": ["exe","dll","com","bat","msi"] + }, + "application/x-msmediaview": { + "source": "apache", + "extensions": ["mvb","m13","m14"] + }, + "application/x-msmetafile": { + "source": "apache", + "extensions": ["wmf","wmz","emf","emz"] + }, + "application/x-msmoney": { + "source": "apache", + "extensions": ["mny"] + }, + "application/x-mspublisher": { + "source": "apache", + "extensions": ["pub"] + }, + "application/x-msschedule": { + "source": "apache", + "extensions": ["scd"] + }, + "application/x-msterminal": { + "source": "apache", + "extensions": ["trm"] + }, + "application/x-mswrite": { + "source": "apache", + "extensions": ["wri"] + }, + "application/x-netcdf": { + "source": "apache", + "extensions": ["nc","cdf"] + }, + "application/x-ns-proxy-autoconfig": { + "compressible": true, + "extensions": ["pac"] + }, + "application/x-nzb": { + "source": "apache", + "extensions": ["nzb"] + }, + "application/x-perl": { + "source": "nginx", + "extensions": ["pl","pm"] + }, + "application/x-pilot": { + "source": "nginx", + "extensions": ["prc","pdb"] + }, + "application/x-pkcs12": { + "source": "apache", + "compressible": false, + "extensions": ["p12","pfx"] + }, + "application/x-pkcs7-certificates": { + "source": "apache", + "extensions": ["p7b","spc"] + }, + "application/x-pkcs7-certreqresp": { + "source": "apache", + "extensions": ["p7r"] + }, + "application/x-pki-message": { + "source": "iana" + }, + "application/x-rar-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["rar"] + }, + "application/x-redhat-package-manager": { + "source": "nginx", + "extensions": ["rpm"] + }, + "application/x-research-info-systems": { + "source": "apache", + "extensions": ["ris"] + }, + "application/x-sea": { + "source": "nginx", + "extensions": ["sea"] + }, + "application/x-sh": { + "source": "apache", + "compressible": true, + "extensions": ["sh"] + }, + "application/x-shar": { + "source": "apache", + "extensions": ["shar"] + }, + "application/x-shockwave-flash": { + "source": "apache", + "compressible": false, + "extensions": ["swf"] + }, + "application/x-silverlight-app": { + "source": "apache", + "extensions": ["xap"] + }, + "application/x-sql": { + "source": "apache", + "extensions": ["sql"] + }, + "application/x-stuffit": { + "source": "apache", + "compressible": false, + "extensions": ["sit"] + }, + "application/x-stuffitx": { + "source": "apache", + "extensions": ["sitx"] + }, + "application/x-subrip": { + "source": "apache", + "extensions": ["srt"] + }, + "application/x-sv4cpio": { + "source": "apache", + "extensions": ["sv4cpio"] + }, + "application/x-sv4crc": { + "source": "apache", + "extensions": ["sv4crc"] + }, + "application/x-t3vm-image": { + "source": 
"apache", + "extensions": ["t3"] + }, + "application/x-tads": { + "source": "apache", + "extensions": ["gam"] + }, + "application/x-tar": { + "source": "apache", + "compressible": true, + "extensions": ["tar"] + }, + "application/x-tcl": { + "source": "apache", + "extensions": ["tcl","tk"] + }, + "application/x-tex": { + "source": "apache", + "extensions": ["tex"] + }, + "application/x-tex-tfm": { + "source": "apache", + "extensions": ["tfm"] + }, + "application/x-texinfo": { + "source": "apache", + "extensions": ["texinfo","texi"] + }, + "application/x-tgif": { + "source": "apache", + "extensions": ["obj"] + }, + "application/x-ustar": { + "source": "apache", + "extensions": ["ustar"] + }, + "application/x-virtualbox-hdd": { + "compressible": true, + "extensions": ["hdd"] + }, + "application/x-virtualbox-ova": { + "compressible": true, + "extensions": ["ova"] + }, + "application/x-virtualbox-ovf": { + "compressible": true, + "extensions": ["ovf"] + }, + "application/x-virtualbox-vbox": { + "compressible": true, + "extensions": ["vbox"] + }, + "application/x-virtualbox-vbox-extpack": { + "compressible": false, + "extensions": ["vbox-extpack"] + }, + "application/x-virtualbox-vdi": { + "compressible": true, + "extensions": ["vdi"] + }, + "application/x-virtualbox-vhd": { + "compressible": true, + "extensions": ["vhd"] + }, + "application/x-virtualbox-vmdk": { + "compressible": true, + "extensions": ["vmdk"] + }, + "application/x-wais-source": { + "source": "apache", + "extensions": ["src"] + }, + "application/x-web-app-manifest+json": { + "compressible": true, + "extensions": ["webapp"] + }, + "application/x-www-form-urlencoded": { + "source": "iana", + "compressible": true + }, + "application/x-x509-ca-cert": { + "source": "iana", + "extensions": ["der","crt","pem"] + }, + "application/x-x509-ca-ra-cert": { + "source": "iana" + }, + "application/x-x509-next-ca-cert": { + "source": "iana" + }, + "application/x-xfig": { + "source": "apache", + "extensions": ["fig"] + }, + "application/x-xliff+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xlf"] + }, + "application/x-xpinstall": { + "source": "apache", + "compressible": false, + "extensions": ["xpi"] + }, + "application/x-xz": { + "source": "apache", + "extensions": ["xz"] + }, + "application/x-zmachine": { + "source": "apache", + "extensions": ["z1","z2","z3","z4","z5","z6","z7","z8"] + }, + "application/x400-bp": { + "source": "iana" + }, + "application/xacml+xml": { + "source": "iana", + "compressible": true + }, + "application/xaml+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xaml"] + }, + "application/xcap-att+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xav"] + }, + "application/xcap-caps+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xca"] + }, + "application/xcap-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdf"] + }, + "application/xcap-el+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xel"] + }, + "application/xcap-error+xml": { + "source": "iana", + "compressible": true + }, + "application/xcap-ns+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xns"] + }, + "application/xcon-conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/xcon-conference-info-diff+xml": { + "source": "iana", + "compressible": true + }, + "application/xenc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xenc"] + }, + 
"application/xhtml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xhtml","xht"] + }, + "application/xhtml-voice+xml": { + "source": "apache", + "compressible": true + }, + "application/xliff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xlf"] + }, + "application/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml","xsl","xsd","rng"] + }, + "application/xml-dtd": { + "source": "iana", + "compressible": true, + "extensions": ["dtd"] + }, + "application/xml-external-parsed-entity": { + "source": "iana" + }, + "application/xml-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/xmpp+xml": { + "source": "iana", + "compressible": true + }, + "application/xop+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xop"] + }, + "application/xproc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xpl"] + }, + "application/xslt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xsl","xslt"] + }, + "application/xspf+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xspf"] + }, + "application/xv+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mxml","xhvml","xvml","xvm"] + }, + "application/yang": { + "source": "iana", + "extensions": ["yang"] + }, + "application/yang-data+json": { + "source": "iana", + "compressible": true + }, + "application/yang-data+xml": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+json": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/yin+xml": { + "source": "iana", + "compressible": true, + "extensions": ["yin"] + }, + "application/zip": { + "source": "iana", + "compressible": false, + "extensions": ["zip"] + }, + "application/zlib": { + "source": "iana" + }, + "application/zstd": { + "source": "iana" + }, + "audio/1d-interleaved-parityfec": { + "source": "iana" + }, + "audio/32kadpcm": { + "source": "iana" + }, + "audio/3gpp": { + "source": "iana", + "compressible": false, + "extensions": ["3gpp"] + }, + "audio/3gpp2": { + "source": "iana" + }, + "audio/aac": { + "source": "iana" + }, + "audio/ac3": { + "source": "iana" + }, + "audio/adpcm": { + "source": "apache", + "extensions": ["adp"] + }, + "audio/amr": { + "source": "iana", + "extensions": ["amr"] + }, + "audio/amr-wb": { + "source": "iana" + }, + "audio/amr-wb+": { + "source": "iana" + }, + "audio/aptx": { + "source": "iana" + }, + "audio/asc": { + "source": "iana" + }, + "audio/atrac-advanced-lossless": { + "source": "iana" + }, + "audio/atrac-x": { + "source": "iana" + }, + "audio/atrac3": { + "source": "iana" + }, + "audio/basic": { + "source": "iana", + "compressible": false, + "extensions": ["au","snd"] + }, + "audio/bv16": { + "source": "iana" + }, + "audio/bv32": { + "source": "iana" + }, + "audio/clearmode": { + "source": "iana" + }, + "audio/cn": { + "source": "iana" + }, + "audio/dat12": { + "source": "iana" + }, + "audio/dls": { + "source": "iana" + }, + "audio/dsr-es201108": { + "source": "iana" + }, + "audio/dsr-es202050": { + "source": "iana" + }, + "audio/dsr-es202211": { + "source": "iana" + }, + "audio/dsr-es202212": { + "source": "iana" + }, + "audio/dv": { + "source": "iana" + }, + "audio/dvi4": { + "source": "iana" + }, + "audio/eac3": { + "source": "iana" + }, + "audio/encaprtp": { + "source": "iana" + }, + "audio/evrc": { + "source": "iana" + }, + "audio/evrc-qcp": { + "source": 
"iana" + }, + "audio/evrc0": { + "source": "iana" + }, + "audio/evrc1": { + "source": "iana" + }, + "audio/evrcb": { + "source": "iana" + }, + "audio/evrcb0": { + "source": "iana" + }, + "audio/evrcb1": { + "source": "iana" + }, + "audio/evrcnw": { + "source": "iana" + }, + "audio/evrcnw0": { + "source": "iana" + }, + "audio/evrcnw1": { + "source": "iana" + }, + "audio/evrcwb": { + "source": "iana" + }, + "audio/evrcwb0": { + "source": "iana" + }, + "audio/evrcwb1": { + "source": "iana" + }, + "audio/evs": { + "source": "iana" + }, + "audio/flexfec": { + "source": "iana" + }, + "audio/fwdred": { + "source": "iana" + }, + "audio/g711-0": { + "source": "iana" + }, + "audio/g719": { + "source": "iana" + }, + "audio/g722": { + "source": "iana" + }, + "audio/g7221": { + "source": "iana" + }, + "audio/g723": { + "source": "iana" + }, + "audio/g726-16": { + "source": "iana" + }, + "audio/g726-24": { + "source": "iana" + }, + "audio/g726-32": { + "source": "iana" + }, + "audio/g726-40": { + "source": "iana" + }, + "audio/g728": { + "source": "iana" + }, + "audio/g729": { + "source": "iana" + }, + "audio/g7291": { + "source": "iana" + }, + "audio/g729d": { + "source": "iana" + }, + "audio/g729e": { + "source": "iana" + }, + "audio/gsm": { + "source": "iana" + }, + "audio/gsm-efr": { + "source": "iana" + }, + "audio/gsm-hr-08": { + "source": "iana" + }, + "audio/ilbc": { + "source": "iana" + }, + "audio/ip-mr_v2.5": { + "source": "iana" + }, + "audio/isac": { + "source": "apache" + }, + "audio/l16": { + "source": "iana" + }, + "audio/l20": { + "source": "iana" + }, + "audio/l24": { + "source": "iana", + "compressible": false + }, + "audio/l8": { + "source": "iana" + }, + "audio/lpc": { + "source": "iana" + }, + "audio/melp": { + "source": "iana" + }, + "audio/melp1200": { + "source": "iana" + }, + "audio/melp2400": { + "source": "iana" + }, + "audio/melp600": { + "source": "iana" + }, + "audio/mhas": { + "source": "iana" + }, + "audio/midi": { + "source": "apache", + "extensions": ["mid","midi","kar","rmi"] + }, + "audio/mobile-xmf": { + "source": "iana", + "extensions": ["mxmf"] + }, + "audio/mp3": { + "compressible": false, + "extensions": ["mp3"] + }, + "audio/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["m4a","mp4a"] + }, + "audio/mp4a-latm": { + "source": "iana" + }, + "audio/mpa": { + "source": "iana" + }, + "audio/mpa-robust": { + "source": "iana" + }, + "audio/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpga","mp2","mp2a","mp3","m2a","m3a"] + }, + "audio/mpeg4-generic": { + "source": "iana" + }, + "audio/musepack": { + "source": "apache" + }, + "audio/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["oga","ogg","spx","opus"] + }, + "audio/opus": { + "source": "iana" + }, + "audio/parityfec": { + "source": "iana" + }, + "audio/pcma": { + "source": "iana" + }, + "audio/pcma-wb": { + "source": "iana" + }, + "audio/pcmu": { + "source": "iana" + }, + "audio/pcmu-wb": { + "source": "iana" + }, + "audio/prs.sid": { + "source": "iana" + }, + "audio/qcelp": { + "source": "iana" + }, + "audio/raptorfec": { + "source": "iana" + }, + "audio/red": { + "source": "iana" + }, + "audio/rtp-enc-aescm128": { + "source": "iana" + }, + "audio/rtp-midi": { + "source": "iana" + }, + "audio/rtploopback": { + "source": "iana" + }, + "audio/rtx": { + "source": "iana" + }, + "audio/s3m": { + "source": "apache", + "extensions": ["s3m"] + }, + "audio/scip": { + "source": "iana" + }, + "audio/silk": { + "source": "apache", + "extensions": ["sil"] + 
}, + "audio/smv": { + "source": "iana" + }, + "audio/smv-qcp": { + "source": "iana" + }, + "audio/smv0": { + "source": "iana" + }, + "audio/sofa": { + "source": "iana" + }, + "audio/sp-midi": { + "source": "iana" + }, + "audio/speex": { + "source": "iana" + }, + "audio/t140c": { + "source": "iana" + }, + "audio/t38": { + "source": "iana" + }, + "audio/telephone-event": { + "source": "iana" + }, + "audio/tetra_acelp": { + "source": "iana" + }, + "audio/tetra_acelp_bb": { + "source": "iana" + }, + "audio/tone": { + "source": "iana" + }, + "audio/tsvcis": { + "source": "iana" + }, + "audio/uemclip": { + "source": "iana" + }, + "audio/ulpfec": { + "source": "iana" + }, + "audio/usac": { + "source": "iana" + }, + "audio/vdvi": { + "source": "iana" + }, + "audio/vmr-wb": { + "source": "iana" + }, + "audio/vnd.3gpp.iufp": { + "source": "iana" + }, + "audio/vnd.4sb": { + "source": "iana" + }, + "audio/vnd.audiokoz": { + "source": "iana" + }, + "audio/vnd.celp": { + "source": "iana" + }, + "audio/vnd.cisco.nse": { + "source": "iana" + }, + "audio/vnd.cmles.radio-events": { + "source": "iana" + }, + "audio/vnd.cns.anp1": { + "source": "iana" + }, + "audio/vnd.cns.inf1": { + "source": "iana" + }, + "audio/vnd.dece.audio": { + "source": "iana", + "extensions": ["uva","uvva"] + }, + "audio/vnd.digital-winds": { + "source": "iana", + "extensions": ["eol"] + }, + "audio/vnd.dlna.adts": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.1": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.2": { + "source": "iana" + }, + "audio/vnd.dolby.mlp": { + "source": "iana" + }, + "audio/vnd.dolby.mps": { + "source": "iana" + }, + "audio/vnd.dolby.pl2": { + "source": "iana" + }, + "audio/vnd.dolby.pl2x": { + "source": "iana" + }, + "audio/vnd.dolby.pl2z": { + "source": "iana" + }, + "audio/vnd.dolby.pulse.1": { + "source": "iana" + }, + "audio/vnd.dra": { + "source": "iana", + "extensions": ["dra"] + }, + "audio/vnd.dts": { + "source": "iana", + "extensions": ["dts"] + }, + "audio/vnd.dts.hd": { + "source": "iana", + "extensions": ["dtshd"] + }, + "audio/vnd.dts.uhd": { + "source": "iana" + }, + "audio/vnd.dvb.file": { + "source": "iana" + }, + "audio/vnd.everad.plj": { + "source": "iana" + }, + "audio/vnd.hns.audio": { + "source": "iana" + }, + "audio/vnd.lucent.voice": { + "source": "iana", + "extensions": ["lvp"] + }, + "audio/vnd.ms-playready.media.pya": { + "source": "iana", + "extensions": ["pya"] + }, + "audio/vnd.nokia.mobile-xmf": { + "source": "iana" + }, + "audio/vnd.nortel.vbk": { + "source": "iana" + }, + "audio/vnd.nuera.ecelp4800": { + "source": "iana", + "extensions": ["ecelp4800"] + }, + "audio/vnd.nuera.ecelp7470": { + "source": "iana", + "extensions": ["ecelp7470"] + }, + "audio/vnd.nuera.ecelp9600": { + "source": "iana", + "extensions": ["ecelp9600"] + }, + "audio/vnd.octel.sbc": { + "source": "iana" + }, + "audio/vnd.presonus.multitrack": { + "source": "iana" + }, + "audio/vnd.qcelp": { + "source": "iana" + }, + "audio/vnd.rhetorex.32kadpcm": { + "source": "iana" + }, + "audio/vnd.rip": { + "source": "iana", + "extensions": ["rip"] + }, + "audio/vnd.rn-realaudio": { + "compressible": false + }, + "audio/vnd.sealedmedia.softseal.mpeg": { + "source": "iana" + }, + "audio/vnd.vmx.cvsd": { + "source": "iana" + }, + "audio/vnd.wave": { + "compressible": false + }, + "audio/vorbis": { + "source": "iana", + "compressible": false + }, + "audio/vorbis-config": { + "source": "iana" + }, + "audio/wav": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/wave": { + "compressible": false, + 
"extensions": ["wav"] + }, + "audio/webm": { + "source": "apache", + "compressible": false, + "extensions": ["weba"] + }, + "audio/x-aac": { + "source": "apache", + "compressible": false, + "extensions": ["aac"] + }, + "audio/x-aiff": { + "source": "apache", + "extensions": ["aif","aiff","aifc"] + }, + "audio/x-caf": { + "source": "apache", + "compressible": false, + "extensions": ["caf"] + }, + "audio/x-flac": { + "source": "apache", + "extensions": ["flac"] + }, + "audio/x-m4a": { + "source": "nginx", + "extensions": ["m4a"] + }, + "audio/x-matroska": { + "source": "apache", + "extensions": ["mka"] + }, + "audio/x-mpegurl": { + "source": "apache", + "extensions": ["m3u"] + }, + "audio/x-ms-wax": { + "source": "apache", + "extensions": ["wax"] + }, + "audio/x-ms-wma": { + "source": "apache", + "extensions": ["wma"] + }, + "audio/x-pn-realaudio": { + "source": "apache", + "extensions": ["ram","ra"] + }, + "audio/x-pn-realaudio-plugin": { + "source": "apache", + "extensions": ["rmp"] + }, + "audio/x-realaudio": { + "source": "nginx", + "extensions": ["ra"] + }, + "audio/x-tta": { + "source": "apache" + }, + "audio/x-wav": { + "source": "apache", + "extensions": ["wav"] + }, + "audio/xm": { + "source": "apache", + "extensions": ["xm"] + }, + "chemical/x-cdx": { + "source": "apache", + "extensions": ["cdx"] + }, + "chemical/x-cif": { + "source": "apache", + "extensions": ["cif"] + }, + "chemical/x-cmdf": { + "source": "apache", + "extensions": ["cmdf"] + }, + "chemical/x-cml": { + "source": "apache", + "extensions": ["cml"] + }, + "chemical/x-csml": { + "source": "apache", + "extensions": ["csml"] + }, + "chemical/x-pdb": { + "source": "apache" + }, + "chemical/x-xyz": { + "source": "apache", + "extensions": ["xyz"] + }, + "font/collection": { + "source": "iana", + "extensions": ["ttc"] + }, + "font/otf": { + "source": "iana", + "compressible": true, + "extensions": ["otf"] + }, + "font/sfnt": { + "source": "iana" + }, + "font/ttf": { + "source": "iana", + "compressible": true, + "extensions": ["ttf"] + }, + "font/woff": { + "source": "iana", + "extensions": ["woff"] + }, + "font/woff2": { + "source": "iana", + "extensions": ["woff2"] + }, + "image/aces": { + "source": "iana", + "extensions": ["exr"] + }, + "image/apng": { + "compressible": false, + "extensions": ["apng"] + }, + "image/avci": { + "source": "iana", + "extensions": ["avci"] + }, + "image/avcs": { + "source": "iana", + "extensions": ["avcs"] + }, + "image/avif": { + "source": "iana", + "compressible": false, + "extensions": ["avif"] + }, + "image/bmp": { + "source": "iana", + "compressible": true, + "extensions": ["bmp"] + }, + "image/cgm": { + "source": "iana", + "extensions": ["cgm"] + }, + "image/dicom-rle": { + "source": "iana", + "extensions": ["drle"] + }, + "image/emf": { + "source": "iana", + "extensions": ["emf"] + }, + "image/fits": { + "source": "iana", + "extensions": ["fits"] + }, + "image/g3fax": { + "source": "iana", + "extensions": ["g3"] + }, + "image/gif": { + "source": "iana", + "compressible": false, + "extensions": ["gif"] + }, + "image/heic": { + "source": "iana", + "extensions": ["heic"] + }, + "image/heic-sequence": { + "source": "iana", + "extensions": ["heics"] + }, + "image/heif": { + "source": "iana", + "extensions": ["heif"] + }, + "image/heif-sequence": { + "source": "iana", + "extensions": ["heifs"] + }, + "image/hej2k": { + "source": "iana", + "extensions": ["hej2"] + }, + "image/hsj2": { + "source": "iana", + "extensions": ["hsj2"] + }, + "image/ief": { + "source": "iana", + "extensions": ["ief"] 
+ }, + "image/jls": { + "source": "iana", + "extensions": ["jls"] + }, + "image/jp2": { + "source": "iana", + "compressible": false, + "extensions": ["jp2","jpg2"] + }, + "image/jpeg": { + "source": "iana", + "compressible": false, + "extensions": ["jpeg","jpg","jpe"] + }, + "image/jph": { + "source": "iana", + "extensions": ["jph"] + }, + "image/jphc": { + "source": "iana", + "extensions": ["jhc"] + }, + "image/jpm": { + "source": "iana", + "compressible": false, + "extensions": ["jpm"] + }, + "image/jpx": { + "source": "iana", + "compressible": false, + "extensions": ["jpx","jpf"] + }, + "image/jxr": { + "source": "iana", + "extensions": ["jxr"] + }, + "image/jxra": { + "source": "iana", + "extensions": ["jxra"] + }, + "image/jxrs": { + "source": "iana", + "extensions": ["jxrs"] + }, + "image/jxs": { + "source": "iana", + "extensions": ["jxs"] + }, + "image/jxsc": { + "source": "iana", + "extensions": ["jxsc"] + }, + "image/jxsi": { + "source": "iana", + "extensions": ["jxsi"] + }, + "image/jxss": { + "source": "iana", + "extensions": ["jxss"] + }, + "image/ktx": { + "source": "iana", + "extensions": ["ktx"] + }, + "image/ktx2": { + "source": "iana", + "extensions": ["ktx2"] + }, + "image/naplps": { + "source": "iana" + }, + "image/pjpeg": { + "compressible": false + }, + "image/png": { + "source": "iana", + "compressible": false, + "extensions": ["png"] + }, + "image/prs.btif": { + "source": "iana", + "extensions": ["btif"] + }, + "image/prs.pti": { + "source": "iana", + "extensions": ["pti"] + }, + "image/pwg-raster": { + "source": "iana" + }, + "image/sgi": { + "source": "apache", + "extensions": ["sgi"] + }, + "image/svg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["svg","svgz"] + }, + "image/t38": { + "source": "iana", + "extensions": ["t38"] + }, + "image/tiff": { + "source": "iana", + "compressible": false, + "extensions": ["tif","tiff"] + }, + "image/tiff-fx": { + "source": "iana", + "extensions": ["tfx"] + }, + "image/vnd.adobe.photoshop": { + "source": "iana", + "compressible": true, + "extensions": ["psd"] + }, + "image/vnd.airzip.accelerator.azv": { + "source": "iana", + "extensions": ["azv"] + }, + "image/vnd.cns.inf2": { + "source": "iana" + }, + "image/vnd.dece.graphic": { + "source": "iana", + "extensions": ["uvi","uvvi","uvg","uvvg"] + }, + "image/vnd.djvu": { + "source": "iana", + "extensions": ["djvu","djv"] + }, + "image/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "image/vnd.dwg": { + "source": "iana", + "extensions": ["dwg"] + }, + "image/vnd.dxf": { + "source": "iana", + "extensions": ["dxf"] + }, + "image/vnd.fastbidsheet": { + "source": "iana", + "extensions": ["fbs"] + }, + "image/vnd.fpx": { + "source": "iana", + "extensions": ["fpx"] + }, + "image/vnd.fst": { + "source": "iana", + "extensions": ["fst"] + }, + "image/vnd.fujixerox.edmics-mmr": { + "source": "iana", + "extensions": ["mmr"] + }, + "image/vnd.fujixerox.edmics-rlc": { + "source": "iana", + "extensions": ["rlc"] + }, + "image/vnd.globalgraphics.pgb": { + "source": "iana" + }, + "image/vnd.microsoft.icon": { + "source": "iana", + "compressible": true, + "extensions": ["ico"] + }, + "image/vnd.mix": { + "source": "iana" + }, + "image/vnd.mozilla.apng": { + "source": "iana" + }, + "image/vnd.ms-dds": { + "compressible": true, + "extensions": ["dds"] + }, + "image/vnd.ms-modi": { + "source": "iana", + "extensions": ["mdi"] + }, + "image/vnd.ms-photo": { + "source": "apache", + "extensions": ["wdp"] + }, + "image/vnd.net-fpx": { + "source": "iana", + 
"extensions": ["npx"] + }, + "image/vnd.pco.b16": { + "source": "iana", + "extensions": ["b16"] + }, + "image/vnd.radiance": { + "source": "iana" + }, + "image/vnd.sealed.png": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.gif": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.jpg": { + "source": "iana" + }, + "image/vnd.svf": { + "source": "iana" + }, + "image/vnd.tencent.tap": { + "source": "iana", + "extensions": ["tap"] + }, + "image/vnd.valve.source.texture": { + "source": "iana", + "extensions": ["vtf"] + }, + "image/vnd.wap.wbmp": { + "source": "iana", + "extensions": ["wbmp"] + }, + "image/vnd.xiff": { + "source": "iana", + "extensions": ["xif"] + }, + "image/vnd.zbrush.pcx": { + "source": "iana", + "extensions": ["pcx"] + }, + "image/webp": { + "source": "apache", + "extensions": ["webp"] + }, + "image/wmf": { + "source": "iana", + "extensions": ["wmf"] + }, + "image/x-3ds": { + "source": "apache", + "extensions": ["3ds"] + }, + "image/x-cmu-raster": { + "source": "apache", + "extensions": ["ras"] + }, + "image/x-cmx": { + "source": "apache", + "extensions": ["cmx"] + }, + "image/x-freehand": { + "source": "apache", + "extensions": ["fh","fhc","fh4","fh5","fh7"] + }, + "image/x-icon": { + "source": "apache", + "compressible": true, + "extensions": ["ico"] + }, + "image/x-jng": { + "source": "nginx", + "extensions": ["jng"] + }, + "image/x-mrsid-image": { + "source": "apache", + "extensions": ["sid"] + }, + "image/x-ms-bmp": { + "source": "nginx", + "compressible": true, + "extensions": ["bmp"] + }, + "image/x-pcx": { + "source": "apache", + "extensions": ["pcx"] + }, + "image/x-pict": { + "source": "apache", + "extensions": ["pic","pct"] + }, + "image/x-portable-anymap": { + "source": "apache", + "extensions": ["pnm"] + }, + "image/x-portable-bitmap": { + "source": "apache", + "extensions": ["pbm"] + }, + "image/x-portable-graymap": { + "source": "apache", + "extensions": ["pgm"] + }, + "image/x-portable-pixmap": { + "source": "apache", + "extensions": ["ppm"] + }, + "image/x-rgb": { + "source": "apache", + "extensions": ["rgb"] + }, + "image/x-tga": { + "source": "apache", + "extensions": ["tga"] + }, + "image/x-xbitmap": { + "source": "apache", + "extensions": ["xbm"] + }, + "image/x-xcf": { + "compressible": false + }, + "image/x-xpixmap": { + "source": "apache", + "extensions": ["xpm"] + }, + "image/x-xwindowdump": { + "source": "apache", + "extensions": ["xwd"] + }, + "message/cpim": { + "source": "iana" + }, + "message/delivery-status": { + "source": "iana" + }, + "message/disposition-notification": { + "source": "iana", + "extensions": [ + "disposition-notification" + ] + }, + "message/external-body": { + "source": "iana" + }, + "message/feedback-report": { + "source": "iana" + }, + "message/global": { + "source": "iana", + "extensions": ["u8msg"] + }, + "message/global-delivery-status": { + "source": "iana", + "extensions": ["u8dsn"] + }, + "message/global-disposition-notification": { + "source": "iana", + "extensions": ["u8mdn"] + }, + "message/global-headers": { + "source": "iana", + "extensions": ["u8hdr"] + }, + "message/http": { + "source": "iana", + "compressible": false + }, + "message/imdn+xml": { + "source": "iana", + "compressible": true + }, + "message/news": { + "source": "iana" + }, + "message/partial": { + "source": "iana", + "compressible": false + }, + "message/rfc822": { + "source": "iana", + "compressible": true, + "extensions": ["eml","mime"] + }, + "message/s-http": { + "source": "iana" + }, + "message/sip": { + "source": "iana" 
+ }, + "message/sipfrag": { + "source": "iana" + }, + "message/tracking-status": { + "source": "iana" + }, + "message/vnd.si.simp": { + "source": "iana" + }, + "message/vnd.wfa.wsc": { + "source": "iana", + "extensions": ["wsc"] + }, + "model/3mf": { + "source": "iana", + "extensions": ["3mf"] + }, + "model/e57": { + "source": "iana" + }, + "model/gltf+json": { + "source": "iana", + "compressible": true, + "extensions": ["gltf"] + }, + "model/gltf-binary": { + "source": "iana", + "compressible": true, + "extensions": ["glb"] + }, + "model/iges": { + "source": "iana", + "compressible": false, + "extensions": ["igs","iges"] + }, + "model/mesh": { + "source": "iana", + "compressible": false, + "extensions": ["msh","mesh","silo"] + }, + "model/mtl": { + "source": "iana", + "extensions": ["mtl"] + }, + "model/obj": { + "source": "iana", + "extensions": ["obj"] + }, + "model/step": { + "source": "iana" + }, + "model/step+xml": { + "source": "iana", + "compressible": true, + "extensions": ["stpx"] + }, + "model/step+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpz"] + }, + "model/step-xml+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpxz"] + }, + "model/stl": { + "source": "iana", + "extensions": ["stl"] + }, + "model/vnd.collada+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dae"] + }, + "model/vnd.dwf": { + "source": "iana", + "extensions": ["dwf"] + }, + "model/vnd.flatland.3dml": { + "source": "iana" + }, + "model/vnd.gdl": { + "source": "iana", + "extensions": ["gdl"] + }, + "model/vnd.gs-gdl": { + "source": "apache" + }, + "model/vnd.gs.gdl": { + "source": "iana" + }, + "model/vnd.gtw": { + "source": "iana", + "extensions": ["gtw"] + }, + "model/vnd.moml+xml": { + "source": "iana", + "compressible": true + }, + "model/vnd.mts": { + "source": "iana", + "extensions": ["mts"] + }, + "model/vnd.opengex": { + "source": "iana", + "extensions": ["ogex"] + }, + "model/vnd.parasolid.transmit.binary": { + "source": "iana", + "extensions": ["x_b"] + }, + "model/vnd.parasolid.transmit.text": { + "source": "iana", + "extensions": ["x_t"] + }, + "model/vnd.pytha.pyox": { + "source": "iana" + }, + "model/vnd.rosette.annotated-data-model": { + "source": "iana" + }, + "model/vnd.sap.vds": { + "source": "iana", + "extensions": ["vds"] + }, + "model/vnd.usdz+zip": { + "source": "iana", + "compressible": false, + "extensions": ["usdz"] + }, + "model/vnd.valve.source.compiled-map": { + "source": "iana", + "extensions": ["bsp"] + }, + "model/vnd.vtu": { + "source": "iana", + "extensions": ["vtu"] + }, + "model/vrml": { + "source": "iana", + "compressible": false, + "extensions": ["wrl","vrml"] + }, + "model/x3d+binary": { + "source": "apache", + "compressible": false, + "extensions": ["x3db","x3dbz"] + }, + "model/x3d+fastinfoset": { + "source": "iana", + "extensions": ["x3db"] + }, + "model/x3d+vrml": { + "source": "apache", + "compressible": false, + "extensions": ["x3dv","x3dvz"] + }, + "model/x3d+xml": { + "source": "iana", + "compressible": true, + "extensions": ["x3d","x3dz"] + }, + "model/x3d-vrml": { + "source": "iana", + "extensions": ["x3dv"] + }, + "multipart/alternative": { + "source": "iana", + "compressible": false + }, + "multipart/appledouble": { + "source": "iana" + }, + "multipart/byteranges": { + "source": "iana" + }, + "multipart/digest": { + "source": "iana" + }, + "multipart/encrypted": { + "source": "iana", + "compressible": false + }, + "multipart/form-data": { + "source": "iana", + "compressible": false + }, + 
"multipart/header-set": { + "source": "iana" + }, + "multipart/mixed": { + "source": "iana" + }, + "multipart/multilingual": { + "source": "iana" + }, + "multipart/parallel": { + "source": "iana" + }, + "multipart/related": { + "source": "iana", + "compressible": false + }, + "multipart/report": { + "source": "iana" + }, + "multipart/signed": { + "source": "iana", + "compressible": false + }, + "multipart/vnd.bint.med-plus": { + "source": "iana" + }, + "multipart/voice-message": { + "source": "iana" + }, + "multipart/x-mixed-replace": { + "source": "iana" + }, + "text/1d-interleaved-parityfec": { + "source": "iana" + }, + "text/cache-manifest": { + "source": "iana", + "compressible": true, + "extensions": ["appcache","manifest"] + }, + "text/calendar": { + "source": "iana", + "extensions": ["ics","ifb"] + }, + "text/calender": { + "compressible": true + }, + "text/cmd": { + "compressible": true + }, + "text/coffeescript": { + "extensions": ["coffee","litcoffee"] + }, + "text/cql": { + "source": "iana" + }, + "text/cql-expression": { + "source": "iana" + }, + "text/cql-identifier": { + "source": "iana" + }, + "text/css": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["css"] + }, + "text/csv": { + "source": "iana", + "compressible": true, + "extensions": ["csv"] + }, + "text/csv-schema": { + "source": "iana" + }, + "text/directory": { + "source": "iana" + }, + "text/dns": { + "source": "iana" + }, + "text/ecmascript": { + "source": "iana" + }, + "text/encaprtp": { + "source": "iana" + }, + "text/enriched": { + "source": "iana" + }, + "text/fhirpath": { + "source": "iana" + }, + "text/flexfec": { + "source": "iana" + }, + "text/fwdred": { + "source": "iana" + }, + "text/gff3": { + "source": "iana" + }, + "text/grammar-ref-list": { + "source": "iana" + }, + "text/html": { + "source": "iana", + "compressible": true, + "extensions": ["html","htm","shtml"] + }, + "text/jade": { + "extensions": ["jade"] + }, + "text/javascript": { + "source": "iana", + "compressible": true + }, + "text/jcr-cnd": { + "source": "iana" + }, + "text/jsx": { + "compressible": true, + "extensions": ["jsx"] + }, + "text/less": { + "compressible": true, + "extensions": ["less"] + }, + "text/markdown": { + "source": "iana", + "compressible": true, + "extensions": ["markdown","md"] + }, + "text/mathml": { + "source": "nginx", + "extensions": ["mml"] + }, + "text/mdx": { + "compressible": true, + "extensions": ["mdx"] + }, + "text/mizar": { + "source": "iana" + }, + "text/n3": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["n3"] + }, + "text/parameters": { + "source": "iana", + "charset": "UTF-8" + }, + "text/parityfec": { + "source": "iana" + }, + "text/plain": { + "source": "iana", + "compressible": true, + "extensions": ["txt","text","conf","def","list","log","in","ini"] + }, + "text/provenance-notation": { + "source": "iana", + "charset": "UTF-8" + }, + "text/prs.fallenstein.rst": { + "source": "iana" + }, + "text/prs.lines.tag": { + "source": "iana", + "extensions": ["dsc"] + }, + "text/prs.prop.logic": { + "source": "iana" + }, + "text/raptorfec": { + "source": "iana" + }, + "text/red": { + "source": "iana" + }, + "text/rfc822-headers": { + "source": "iana" + }, + "text/richtext": { + "source": "iana", + "compressible": true, + "extensions": ["rtx"] + }, + "text/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "text/rtp-enc-aescm128": { + "source": "iana" + }, + "text/rtploopback": { + "source": "iana" + }, + 
"text/rtx": { + "source": "iana" + }, + "text/sgml": { + "source": "iana", + "extensions": ["sgml","sgm"] + }, + "text/shaclc": { + "source": "iana" + }, + "text/shex": { + "source": "iana", + "extensions": ["shex"] + }, + "text/slim": { + "extensions": ["slim","slm"] + }, + "text/spdx": { + "source": "iana", + "extensions": ["spdx"] + }, + "text/strings": { + "source": "iana" + }, + "text/stylus": { + "extensions": ["stylus","styl"] + }, + "text/t140": { + "source": "iana" + }, + "text/tab-separated-values": { + "source": "iana", + "compressible": true, + "extensions": ["tsv"] + }, + "text/troff": { + "source": "iana", + "extensions": ["t","tr","roff","man","me","ms"] + }, + "text/turtle": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["ttl"] + }, + "text/ulpfec": { + "source": "iana" + }, + "text/uri-list": { + "source": "iana", + "compressible": true, + "extensions": ["uri","uris","urls"] + }, + "text/vcard": { + "source": "iana", + "compressible": true, + "extensions": ["vcard"] + }, + "text/vnd.a": { + "source": "iana" + }, + "text/vnd.abc": { + "source": "iana" + }, + "text/vnd.ascii-art": { + "source": "iana" + }, + "text/vnd.curl": { + "source": "iana", + "extensions": ["curl"] + }, + "text/vnd.curl.dcurl": { + "source": "apache", + "extensions": ["dcurl"] + }, + "text/vnd.curl.mcurl": { + "source": "apache", + "extensions": ["mcurl"] + }, + "text/vnd.curl.scurl": { + "source": "apache", + "extensions": ["scurl"] + }, + "text/vnd.debian.copyright": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.dmclientscript": { + "source": "iana" + }, + "text/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "text/vnd.esmertec.theme-descriptor": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.familysearch.gedcom": { + "source": "iana", + "extensions": ["ged"] + }, + "text/vnd.ficlab.flt": { + "source": "iana" + }, + "text/vnd.fly": { + "source": "iana", + "extensions": ["fly"] + }, + "text/vnd.fmi.flexstor": { + "source": "iana", + "extensions": ["flx"] + }, + "text/vnd.gml": { + "source": "iana" + }, + "text/vnd.graphviz": { + "source": "iana", + "extensions": ["gv"] + }, + "text/vnd.hans": { + "source": "iana" + }, + "text/vnd.hgl": { + "source": "iana" + }, + "text/vnd.in3d.3dml": { + "source": "iana", + "extensions": ["3dml"] + }, + "text/vnd.in3d.spot": { + "source": "iana", + "extensions": ["spot"] + }, + "text/vnd.iptc.newsml": { + "source": "iana" + }, + "text/vnd.iptc.nitf": { + "source": "iana" + }, + "text/vnd.latex-z": { + "source": "iana" + }, + "text/vnd.motorola.reflex": { + "source": "iana" + }, + "text/vnd.ms-mediapackage": { + "source": "iana" + }, + "text/vnd.net2phone.commcenter.command": { + "source": "iana" + }, + "text/vnd.radisys.msml-basic-layout": { + "source": "iana" + }, + "text/vnd.senx.warpscript": { + "source": "iana" + }, + "text/vnd.si.uricatalogue": { + "source": "iana" + }, + "text/vnd.sosi": { + "source": "iana" + }, + "text/vnd.sun.j2me.app-descriptor": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["jad"] + }, + "text/vnd.trolltech.linguist": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.wap.si": { + "source": "iana" + }, + "text/vnd.wap.sl": { + "source": "iana" + }, + "text/vnd.wap.wml": { + "source": "iana", + "extensions": ["wml"] + }, + "text/vnd.wap.wmlscript": { + "source": "iana", + "extensions": ["wmls"] + }, + "text/vtt": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["vtt"] + }, + "text/x-asm": { + "source": "apache", + 
"extensions": ["s","asm"] + }, + "text/x-c": { + "source": "apache", + "extensions": ["c","cc","cxx","cpp","h","hh","dic"] + }, + "text/x-component": { + "source": "nginx", + "extensions": ["htc"] + }, + "text/x-fortran": { + "source": "apache", + "extensions": ["f","for","f77","f90"] + }, + "text/x-gwt-rpc": { + "compressible": true + }, + "text/x-handlebars-template": { + "extensions": ["hbs"] + }, + "text/x-java-source": { + "source": "apache", + "extensions": ["java"] + }, + "text/x-jquery-tmpl": { + "compressible": true + }, + "text/x-lua": { + "extensions": ["lua"] + }, + "text/x-markdown": { + "compressible": true, + "extensions": ["mkd"] + }, + "text/x-nfo": { + "source": "apache", + "extensions": ["nfo"] + }, + "text/x-opml": { + "source": "apache", + "extensions": ["opml"] + }, + "text/x-org": { + "compressible": true, + "extensions": ["org"] + }, + "text/x-pascal": { + "source": "apache", + "extensions": ["p","pas"] + }, + "text/x-processing": { + "compressible": true, + "extensions": ["pde"] + }, + "text/x-sass": { + "extensions": ["sass"] + }, + "text/x-scss": { + "extensions": ["scss"] + }, + "text/x-setext": { + "source": "apache", + "extensions": ["etx"] + }, + "text/x-sfv": { + "source": "apache", + "extensions": ["sfv"] + }, + "text/x-suse-ymp": { + "compressible": true, + "extensions": ["ymp"] + }, + "text/x-uuencode": { + "source": "apache", + "extensions": ["uu"] + }, + "text/x-vcalendar": { + "source": "apache", + "extensions": ["vcs"] + }, + "text/x-vcard": { + "source": "apache", + "extensions": ["vcf"] + }, + "text/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml"] + }, + "text/xml-external-parsed-entity": { + "source": "iana" + }, + "text/yaml": { + "compressible": true, + "extensions": ["yaml","yml"] + }, + "video/1d-interleaved-parityfec": { + "source": "iana" + }, + "video/3gpp": { + "source": "iana", + "extensions": ["3gp","3gpp"] + }, + "video/3gpp-tt": { + "source": "iana" + }, + "video/3gpp2": { + "source": "iana", + "extensions": ["3g2"] + }, + "video/av1": { + "source": "iana" + }, + "video/bmpeg": { + "source": "iana" + }, + "video/bt656": { + "source": "iana" + }, + "video/celb": { + "source": "iana" + }, + "video/dv": { + "source": "iana" + }, + "video/encaprtp": { + "source": "iana" + }, + "video/ffv1": { + "source": "iana" + }, + "video/flexfec": { + "source": "iana" + }, + "video/h261": { + "source": "iana", + "extensions": ["h261"] + }, + "video/h263": { + "source": "iana", + "extensions": ["h263"] + }, + "video/h263-1998": { + "source": "iana" + }, + "video/h263-2000": { + "source": "iana" + }, + "video/h264": { + "source": "iana", + "extensions": ["h264"] + }, + "video/h264-rcdo": { + "source": "iana" + }, + "video/h264-svc": { + "source": "iana" + }, + "video/h265": { + "source": "iana" + }, + "video/iso.segment": { + "source": "iana", + "extensions": ["m4s"] + }, + "video/jpeg": { + "source": "iana", + "extensions": ["jpgv"] + }, + "video/jpeg2000": { + "source": "iana" + }, + "video/jpm": { + "source": "apache", + "extensions": ["jpm","jpgm"] + }, + "video/jxsv": { + "source": "iana" + }, + "video/mj2": { + "source": "iana", + "extensions": ["mj2","mjp2"] + }, + "video/mp1s": { + "source": "iana" + }, + "video/mp2p": { + "source": "iana" + }, + "video/mp2t": { + "source": "iana", + "extensions": ["ts"] + }, + "video/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["mp4","mp4v","mpg4"] + }, + "video/mp4v-es": { + "source": "iana" + }, + "video/mpeg": { + "source": "iana", + "compressible": false, + 
"extensions": ["mpeg","mpg","mpe","m1v","m2v"] + }, + "video/mpeg4-generic": { + "source": "iana" + }, + "video/mpv": { + "source": "iana" + }, + "video/nv": { + "source": "iana" + }, + "video/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogv"] + }, + "video/parityfec": { + "source": "iana" + }, + "video/pointer": { + "source": "iana" + }, + "video/quicktime": { + "source": "iana", + "compressible": false, + "extensions": ["qt","mov"] + }, + "video/raptorfec": { + "source": "iana" + }, + "video/raw": { + "source": "iana" + }, + "video/rtp-enc-aescm128": { + "source": "iana" + }, + "video/rtploopback": { + "source": "iana" + }, + "video/rtx": { + "source": "iana" + }, + "video/scip": { + "source": "iana" + }, + "video/smpte291": { + "source": "iana" + }, + "video/smpte292m": { + "source": "iana" + }, + "video/ulpfec": { + "source": "iana" + }, + "video/vc1": { + "source": "iana" + }, + "video/vc2": { + "source": "iana" + }, + "video/vnd.cctv": { + "source": "iana" + }, + "video/vnd.dece.hd": { + "source": "iana", + "extensions": ["uvh","uvvh"] + }, + "video/vnd.dece.mobile": { + "source": "iana", + "extensions": ["uvm","uvvm"] + }, + "video/vnd.dece.mp4": { + "source": "iana" + }, + "video/vnd.dece.pd": { + "source": "iana", + "extensions": ["uvp","uvvp"] + }, + "video/vnd.dece.sd": { + "source": "iana", + "extensions": ["uvs","uvvs"] + }, + "video/vnd.dece.video": { + "source": "iana", + "extensions": ["uvv","uvvv"] + }, + "video/vnd.directv.mpeg": { + "source": "iana" + }, + "video/vnd.directv.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dlna.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dvb.file": { + "source": "iana", + "extensions": ["dvb"] + }, + "video/vnd.fvt": { + "source": "iana", + "extensions": ["fvt"] + }, + "video/vnd.hns.video": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsavc": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsmpeg2": { + "source": "iana" + }, + "video/vnd.motorola.video": { + "source": "iana" + }, + "video/vnd.motorola.videop": { + "source": "iana" + }, + "video/vnd.mpegurl": { + "source": "iana", + "extensions": ["mxu","m4u"] + }, + "video/vnd.ms-playready.media.pyv": { + "source": "iana", + "extensions": ["pyv"] + }, + "video/vnd.nokia.interleaved-multimedia": { + "source": "iana" + }, + "video/vnd.nokia.mp4vr": { + "source": "iana" + }, + "video/vnd.nokia.videovoip": { + "source": "iana" + }, + "video/vnd.objectvideo": { + "source": "iana" + }, + "video/vnd.radgamettools.bink": { + "source": "iana" + }, + "video/vnd.radgamettools.smacker": { + "source": "iana" + }, + "video/vnd.sealed.mpeg1": { + "source": "iana" + }, + "video/vnd.sealed.mpeg4": { + "source": "iana" + }, + "video/vnd.sealed.swf": { + "source": "iana" + }, + "video/vnd.sealedmedia.softseal.mov": { + "source": "iana" + }, + "video/vnd.uvvu.mp4": { + "source": "iana", + "extensions": ["uvu","uvvu"] + }, + "video/vnd.vivo": { + "source": "iana", + "extensions": ["viv"] + }, + "video/vnd.youtube.yt": { + "source": "iana" + }, + "video/vp8": { + "source": "iana" + }, + "video/vp9": { + "source": "iana" + }, + "video/webm": { + "source": "apache", + "compressible": false, + "extensions": ["webm"] + }, + "video/x-f4v": { + "source": "apache", + "extensions": ["f4v"] + }, + 
"video/x-fli": { + "source": "apache", + "extensions": ["fli"] + }, + "video/x-flv": { + "source": "apache", + "compressible": false, + "extensions": ["flv"] + }, + "video/x-m4v": { + "source": "apache", + "extensions": ["m4v"] + }, + "video/x-matroska": { + "source": "apache", + "compressible": false, + "extensions": ["mkv","mk3d","mks"] + }, + "video/x-mng": { + "source": "apache", + "extensions": ["mng"] + }, + "video/x-ms-asf": { + "source": "apache", + "extensions": ["asf","asx"] + }, + "video/x-ms-vob": { + "source": "apache", + "extensions": ["vob"] + }, + "video/x-ms-wm": { + "source": "apache", + "extensions": ["wm"] + }, + "video/x-ms-wmv": { + "source": "apache", + "compressible": false, + "extensions": ["wmv"] + }, + "video/x-ms-wmx": { + "source": "apache", + "extensions": ["wmx"] + }, + "video/x-ms-wvx": { + "source": "apache", + "extensions": ["wvx"] + }, + "video/x-msvideo": { + "source": "apache", + "extensions": ["avi"] + }, + "video/x-sgi-movie": { + "source": "apache", + "extensions": ["movie"] + }, + "video/x-smv": { + "source": "apache", + "extensions": ["smv"] + }, + "x-conference/x-cooltalk": { + "source": "apache", + "extensions": ["ice"] + }, + "x-shader/x-fragment": { + "compressible": true + }, + "x-shader/x-vertex": { + "compressible": true + } +} diff --git a/node_modules/mime-db/index.js b/node_modules/mime-db/index.js new file mode 100644 index 0000000..ec2be30 --- /dev/null +++ b/node_modules/mime-db/index.js @@ -0,0 +1,12 @@ +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = require('./db.json') diff --git a/node_modules/mime-db/package.json b/node_modules/mime-db/package.json new file mode 100644 index 0000000..32c14b8 --- /dev/null +++ b/node_modules/mime-db/package.json @@ -0,0 +1,60 @@ +{ + "name": "mime-db", + "description": "Media Type Database", + "version": "1.52.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "Robert Kieffer (http://github.com/broofa)" + ], + "license": "MIT", + "keywords": [ + "mime", + "db", + "type", + "types", + "database", + "charset", + "charsets" + ], + "repository": "jshttp/mime-db", + "devDependencies": { + "bluebird": "3.7.2", + "co": "4.6.0", + "cogent": "1.0.1", + "csv-parse": "4.16.3", + "eslint": "7.32.0", + "eslint-config-standard": "15.0.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.1.1", + "eslint-plugin-standard": "4.1.0", + "gnode": "0.1.2", + "media-typer": "1.1.0", + "mocha": "9.2.1", + "nyc": "15.1.0", + "raw-body": "2.5.0", + "stream-to-array": "2.3.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "db.json", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "build": "node scripts/build", + "fetch": "node scripts/fetch-apache && gnode scripts/fetch-iana && node scripts/fetch-nginx", + "lint": "eslint .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "update": "npm run fetch && npm run build", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/mime-types/HISTORY.md b/node_modules/mime-types/HISTORY.md new file mode 100644 index 0000000..c5043b7 --- /dev/null +++ b/node_modules/mime-types/HISTORY.md @@ -0,0 
+1,397 @@ +2.1.35 / 2022-03-12 +=================== + + * deps: mime-db@1.52.0 + - Add extensions from IANA for more `image/*` types + - Add extension `.asc` to `application/pgp-keys` + - Add extensions to various XML types + - Add new upstream MIME types + +2.1.34 / 2021-11-08 +=================== + + * deps: mime-db@1.51.0 + - Add new upstream MIME types + +2.1.33 / 2021-10-01 +=================== + + * deps: mime-db@1.50.0 + - Add deprecated iWorks mime types and extensions + - Add new upstream MIME types + +2.1.32 / 2021-07-27 +=================== + + * deps: mime-db@1.49.0 + - Add extension `.trig` to `application/trig` + - Add new upstream MIME types + +2.1.31 / 2021-06-01 +=================== + + * deps: mime-db@1.48.0 + - Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + - Add new upstream MIME types + +2.1.30 / 2021-04-02 +=================== + + * deps: mime-db@1.47.0 + - Add extension `.amr` to `audio/amr` + - Remove ambigious extensions from IANA for `application/*+xml` types + - Update primary extension to `.es` for `application/ecmascript` + +2.1.29 / 2021-02-17 +=================== + + * deps: mime-db@1.46.0 + - Add extension `.amr` to `audio/amr` + - Add extension `.m4s` to `video/iso.segment` + - Add extension `.opus` to `audio/ogg` + - Add new upstream MIME types + +2.1.28 / 2021-01-01 +=================== + + * deps: mime-db@1.45.0 + - Add `application/ubjson` with extension `.ubj` + - Add `image/avif` with extension `.avif` + - Add `image/ktx2` with extension `.ktx2` + - Add extension `.dbf` to `application/vnd.dbf` + - Add extension `.rar` to `application/vnd.rar` + - Add extension `.td` to `application/urc-targetdesc+xml` + - Add new upstream MIME types + - Fix extension of `application/vnd.apple.keynote` to be `.key` + +2.1.27 / 2020-04-23 +=================== + + * deps: mime-db@1.44.0 + - Add charsets from IANA + - Add extension `.cjs` to `application/node` + - Add new upstream MIME types + +2.1.26 / 2020-01-05 +=================== + + * deps: mime-db@1.43.0 + - Add `application/x-keepass2` with extension `.kdbx` + - Add extension `.mxmf` to `audio/mobile-xmf` + - Add extensions from IANA for `application/*+xml` types + - Add new upstream MIME types + +2.1.25 / 2019-11-12 +=================== + + * deps: mime-db@1.42.0 + - Add new upstream MIME types + - Add `application/toml` with extension `.toml` + - Add `image/vnd.ms-dds` with extension `.dds` + +2.1.24 / 2019-04-20 +=================== + + * deps: mime-db@1.40.0 + - Add extensions from IANA for `model/*` types + - Add `text/mdx` with extension `.mdx` + +2.1.23 / 2019-04-17 +=================== + + * deps: mime-db@~1.39.0 + - Add extensions `.siv` and `.sieve` to `application/sieve` + - Add new upstream MIME types + +2.1.22 / 2019-02-14 +=================== + + * deps: mime-db@~1.38.0 + - Add extension `.nq` to `application/n-quads` + - Add extension `.nt` to `application/n-triples` + - Add new upstream MIME types + +2.1.21 / 2018-10-19 +=================== + + * deps: mime-db@~1.37.0 + - Add extensions to HEIC image types + - Add new upstream MIME types + +2.1.20 / 2018-08-26 +=================== + + * deps: mime-db@~1.36.0 + - Add Apple file extensions from IANA + - Add extensions from IANA for `image/*` types + - Add new upstream MIME types + +2.1.19 / 2018-07-17 +=================== + + * deps: mime-db@~1.35.0 + - Add extension `.csl` to `application/vnd.citationstyles.style+xml` + - Add extension `.es` to `application/ecmascript` + - Add extension `.owl` to `application/rdf+xml` + - Add new 
upstream MIME types + - Add UTF-8 as default charset for `text/turtle` + +2.1.18 / 2018-02-16 +=================== + + * deps: mime-db@~1.33.0 + - Add `application/raml+yaml` with extension `.raml` + - Add `application/wasm` with extension `.wasm` + - Add `text/shex` with extension `.shex` + - Add extensions for JPEG-2000 images + - Add extensions from IANA for `message/*` types + - Add new upstream MIME types + - Update font MIME types + - Update `text/hjson` to registered `application/hjson` + +2.1.17 / 2017-09-01 +=================== + + * deps: mime-db@~1.30.0 + - Add `application/vnd.ms-outlook` + - Add `application/x-arj` + - Add extension `.mjs` to `application/javascript` + - Add glTF types and extensions + - Add new upstream MIME types + - Add `text/x-org` + - Add VirtualBox MIME types + - Fix `source` records for `video/*` types that are IANA + - Update `font/opentype` to registered `font/otf` + +2.1.16 / 2017-07-24 +=================== + + * deps: mime-db@~1.29.0 + - Add `application/fido.trusted-apps+json` + - Add extension `.wadl` to `application/vnd.sun.wadl+xml` + - Add extension `.gz` to `application/gzip` + - Add new upstream MIME types + - Update extensions `.md` and `.markdown` to be `text/markdown` + +2.1.15 / 2017-03-23 +=================== + + * deps: mime-db@~1.27.0 + - Add new mime types + - Add `image/apng` + +2.1.14 / 2017-01-14 +=================== + + * deps: mime-db@~1.26.0 + - Add new mime types + +2.1.13 / 2016-11-18 +=================== + + * deps: mime-db@~1.25.0 + - Add new mime types + +2.1.12 / 2016-09-18 +=================== + + * deps: mime-db@~1.24.0 + - Add new mime types + - Add `audio/mp3` + +2.1.11 / 2016-05-01 +=================== + + * deps: mime-db@~1.23.0 + - Add new mime types + +2.1.10 / 2016-02-15 +=================== + + * deps: mime-db@~1.22.0 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +2.1.9 / 2016-01-06 +================== + + * deps: mime-db@~1.21.0 + - Add new mime types + +2.1.8 / 2015-11-30 +================== + + * deps: mime-db@~1.20.0 + - Add new mime types + +2.1.7 / 2015-09-20 +================== + + * deps: mime-db@~1.19.0 + - Add new mime types + +2.1.6 / 2015-09-03 +================== + + * deps: mime-db@~1.18.0 + - Add new mime types + +2.1.5 / 2015-08-20 +================== + + * deps: mime-db@~1.17.0 + - Add new mime types + +2.1.4 / 2015-07-30 +================== + + * deps: mime-db@~1.16.0 + - Add new mime types + +2.1.3 / 2015-07-13 +================== + + * deps: mime-db@~1.15.0 + - Add new mime types + +2.1.2 / 2015-06-25 +================== + + * deps: mime-db@~1.14.0 + - Add new mime types + +2.1.1 / 2015-06-08 +================== + + * perf: fix deopt during mapping + +2.1.0 / 2015-06-07 +================== + + * Fix incorrectly treating extension-less file name as extension + - i.e. 
`'path/to/json'` will no longer return `application/json` + * Fix `.charset(type)` to accept parameters + * Fix `.charset(type)` to match case-insensitive + * Improve generation of extension to MIME mapping + * Refactor internals for readability and no argument reassignment + * Prefer `application/*` MIME types from the same source + * Prefer any type over `application/octet-stream` + * deps: mime-db@~1.13.0 + - Add nginx as a source + - Add new mime types + +2.0.14 / 2015-06-06 +=================== + + * deps: mime-db@~1.12.0 + - Add new mime types + +2.0.13 / 2015-05-31 +=================== + + * deps: mime-db@~1.11.0 + - Add new mime types + +2.0.12 / 2015-05-19 +=================== + + * deps: mime-db@~1.10.0 + - Add new mime types + +2.0.11 / 2015-05-05 +=================== + + * deps: mime-db@~1.9.1 + - Add new mime types + +2.0.10 / 2015-03-13 +=================== + + * deps: mime-db@~1.8.0 + - Add new mime types + +2.0.9 / 2015-02-09 +================== + + * deps: mime-db@~1.7.0 + - Add new mime types + - Community extensions ownership transferred from `node-mime` + +2.0.8 / 2015-01-29 +================== + + * deps: mime-db@~1.6.0 + - Add new mime types + +2.0.7 / 2014-12-30 +================== + + * deps: mime-db@~1.5.0 + - Add new mime types + - Fix various invalid MIME type entries + +2.0.6 / 2014-12-30 +================== + + * deps: mime-db@~1.4.0 + - Add new mime types + - Fix various invalid MIME type entries + - Remove example template MIME types + +2.0.5 / 2014-12-29 +================== + + * deps: mime-db@~1.3.1 + - Fix missing extensions + +2.0.4 / 2014-12-10 +================== + + * deps: mime-db@~1.3.0 + - Add new mime types + +2.0.3 / 2014-11-09 +================== + + * deps: mime-db@~1.2.0 + - Add new mime types + +2.0.2 / 2014-09-28 +================== + + * deps: mime-db@~1.1.0 + - Add new mime types + - Update charsets + +2.0.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + +2.0.0 / 2014-09-02 +================== + + * Use `mime-db` + * Remove `.define()` + +1.0.2 / 2014-08-04 +================== + + * Set charset=utf-8 for `text/javascript` + +1.0.1 / 2014-06-24 +================== + + * Add `text/jsx` type + +1.0.0 / 2014-05-12 +================== + + * Return `false` for unknown types + * Set charset=utf-8 for `application/json` + +0.1.0 / 2014-05-02 +================== + + * Initial release diff --git a/node_modules/mime-types/LICENSE b/node_modules/mime-types/LICENSE new file mode 100644 index 0000000..0616607 --- /dev/null +++ b/node_modules/mime-types/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-types/README.md b/node_modules/mime-types/README.md new file mode 100644 index 0000000..48d2fb4 --- /dev/null +++ b/node_modules/mime-types/README.md @@ -0,0 +1,113 @@ +# mime-types + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +The ultimate javascript content-type utility. + +Similar to [the `mime@1.x` module](https://www.npmjs.com/package/mime), except: + +- __No fallbacks.__ Instead of naively returning the first available type, + `mime-types` simply returns `false`, so do + `var type = mime.lookup('unrecognized') || 'application/octet-stream'`. +- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`. +- No `.define()` functionality +- Bug fixes for `.lookup(path)` + +Otherwise, the API is compatible with `mime` 1.x. + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install mime-types +``` + +## Adding Types + +All mime types are based on [mime-db](https://www.npmjs.com/package/mime-db), +so open a PR there if you'd like to add mime types. + +## API + +```js +var mime = require('mime-types') +``` + +All functions return `false` if input is invalid or not found. + +### mime.lookup(path) + +Lookup the content-type associated with a file. + +```js +mime.lookup('json') // 'application/json' +mime.lookup('.md') // 'text/markdown' +mime.lookup('file.html') // 'text/html' +mime.lookup('folder/file.js') // 'application/javascript' +mime.lookup('folder/.htaccess') // false + +mime.lookup('cats') // false +``` + +### mime.contentType(type) + +Create a full content-type header given a content-type or extension. +When given an extension, `mime.lookup` is used to get the matching +content-type, otherwise the given content-type is used. Then if the +content-type does not already have a `charset` parameter, `mime.charset` +is used to get the default charset and add to the returned content-type. + +```js +mime.contentType('markdown') // 'text/x-markdown; charset=utf-8' +mime.contentType('file.json') // 'application/json; charset=utf-8' +mime.contentType('text/html') // 'text/html; charset=utf-8' +mime.contentType('text/html; charset=iso-8859-1') // 'text/html; charset=iso-8859-1' + +// from a full path +mime.contentType(path.extname('/path/to/file.json')) // 'application/json; charset=utf-8' +``` + +### mime.extension(type) + +Get the default extension for a content-type. + +```js +mime.extension('application/octet-stream') // 'bin' +``` + +### mime.charset(type) + +Lookup the implied default charset of a content-type. + +```js +mime.charset('text/markdown') // 'UTF-8' +``` + +### var type = mime.types[extension] + +A map of content-types by extension. + +### [extensions...] = mime.extensions[type] + +A map of extensions by content-type. 
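[Editor's note] The two map properties documented just above (`mime.types` and `mime.extensions`) have no usage example in the README. A minimal sketch of reading them directly follows; the result values are illustrative of mime-db 1.52.0 and may differ across versions.

```js
var mime = require('mime-types')

// extension -> content-type (keys are bare extensions, no leading dot)
mime.types['json']   // 'application/json'
mime.types['html']   // 'text/html'

// content-type -> array of known extensions; the first entry is the default
mime.extensions['application/json']  // e.g. [ 'json', 'map' ]
mime.extensions['text/html']         // e.g. [ 'html', 'htm', 'shtml' ]
```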
+ +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/mime-types/master?label=ci +[ci-url]: https://github.com/jshttp/mime-types/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-types/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-types?branch=master +[node-version-image]: https://badgen.net/npm/node/mime-types +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-types +[npm-url]: https://npmjs.org/package/mime-types +[npm-version-image]: https://badgen.net/npm/v/mime-types diff --git a/node_modules/mime-types/index.js b/node_modules/mime-types/index.js new file mode 100644 index 0000000..b9f34d5 --- /dev/null +++ b/node_modules/mime-types/index.js @@ -0,0 +1,188 @@ +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var db = require('mime-db') +var extname = require('path').extname + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' 
+ path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. + * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} diff --git a/node_modules/mime-types/package.json b/node_modules/mime-types/package.json new file mode 100644 index 0000000..bbef696 --- /dev/null +++ b/node_modules/mime-types/package.json @@ -0,0 +1,44 @@ +{ + "name": "mime-types", + "description": "The ultimate javascript content-type utility.", + "version": "2.1.35", + "contributors": [ + "Douglas Christopher Wilson ", + "Jeremiah Senkpiel (https://searchbeam.jit.su)", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "mime", + "types" + ], + "repository": "jshttp/mime-types", + "dependencies": { + "mime-db": "1.52.0" + }, + "devDependencies": { + "eslint": "7.32.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.2.0", + "eslint-plugin-standard": "4.1.0", + "mocha": "9.2.2", + "nyc": "15.1.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec test/test.js", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md new file mode 100644 index 0000000..33ede1d --- /dev/null +++ b/node_modules/minimatch/README.md @@ -0,0 +1,230 @@ +# minimatch + +A minimal matching utility. 
+ +[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. 
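For a quick sketch of what this changes (assuming minimatch 3.x defaults; the paths and patterns here are only illustrative):

```js
var minimatch = require("minimatch")

// By default, `**` may span any number of path segments.
minimatch("a/b/x/c", "a/**/c")                        // true

// With `noglobstar`, `**` is treated like a plain `*`,
// so it can only match a single path segment.
minimatch("a/b/x/c", "a/**/c", { noglobstar: true })  // false
minimatch("a/b/c",   "a/**/c", { noglobstar: true })  // true
```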
+ +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match. + +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +### allowWindowsEscape + +Windows path separator `\` is by default converted to `/`, which +prohibits the usage of `\` as a escape character. This flag skips that +behavior and allows using the escape character. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. 
Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/minimatch/minimatch.js b/node_modules/minimatch/minimatch.js new file mode 100644 index 0000000..fda45ad --- /dev/null +++ b/node_modules/minimatch/minimatch.js @@ -0,0 +1,947 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = (function () { try { return require('path') } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. 
+ if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. 
+ return [pattern] + } + + return expand(pattern) +} + +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. 
+ if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. 
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. 
+ return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. 
+Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. 
But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. + /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json new file mode 100644 index 0000000..566efdf --- /dev/null +++ b/node_modules/minimatch/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.1.2", + "publishConfig": { + "tag": "v3-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/node_modules/node-fetch/LICENSE.md b/node_modules/node-fetch/LICENSE.md new file mode 100644 index 0000000..660ffec --- /dev/null +++ b/node_modules/node-fetch/LICENSE.md @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/node_modules/node-fetch/README.md b/node_modules/node-fetch/README.md new file mode 100644 index 0000000..55f09b7 --- /dev/null +++ b/node_modules/node-fetch/README.md @@ -0,0 +1,634 @@ +node-fetch +========== + +[![npm version][npm-image]][npm-url] +[![build status][travis-image]][travis-url] +[![coverage status][codecov-image]][codecov-url] +[![install size][install-size-image]][install-size-url] +[![Discord][discord-image]][discord-url] + +A light-weight module that brings `window.fetch` to Node.js + +(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567)) + +[![Backers][opencollective-image]][opencollective-url] + + + +- [Motivation](#motivation) +- [Features](#features) +- [Difference from client-side fetch](#difference-from-client-side-fetch) +- [Installation](#installation) +- [Loading and configuring the module](#loading-and-configuring-the-module) +- [Common Usage](#common-usage) + - [Plain text or HTML](#plain-text-or-html) + - [JSON](#json) + - [Simple Post](#simple-post) + - [Post with JSON](#post-with-json) + - [Post with form parameters](#post-with-form-parameters) + - [Handling exceptions](#handling-exceptions) + - [Handling client and server errors](#handling-client-and-server-errors) +- [Advanced Usage](#advanced-usage) + - [Streams](#streams) + - [Buffer](#buffer) + - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data) + - [Extract Set-Cookie Header](#extract-set-cookie-header) + - [Post data using a file stream](#post-data-using-a-file-stream) + - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart) + - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal) +- [API](#api) + - [fetch(url[, options])](#fetchurl-options) + - [Options](#options) + - [Class: Request](#class-request) + - [Class: Response](#class-response) + - [Class: Headers](#class-headers) + - [Interface: Body](#interface-body) + - [Class: FetchError](#class-fetcherror) +- [License](#license) +- [Acknowledgement](#acknowledgement) + + + +## Motivation + +Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime. + +See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side). + +## Features + +- Stay consistent with `window.fetch` API. +- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences. +- Use native promise but allow substituting it with [insert your favorite promise library]. +- Use native Node streams for body on both request and response. +- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically. +- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting. + +## Difference from client-side fetch + +- See [Known Differences](LIMITS.md) for details. +- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue. +- Pull requests are welcomed too! 
+ +## Installation + +Current stable release (`2.x`) + +```sh +$ npm install node-fetch +``` + +## Loading and configuring the module +We suggest you load the module via `require` until the stabilization of ES modules in node: +```js +const fetch = require('node-fetch'); +``` + +If you are using a Promise library other than native, set it through `fetch.Promise`: +```js +const Bluebird = require('bluebird'); + +fetch.Promise = Bluebird; +``` + +## Common Usage + +NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences. + +#### Plain text or HTML +```js +fetch('https://github.com/') + .then(res => res.text()) + .then(body => console.log(body)); +``` + +#### JSON + +```js + +fetch('https://api.github.com/users/github') + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Simple Post +```js +fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' }) + .then(res => res.json()) // expecting a json response + .then(json => console.log(json)); +``` + +#### Post with JSON + +```js +const body = { a: 1 }; + +fetch('https://httpbin.org/post', { + method: 'post', + body: JSON.stringify(body), + headers: { 'Content-Type': 'application/json' }, + }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form parameters +`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods. + +NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such: + +```js +const { URLSearchParams } = require('url'); + +const params = new URLSearchParams(); +params.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: params }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Handling exceptions +NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information. + +Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details. + +```js +fetch('https://domain.invalid/') + .catch(err => console.error(err)); +``` + +#### Handling client and server errors +It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses: + +```js +function checkStatus(res) { + if (res.ok) { // res.status >= 200 && res.status < 300 + return res; + } else { + throw MyCustomError(res.statusText); + } +} + +fetch('https://httpbin.org/status/400') + .then(checkStatus) + .then(res => console.log('will not get here...')) +``` + +## Advanced Usage + +#### Streams +The "Node.js way" is to use streams when possible: + +```js +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => { + const dest = fs.createWriteStream('./octocat.png'); + res.body.pipe(dest); + }); +``` + +In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch +errors -- the longer a response runs, the more likely it is to encounter an error. 
+ +```js +const fetch = require('node-fetch'); +const response = await fetch('https://httpbin.org/stream/3'); +try { + for await (const chunk of response.body) { + console.dir(JSON.parse(chunk.toString())); + } +} catch (err) { + console.error(err.stack); +} +``` + +In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams +did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors +directly from the stream and wait on it response to fully close. + +```js +const fetch = require('node-fetch'); +const read = async body => { + let error; + body.on('error', err => { + error = err; + }); + for await (const chunk of body) { + console.dir(JSON.parse(chunk.toString())); + } + return new Promise((resolve, reject) => { + body.on('close', () => { + error ? reject(error) : resolve(); + }); + }); +}; +try { + const response = await fetch('https://httpbin.org/stream/3'); + await read(response.body); +} catch (err) { + console.error(err.stack); +} +``` + +#### Buffer +If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API) + +```js +const fileType = require('file-type'); + +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => res.buffer()) + .then(buffer => fileType(buffer)) + .then(type => { /* ... */ }); +``` + +#### Accessing Headers and other Meta data +```js +fetch('https://github.com/') + .then(res => { + console.log(res.ok); + console.log(res.status); + console.log(res.statusText); + console.log(res.headers.raw()); + console.log(res.headers.get('content-type')); + }); +``` + +#### Extract Set-Cookie Header + +Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API. + +```js +fetch(url).then(res => { + // returns an array of values, instead of a string of comma-separated values + console.log(res.headers.raw()['set-cookie']); +}); +``` + +#### Post data using a file stream + +```js +const { createReadStream } = require('fs'); + +const stream = createReadStream('input.txt'); + +fetch('https://httpbin.org/post', { method: 'POST', body: stream }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form-data (detect multipart) + +```js +const FormData = require('form-data'); + +const form = new FormData(); +form.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: form }) + .then(res => res.json()) + .then(json => console.log(json)); + +// OR, using custom headers +// NOTE: getHeaders() is non-standard API + +const form = new FormData(); +form.append('a', 1); + +const options = { + method: 'POST', + body: form, + headers: form.getHeaders() +} + +fetch('https://httpbin.org/post', options) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Request cancellation with AbortSignal + +> NOTE: You may cancel streamed requests only on Node >= v8.0.0 + +You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller). 
+ +An example of timing out a request after 150ms could be achieved as the following: + +```js +import AbortController from 'abort-controller'; + +const controller = new AbortController(); +const timeout = setTimeout( + () => { controller.abort(); }, + 150, +); + +fetch(url, { signal: controller.signal }) + .then(res => res.json()) + .then( + data => { + useData(data) + }, + err => { + if (err.name === 'AbortError') { + // request was aborted + } + }, + ) + .finally(() => { + clearTimeout(timeout); + }); +``` + +See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples. + + +## API + +### fetch(url[, options]) + +- `url` A string representing the URL for fetching +- `options` [Options](#fetch-options) for the HTTP(S) request +- Returns: Promise<[Response](#class-response)> + +Perform an HTTP(S) fetch. + +`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`. + + +### Options + +The default values are shown after each option key. + +```js +{ + // These properties are part of the Fetch Standard + method: 'GET', + headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below) + body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream + redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect + signal: null, // pass an instance of AbortSignal to optionally abort requests + + // The following properties are node-fetch extensions + follow: 20, // maximum redirect count. 0 to not follow redirect + timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead. + compress: true, // support gzip/deflate content encoding. false to disable + size: 0, // maximum response body size in bytes. 0 to disable + agent: null // http(s).Agent instance or function that returns an instance (see below) +} +``` + +##### Default Headers + +If no values are set, the following request headers will be sent automatically: + +Header | Value +------------------- | -------------------------------------------------------- +`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_ +`Accept` | `*/*` +`Content-Length` | _(automatically calculated, if possible)_ +`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_ +`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)` + +Note: when `body` is a `Stream`, `Content-Length` is not set automatically. + +##### Custom Agent + +The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following: + +- Support self-signed certificate +- Use only IPv4 or IPv6 +- Custom DNS Lookup + +See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information. + +If no agent is specified, the default agent provided by Node.js is used. Note that [this changed in Node.js 19](https://github.com/nodejs/node/blob/4267b92604ad78584244488e7f7508a690cb80d0/lib/_http_agent.js#L564) to have `keepalive` true by default. If you wish to enable `keepalive` in an earlier version of Node.js, you can override the agent as per the following code sample. 
+ +In addition, the `agent` option accepts a function that returns `http`(s)`.Agent` instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol. + +```js +const httpAgent = new http.Agent({ + keepAlive: true +}); +const httpsAgent = new https.Agent({ + keepAlive: true +}); + +const options = { + agent: function (_parsedURL) { + if (_parsedURL.protocol == 'http:') { + return httpAgent; + } else { + return httpsAgent; + } + } +} +``` + + +### Class: Request + +An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface. + +Due to the nature of Node.js, the following properties are not implemented at this moment: + +- `type` +- `destination` +- `referrer` +- `referrerPolicy` +- `mode` +- `credentials` +- `cache` +- `integrity` +- `keepalive` + +The following node-fetch extension properties are provided: + +- `follow` +- `compress` +- `counter` +- `agent` + +See [options](#fetch-options) for exact meaning of these extensions. + +#### new Request(input[, options]) + +*(spec-compliant)* + +- `input` A string representing a URL, or another `Request` (which will be cloned) +- `options` [Options][#fetch-options] for the HTTP(S) request + +Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request). + +In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object. + + +### Class: Response + +An HTTP(S) response. This class implements the [Body](#iface-body) interface. + +The following properties are not implemented in node-fetch at this moment: + +- `Response.error()` +- `Response.redirect()` +- `type` +- `trailer` + +#### new Response([body[, options]]) + +*(spec-compliant)* + +- `body` A `String` or [`Readable` stream][node-readable] +- `options` A [`ResponseInit`][response-init] options dictionary + +Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response). + +Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly. + +#### response.ok + +*(spec-compliant)* + +Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300. + +#### response.redirected + +*(spec-compliant)* + +Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0. + + +### Class: Headers + +This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented. + +#### new Headers([init]) + +*(spec-compliant)* + +- `init` Optional argument to pre-fill the `Headers` object + +Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, an key-value map object or any iterable object. 
+ +```js +// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class + +const meta = { + 'Content-Type': 'text/xml', + 'Breaking-Bad': '<3' +}; +const headers = new Headers(meta); + +// The above is equivalent to +const meta = [ + [ 'Content-Type', 'text/xml' ], + [ 'Breaking-Bad', '<3' ] +]; +const headers = new Headers(meta); + +// You can in fact use any iterable objects, like a Map or even another Headers +const meta = new Map(); +meta.set('Content-Type', 'text/xml'); +meta.set('Breaking-Bad', '<3'); +const headers = new Headers(meta); +const copyOfHeaders = new Headers(headers); +``` + + +### Interface: Body + +`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes. + +The following methods are not yet implemented in node-fetch at this moment: + +- `formData()` + +#### body.body + +*(deviation from spec)* + +* Node.js [`Readable` stream][node-readable] + +Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable]. + +#### body.bodyUsed + +*(spec-compliant)* + +* `Boolean` + +A boolean property for if this body has been consumed. Per the specs, a consumed body cannot be used again. + +#### body.arrayBuffer() +#### body.blob() +#### body.json() +#### body.text() + +*(spec-compliant)* + +* Returns: Promise + +Consume the body and return a promise that will resolve to one of these formats. + +#### body.buffer() + +*(node-fetch extension)* + +* Returns: Promise<Buffer> + +Consume the body and return a promise that will resolve to a Buffer. + +#### body.textConverted() + +*(node-fetch extension)* + +* Returns: Promise<String> + +Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible. + +(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.) + + +### Class: FetchError + +*(node-fetch extension)* + +An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info. + + +### Class: AbortError + +*(node-fetch extension)* + +An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info. + +## Acknowledgement + +Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference. + +`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr). 
+ +## License + +MIT + +[npm-image]: https://flat.badgen.net/npm/v/node-fetch +[npm-url]: https://www.npmjs.com/package/node-fetch +[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch +[travis-url]: https://travis-ci.org/bitinn/node-fetch +[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master +[codecov-url]: https://codecov.io/gh/bitinn/node-fetch +[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch +[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch +[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square +[discord-url]: https://discord.gg/Zxbndcm +[opencollective-image]: https://opencollective.com/node-fetch/backers.svg +[opencollective-url]: https://opencollective.com/node-fetch +[whatwg-fetch]: https://fetch.spec.whatwg.org/ +[response-init]: https://fetch.spec.whatwg.org/#responseinit +[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams +[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers +[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md +[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md +[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md diff --git a/node_modules/node-fetch/browser.js b/node_modules/node-fetch/browser.js new file mode 100644 index 0000000..ee86265 --- /dev/null +++ b/node_modules/node-fetch/browser.js @@ -0,0 +1,25 @@ +"use strict"; + +// ref: https://github.com/tc39/proposal-global +var getGlobal = function () { + // the only reliable means to get the global object is + // `Function('return this')()` + // However, this causes CSP violations in Chrome apps. + if (typeof self !== 'undefined') { return self; } + if (typeof window !== 'undefined') { return window; } + if (typeof global !== 'undefined') { return global; } + throw new Error('unable to locate global object'); +} + +var globalObject = getGlobal(); + +module.exports = exports = globalObject.fetch; + +// Needed for TypeScript and Webpack. +if (globalObject.fetch) { + exports.default = globalObject.fetch.bind(globalObject); +} + +exports.Headers = globalObject.Headers; +exports.Request = globalObject.Request; +exports.Response = globalObject.Response; diff --git a/node_modules/node-fetch/lib/index.es.js b/node_modules/node-fetch/lib/index.es.js new file mode 100644 index 0000000..aae9799 --- /dev/null +++ b/node_modules/node-fetch/lib/index.es.js @@ -0,0 +1,1777 @@ +process.emitWarning("The .es.js file is deprecated. 
Use .mjs instead."); + +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + 
this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js new file mode 100644 index 0000000..567ff5d --- /dev/null +++ b/node_modules/node-fetch/lib/index.js @@ -0,0 +1,1787 @@ +'use strict'; + 
+Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var Stream = _interopDefault(require('stream')); +var http = _interopDefault(require('http')); +var Url = _interopDefault(require('url')); +var whatwgUrl = _interopDefault(require('whatwg-url')); +var https = _interopDefault(require('https')); +var zlib = _interopDefault(require('zlib')); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError 
For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; +exports.AbortError = AbortError; diff --git a/node_modules/node-fetch/lib/index.mjs 
b/node_modules/node-fetch/lib/index.mjs new file mode 100644 index 0000000..2863dd9 --- /dev/null +++ b/node_modules/node-fetch/lib/index.mjs @@ -0,0 +1,1775 @@ +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + 
this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/node_modules/node-fetch/package.json b/node_modules/node-fetch/package.json new file mode 100644 index 0000000..e0be176 --- /dev/null +++ b/node_modules/node-fetch/package.json @@ -0,0 +1,89 @@ +{ + "name": "node-fetch", + "version": "2.7.0", 
+ "description": "A light-weight module that brings window.fetch to node.js", + "main": "lib/index.js", + "browser": "./browser.js", + "module": "lib/index.mjs", + "files": [ + "lib/index.js", + "lib/index.mjs", + "lib/index.es.js", + "browser.js" + ], + "engines": { + "node": "4.x || >=6.0.0" + }, + "scripts": { + "build": "cross-env BABEL_ENV=rollup rollup -c", + "prepare": "npm run build", + "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js", + "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js", + "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json" + }, + "repository": { + "type": "git", + "url": "https://github.com/bitinn/node-fetch.git" + }, + "keywords": [ + "fetch", + "http", + "promise" + ], + "author": "David Frank", + "license": "MIT", + "bugs": { + "url": "https://github.com/bitinn/node-fetch/issues" + }, + "homepage": "https://github.com/bitinn/node-fetch", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + }, + "devDependencies": { + "@ungap/url-search-params": "^0.1.2", + "abort-controller": "^1.1.0", + "abortcontroller-polyfill": "^1.3.0", + "babel-core": "^6.26.3", + "babel-plugin-istanbul": "^4.1.6", + "babel-plugin-transform-async-generator-functions": "^6.24.1", + "babel-polyfill": "^6.26.0", + "babel-preset-env": "1.4.0", + "babel-register": "^6.16.3", + "chai": "^3.5.0", + "chai-as-promised": "^7.1.1", + "chai-iterator": "^1.1.1", + "chai-string": "~1.3.0", + "codecov": "3.3.0", + "cross-env": "^5.2.0", + "form-data": "^2.3.3", + "is-builtin-module": "^1.0.0", + "mocha": "^5.0.0", + "nyc": "11.9.0", + "parted": "^0.1.1", + "promise": "^8.0.3", + "resumer": "0.0.0", + "rollup": "^0.63.4", + "rollup-plugin-babel": "^3.0.7", + "string-to-arraybuffer": "^1.0.2", + "teeny-request": "3.7.0" + }, + "release": { + "branches": [ + "+([0-9]).x", + "main", + "next", + { + "name": "beta", + "prerelease": true + } + ] + } +} diff --git a/node_modules/sax/LICENSE b/node_modules/sax/LICENSE new file mode 100644 index 0000000..6e8e4c1 --- /dev/null +++ b/node_modules/sax/LICENSE @@ -0,0 +1,41 @@ +The ISC License + +Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +==== + +`String.fromCodePoint` by Mathias Bynens used according to terms of MIT +License, as follows: + +Copyright (c) 2010-2022 Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/sax/README.md b/node_modules/sax/README.md new file mode 100644 index 0000000..afcd3f3 --- /dev/null +++ b/node_modules/sax/README.md @@ -0,0 +1,225 @@ +# sax js + +A sax-style parser for XML and HTML. + +Designed with [node](http://nodejs.org/) in mind, but should work fine in +the browser or other CommonJS implementations. + +## What This Is + +* A very simple tool to parse through an XML string. +* A stepping stone to a streaming HTML parser. +* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML + docs. + +## What This Is (probably) Not + +* An HTML Parser - That's a fine goal, but this isn't it. It's just + XML. +* A DOM Builder - You can use it to build an object model out of XML, + but it doesn't do that out of the box. +* XSLT - No DOM = no querying. +* 100% Compliant with (some other SAX implementation) - Most SAX + implementations are in Java and do a lot more than this does. +* An XML Validator - It does a little validation when in strict mode, but + not much. +* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic + masochism. +* A DTD-aware Thing - Fetching DTDs is a much bigger job. + +## Regarding `Hello, world!').close(); + +// stream usage +// takes the same options as the parser +var saxStream = require("sax").createStream(strict, options) +saxStream.on("error", function (e) { + // unhandled errors will throw, since this is a proper node + // event emitter. + console.error("error!", e) + // clear the error + this._parser.error = null + this._parser.resume() +}) +saxStream.on("opentag", function (node) { + // same object as above +}) +// pipe is supported, and it's readable/writable +// same chunks coming in also go out. +fs.createReadStream("file.xml") + .pipe(saxStream) + .pipe(fs.createWriteStream("file-copy.xml")) +``` + + +## Arguments + +Pass the following arguments to the parser function. All are optional. + +`strict` - Boolean. Whether or not to be a jerk. Default: `false`. + +`opt` - Object bag of settings regarding string formatting. All default to `false`. + +Settings supported: + +* `trim` - Boolean. Whether or not to trim text and comment nodes. +* `normalize` - Boolean. If true, then turn any whitespace into a single + space. +* `lowercase` - Boolean. 
+  in loose mode, rather than uppercasing them.
+* `xmlns` - Boolean. If true, then namespaces are supported.
+* `position` - Boolean. If false, then don't track line/col/position.
+* `strictEntities` - Boolean. If true, only parse [predefined XML
+  entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent)
+  (`&amp;`, `&apos;`, `&gt;`, `&lt;`, and `&quot;`)
+
+## Methods
+
+`write` - Write bytes onto the stream. You don't have to do this all at
+once. You can keep writing as much as you want.
+
+`close` - Close the stream. Once closed, no more data may be written until
+it is done processing the buffer, which is signaled by the `end` event.
+
+`resume` - To gracefully handle errors, assign a listener to the `error`
+event. Then, when the error is taken care of, you can call `resume` to
+continue parsing. Otherwise, the parser will not continue while in an error
+state.
+
+## Members
+
+At all times, the parser object will have the following members:
+
+`line`, `column`, `position` - Indications of the position in the XML
+document where the parser currently is looking.
+
+`startTagPosition` - Indicates the position where the current tag starts.
+
+`closed` - Boolean indicating whether or not the parser can be written to.
+If it's `true`, then wait for the `ready` event to write again.
+
+`strict` - Boolean indicating whether or not the parser is a jerk.
+
+`opt` - Any options passed into the constructor.
+
+`tag` - The current tag being dealt with.
+
+And a bunch of other stuff that you probably shouldn't touch.
+
+## Events
+
+All events emit with a single argument. To listen to an event, assign a
+function to `on<eventname>`. Functions get executed in the this-context of
+the parser object. The list of supported events are also in the exported
+`EVENTS` array.
+
+When using the stream interface, assign handlers using the EventEmitter
+`on` function in the normal fashion.
+
+`error` - Indication that something bad happened. The error will be hanging
+out on `parser.error`, and must be deleted before parsing can continue. By
+listening to this event, you can keep an eye on that kind of stuff. Note:
+this happens *much* more in strict mode. Argument: instance of `Error`.
+
+`text` - Text node. Argument: string of text.
+
+`doctype` - The `<!DOCTYPE` declaration. Argument: doctype string.
+
+`processinginstruction` - Stuff like `<?xml foo="blerg" ?>`. Argument:
+object with `name` and `body` members. Attributes are not parsed, as
+processing instructions have implementation dependent semantics.
+
+`sgmldeclaration` - Random SGML declarations. Stuff like `<!ENTITY p>`
+would trigger this kind of event. This is a weird thing to support, so it
+might go away at some point. SAX isn't intended to be used to parse SGML,
+after all.
+
+`opentagstart` - Emitted immediately when the tag name is available,
+but before any attributes are encountered. Argument: object with a
+`name` field and an empty `attributes` set. Note that this is the
+same object that will later be emitted in the `opentag` event.
+
+`opentag` - An opening tag. Argument: object with `name` and `attributes`.
+In non-strict mode, tag names are uppercased, unless the `lowercase`
+option is set. If the `xmlns` option is set, then it will contain
+namespace binding information on the `ns` member, and will have a
+`local`, `prefix`, and `uri` member.
+
+`closetag` - A closing tag. In loose mode, tags are auto-closed if their
+parent closes. In strict mode, well-formedness is enforced. Note that
+self-closing tags will have `closeTag` emitted immediately after `openTag`.
+Argument: tag name.
+
+`attribute` - An attribute node. Argument: object with `name` and `value`.
+In non-strict mode, attribute names are uppercased, unless the `lowercase`
+option is set. If the `xmlns` option is set, it will also contain namespace
+information.
+
+`comment` - A comment node. Argument: the string of the comment.
+
+`opencdata` - The opening tag of a `<![CDATA[` block.
+
+`cdata` - The text of a `<![CDATA[` block. Since `<![CDATA[` blocks can get
+quite large, this event may fire multiple times for a single block, if it
+is broken up into multiple `write()`s. Argument: the string of random
+character data.
+
+`closecdata` - The closing tag (`]]>`) of a `<![CDATA[` block.
+
+`opennamespace` - If the `xmlns` option is set, then this event will
+signal the start of a new namespace binding.
+
+`closenamespace` - If the `xmlns` option is set, then this event will
+signal the end of a namespace binding.
+
+`end` - Indication that the closed stream has ended.
+
+`ready` - Indication that the stream has reset, and is ready to be written
+to.
+
+`script` - In non-strict mode, `<script>` tags trigger a `"script"`
+event, and their contents are not checked for special xml characters.
+If you pass `noscript: true`, then this behavior is suppressed.
+
+## Reporting Problems
+
+It's best to write a failing test if you find an issue. I will always
+accept pull requests with failing tests if they demonstrate intended
+behavior, but it is very hard to figure out what issue you're describing
+without a test. Writing a test is also the best way for you yourself
+to figure out if you really understand the issue you think you have with
+sax-js.
diff --git a/node_modules/sax/lib/sax.js b/node_modules/sax/lib/sax.js
new file mode 100644
index 0000000..ffd441a
--- /dev/null
+++ b/node_modules/sax/lib/sax.js
@@ -0,0 +1,1574 @@
+;(function (sax) { // wrapper for non-node envs
+  sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
+  sax.SAXParser = SAXParser
+  sax.SAXStream = SAXStream
+  sax.createStream = createStream
+
+  // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
+  // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),
+  // since that's the earliest that a buffer overrun could occur. This way, checks are
+  // as rare as required, but as often as necessary to ensure never crossing this bound.
+  // Furthermore, buffers are only tested at most once per write(), so passing a very
+  // large string into write() might have undesirable effects, but this is manageable by
+  // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme
+  // edge case, result in creating at most one complete copy of the string passed in.
+  // Set to Infinity to have unlimited buffers.
+  sax.MAX_BUFFER_LENGTH = 64 * 1024
+
+  var buffers = [
+    'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
+    'procInstName', 'procInstBody', 'entity', 'attribName',
+    'attribValue', 'cdata', 'script'
+  ]
+
+  sax.EVENTS = [
+    'text',
+    'processinginstruction',
+    'sgmldeclaration',
+    'doctype',
+    'comment',
+    'opentagstart',
+    'attribute',
+    'opentag',
+    'closetag',
+    'opencdata',
+    'cdata',
+    'closecdata',
+    'error',
+    'end',
+    'ready',
+    'script',
+    'opennamespace',
+    'closenamespace'
+  ]
+
+  function SAXParser (strict, opt) {
+    if (!(this instanceof SAXParser)) {
+      return new SAXParser(strict, opt)
+    }
+
+    var parser = this
+    clearBuffers(parser)
+    parser.q = parser.c = ''
+    parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH
+    parser.opt = opt || {}
+    parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags
+    parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'
+    parser.tags = []
+    parser.closed = parser.closedRoot = parser.sawRoot = false
+    parser.tag = parser.error = null
+    parser.strict = !!strict
+    parser.noscript = !!(strict || parser.opt.noscript)
+    parser.state = S.BEGIN
+    parser.strictEntities = parser.opt.strictEntities
+    parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
+    parser.attribList = []
+
+    // namespaces form a prototype chain.
+    // it always points at the current tag,
+    // which protos to its parent tag.
+ if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. + var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = require('stream').Stream + } catch (ex) { + Stream = function () {} + } + if (!Stream) Stream = function () {} + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. 
+ me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = require('string_decoder').StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. 
+ OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, // \ No newline at end of file diff --git a/node_modules/tslib/tslib.es6.js b/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000..7be1c94 --- /dev/null +++ b/node_modules/tslib/tslib.es6.js @@ -0,0 +1,370 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise, SuppressedError, Symbol */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? 
"set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; + +export function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; + +export function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +}; + +export function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +}; + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} + +export function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; +} + +var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; +}; + +export function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + function next() { + while (env.stack.length) { + var rec = env.stack.pop(); + try { + var result = rec.dispose && rec.dispose.call(rec.value); + if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + catch (e) { + fail(e); + } + } + if (env.hasError) throw env.error; + } + return next(); +} + +export default { + __extends: __extends, + __assign: __assign, + __rest: __rest, + __decorate: __decorate, + __param: __param, + __metadata: __metadata, + __awaiter: __awaiter, + __generator: __generator, + __createBinding: __createBinding, + __exportStar: __exportStar, + __values: __values, + __read: __read, + __spread: __spread, + __spreadArrays: __spreadArrays, + __spreadArray: __spreadArray, + __await: __await, + __asyncGenerator: __asyncGenerator, + __asyncDelegator: __asyncDelegator, + __asyncValues: __asyncValues, + __makeTemplateObject: __makeTemplateObject, + __importStar: __importStar, + __importDefault: __importDefault, + __classPrivateFieldGet: __classPrivateFieldGet, + __classPrivateFieldSet: __classPrivateFieldSet, + __classPrivateFieldIn: __classPrivateFieldIn, + __addDisposableResource: __addDisposableResource, + __disposeResources: __disposeResources, +}; diff --git a/node_modules/tslib/tslib.es6.mjs b/node_modules/tslib/tslib.es6.mjs new file mode 100644 index 0000000..c8e2999 --- /dev/null +++ b/node_modules/tslib/tslib.es6.mjs @@ -0,0 +1,370 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise, SuppressedError, Symbol */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? 
{} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; + +export function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; + +export function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +}; + +export function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +}; + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} + +export function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; +} + +var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; +}; + +export function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + function next() { + while (env.stack.length) { + var rec = env.stack.pop(); + try { + var result = rec.dispose && rec.dispose.call(rec.value); + if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + catch (e) { + fail(e); + } + } + if (env.hasError) throw env.error; + } + return next(); +} + +export default { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __createBinding, + __exportStar, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, +}; diff --git a/node_modules/tslib/tslib.html b/node_modules/tslib/tslib.html new file mode 100644 index 0000000..44c9ba5 --- /dev/null +++ b/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/tslib/tslib.js b/node_modules/tslib/tslib.js new file mode 100644 index 0000000..343ecde --- /dev/null +++ b/node_modules/tslib/tslib.js @@ -0,0 +1,421 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + function next() { + while (env.stack.length) { + var rec = env.stack.pop(); + try { + var result = rec.dispose && rec.dispose.call(rec.value); + if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + catch (e) { + fail(e); + } + } + if (env.hasError) throw env.error; + } + return next(); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); +}); diff --git a/node_modules/undici/LICENSE b/node_modules/undici/LICENSE new file mode 100644 index 0000000..e7323bb --- /dev/null +++ b/node_modules/undici/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Matteo Collina and Undici contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/undici/README.md b/node_modules/undici/README.md new file mode 100644 index 0000000..3ba8989 --- /dev/null +++ b/node_modules/undici/README.md @@ -0,0 +1,443 @@ +# undici + +[![Node CI](https://github.com/nodejs/undici/actions/workflows/nodejs.yml/badge.svg)](https://github.com/nodejs/undici/actions/workflows/nodejs.yml) [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/) [![npm version](https://badge.fury.io/js/undici.svg)](https://badge.fury.io/js/undici) [![codecov](https://codecov.io/gh/nodejs/undici/branch/main/graph/badge.svg?token=yZL6LtXkOA)](https://codecov.io/gh/nodejs/undici) + +An HTTP/1.1 client, written from scratch for Node.js. + +> Undici means eleven in Italian. 1.1 -> 11 -> Eleven -> Undici. +It is also a Stranger Things reference. + +Have a question about using Undici? Open a [Q&A Discussion](https://github.com/nodejs/undici/discussions/new) or join our official OpenJS [Slack](https://openjs-foundation.slack.com/archives/C01QF9Q31QD) channel. + +## Install + +``` +npm i undici +``` + +## Benchmarks + +The benchmark is a simple `hello world` [example](benchmarks/benchmark.js) using a +number of unix sockets (connections) with a pipelining depth of 10 running on Node 20.6.0. + +### Connections 1 + + +| Tests | Samples | Result | Tolerance | Difference with slowest | +|---------------------|---------|---------------|-----------|-------------------------| +| http - no keepalive | 15 | 5.32 req/sec | ± 2.61 % | - | +| http - keepalive | 10 | 5.35 req/sec | ± 2.47 % | + 0.44 % | +| undici - fetch | 15 | 41.85 req/sec | ± 2.49 % | + 686.04 % | +| undici - pipeline | 40 | 50.36 req/sec | ± 2.77 % | + 845.92 % | +| undici - stream | 15 | 60.58 req/sec | ± 2.75 % | + 1037.72 % | +| undici - request | 10 | 61.19 req/sec | ± 2.60 % | + 1049.24 % | +| undici - dispatch | 20 | 64.84 req/sec | ± 2.81 % | + 1117.81 % | + + +### Connections 50 + +| Tests | Samples | Result | Tolerance | Difference with slowest | +|---------------------|---------|------------------|-----------|-------------------------| +| undici - fetch | 30 | 2107.19 req/sec | ± 2.69 % | - | +| http - no keepalive | 10 | 2698.90 req/sec | ± 2.68 % | + 28.08 % | +| http - keepalive | 10 | 4639.49 req/sec | ± 2.55 % | + 120.17 % | +| undici - pipeline | 40 | 6123.33 req/sec | ± 2.97 % | + 190.59 % | +| undici - stream | 50 | 9426.51 req/sec | ± 2.92 % | + 347.35 % | +| undici - request | 10 | 10162.88 req/sec | ± 2.13 % | + 382.29 % | +| undici - dispatch | 50 | 11191.11 req/sec | ± 2.98 % | + 431.09 % | + + +## Quick Start + +```js +import { request } from 'undici' + +const { + statusCode, + headers, + trailers, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) +console.log('headers', headers) + +for await (const data of body) { + console.log('data', data) +} + +console.log('trailers', trailers) +``` + +## Body Mixins + +The `body` mixins are the most common way to format the request/response body. 
Mixins include: + +- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata) +- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json) +- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text) + +Example usage: + +```js +import { request } from 'undici' + +const { + statusCode, + headers, + trailers, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) +console.log('headers', headers) +console.log('data', await body.json()) +console.log('trailers', trailers) +``` + +_Note: Once a mixin has been called then the body cannot be reused, thus calling additional mixins on `.body`, e.g. `.body.json(); .body.text()` will result in an error `TypeError: unusable` being thrown and returned through the `Promise` rejection._ + +Should you need to access the `body` in plain-text after using a mixin, the best practice is to use the `.text()` mixin first and then manually parse the text to the desired format. + +For more information about their behavior, please reference the body mixin from the [Fetch Standard](https://fetch.spec.whatwg.org/#body-mixin). + +## Common API Methods + +This section documents our most commonly used API methods. Additional APIs are documented in their own files within the [docs](./docs/) folder and are accessible via the navigation list on the left side of the docs site. + +### `undici.request([url, options]): Promise` + +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`RequestOptions`](./docs/api/Dispatcher.md#parameter-requestoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **method** `String` - Default: `PUT` if `options.body`, otherwise `GET` + * **maxRedirections** `Integer` - Default: `0` + +Returns a promise with the result of the `Dispatcher.request` method. + +Calls `options.dispatcher.request(options)`. + +See [Dispatcher.request](./docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details. + +### `undici.stream([url, options, ]factory): Promise` + +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`StreamOptions`](./docs/api/Dispatcher.md#parameter-streamoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **method** `String` - Default: `PUT` if `options.body`, otherwise `GET` + * **maxRedirections** `Integer` - Default: `0` +* **factory** `Dispatcher.stream.factory` + +Returns a promise with the result of the `Dispatcher.stream` method. + +Calls `options.dispatcher.stream(options, factory)`. + +See [Dispatcher.stream](docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details. + +### `undici.pipeline([url, options, ]handler): Duplex` + +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`PipelineOptions`](docs/api/Dispatcher.md#parameter-pipelineoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **method** `String` - Default: `PUT` if `options.body`, otherwise `GET` + * **maxRedirections** `Integer` - Default: `0` +* **handler** `Dispatcher.pipeline.handler` + +Returns: `stream.Duplex` + +Calls `options.dispatch.pipeline(options, handler)`. + +See [Dispatcher.pipeline](docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details. + +### `undici.connect([url, options]): Promise` + +Starts two-way communications with the requested resource using [HTTP CONNECT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/CONNECT). 
+ +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`ConnectOptions`](docs/api/Dispatcher.md#parameter-connectoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **maxRedirections** `Integer` - Default: `0` +* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional) + +Returns a promise with the result of the `Dispatcher.connect` method. + +Calls `options.dispatcher.connect(options)`. + +See [Dispatcher.connect](docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details. + +### `undici.fetch(input[, init]): Promise` + +Implements [fetch](https://fetch.spec.whatwg.org/#fetch-method). + +* https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch +* https://fetch.spec.whatwg.org/#fetch-method + +Only supported on Node 16.8+. + +Basic usage example: + +```js +import { fetch } from 'undici' + + +const res = await fetch('https://example.com') +const json = await res.json() +console.log(json) +``` + +You can pass an optional dispatcher to `fetch` as: + +```js +import { fetch, Agent } from 'undici' + +const res = await fetch('https://example.com', { + // Mocks are also supported + dispatcher: new Agent({ + keepAliveTimeout: 10, + keepAliveMaxTimeout: 10 + }) +}) +const json = await res.json() +console.log(json) +``` + +#### `request.body` + +A body can be of the following types: + +- ArrayBuffer +- ArrayBufferView +- AsyncIterables +- Blob +- Iterables +- String +- URLSearchParams +- FormData + +In this implementation of fetch, ```request.body``` now accepts ```Async Iterables```, which are not present in the [Fetch Standard](https://fetch.spec.whatwg.org). + +```js +import { fetch } from 'undici' + +const data = { + async *[Symbol.asyncIterator]() { + yield 'hello' + yield 'world' + }, +} + +await fetch('https://example.com', { body: data, method: 'POST', duplex: 'half' }) +``` + +#### `request.duplex` + +- half + +In this implementation of fetch, `request.duplex` must be set if `request.body` is a `ReadableStream` or an `Async Iterable`. Fetch requests are currently always full duplex. For more detail, refer to the [Fetch Standard](https://fetch.spec.whatwg.org/#dom-requestinit-duplex). + +#### `response.body` + +Node.js has two kinds of streams: [web streams](https://nodejs.org/dist/latest-v16.x/docs/api/webstreams.html), which follow the API of the WHATWG web standard found in browsers, and an older Node-specific [streams API](https://nodejs.org/api/stream.html). `response.body` returns a readable web stream. If you would prefer to work with a Node stream, you can convert a web stream using `Readable.fromWeb()`. + +```js +import { fetch } from 'undici' +import { Readable } from 'node:stream' + +const response = await fetch('https://example.com') +const readableWebStream = response.body +const readableNodeStream = Readable.fromWeb(readableWebStream) +``` + +#### Specification Compliance + +This section documents parts of the [Fetch Standard](https://fetch.spec.whatwg.org) that Undici does +not support or does not fully implement. + +##### Garbage Collection + +* https://fetch.spec.whatwg.org/#garbage-collection + +The [Fetch Standard](https://fetch.spec.whatwg.org) allows users to skip consuming the response body by relying on +[garbage collection](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Memory_Management#garbage_collection) to release connection resources. Undici does not do the same. Therefore, it is important to always either consume or cancel the response body. 
+ +Garbage collection in Node is less aggressive and deterministic +(due to the lack of clear idle periods that browsers have through the rendering refresh rate) +which means that leaving the release of connection resources to the garbage collector can lead +to excessive connection usage, reduced performance (due to less connection re-use), and even +stalls or deadlocks when running out of connections. + +```js +// Do +const headers = await fetch(url) + .then(async res => { + for await (const chunk of res.body) { + // force consumption of body + } + return res.headers + }) + +// Do not +const headers = await fetch(url) + .then(res => res.headers) +``` + +However, if you want to get only headers, it might be better to use `HEAD` request method. Usage of this method will obviate the need for consumption or cancelling of the response body. See [MDN - HTTP - HTTP request methods - HEAD](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD) for more details. + +```js +const headers = await fetch(url, { method: 'HEAD' }) + .then(res => res.headers) +``` + +##### Forbidden and Safelisted Header Names + +* https://fetch.spec.whatwg.org/#cors-safelisted-response-header-name +* https://fetch.spec.whatwg.org/#forbidden-header-name +* https://fetch.spec.whatwg.org/#forbidden-response-header-name +* https://github.com/wintercg/fetch/issues/6 + +The [Fetch Standard](https://fetch.spec.whatwg.org) requires implementations to exclude certain headers from requests and responses. In browser environments, some headers are forbidden so the user agent remains in full control over them. In Undici, these constraints are removed to give more control to the user. + +### `undici.upgrade([url, options]): Promise` + +Upgrade to a different protocol. See [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details. + +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`UpgradeOptions`](docs/api/Dispatcher.md#parameter-upgradeoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **maxRedirections** `Integer` - Default: `0` +* **callback** `(error: Error | null, data: UpgradeData) => void` (optional) + +Returns a promise with the result of the `Dispatcher.upgrade` method. + +Calls `options.dispatcher.upgrade(options)`. + +See [Dispatcher.upgrade](docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details. + +### `undici.setGlobalDispatcher(dispatcher)` + +* dispatcher `Dispatcher` + +Sets the global dispatcher used by Common API Methods. + +### `undici.getGlobalDispatcher()` + +Gets the global dispatcher used by Common API Methods. + +Returns: `Dispatcher` + +### `undici.setGlobalOrigin(origin)` + +* origin `string | URL | undefined` + +Sets the global origin used in `fetch`. + +If `undefined` is passed, the global origin will be reset. This will cause `Response.redirect`, `new Request()`, and `fetch` to throw an error when a relative path is passed. + +```js +setGlobalOrigin('http://localhost:3000') + +const response = await fetch('/api/ping') + +console.log(response.url) // http://localhost:3000/api/ping +``` + +### `undici.getGlobalOrigin()` + +Gets the global origin used in `fetch`. 
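+ +A minimal sketch of the getter paired with `setGlobalOrigin` (assuming a local origin of `http://localhost:3000`; the value is illustrative): + +```js +import { setGlobalOrigin, getGlobalOrigin } from 'undici' + +setGlobalOrigin('http://localhost:3000') + +const origin = getGlobalOrigin() +console.log(origin.href) // http://localhost:3000/ +```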
+ +Returns: `URL` + +### `UrlObject` + +* **port** `string | number` (optional) +* **path** `string` (optional) +* **pathname** `string` (optional) +* **hostname** `string` (optional) +* **origin** `string` (optional) +* **protocol** `string` (optional) +* **search** `string` (optional) + +## Specification Compliance + +This section documents parts of the HTTP/1.1 specification that Undici does +not support or does not fully implement. + +### Expect + +Undici does not support the `Expect` request header field. The request +body is always immediately sent and the `100 Continue` response will be +ignored. + +Refs: https://tools.ietf.org/html/rfc7231#section-5.1.1 + +### Pipelining + +Undici will only use pipelining if configured with a `pipelining` factor +greater than `1`. + +Undici always assumes that connections are persistent and will immediately +pipeline requests, without checking whether the connection is persistent. +Hence, automatic fallback to HTTP/1.0 or HTTP/1.1 without pipelining is +not supported. + +Undici will immediately pipeline when retrying requests after a failed +connection. However, Undici will not retry the first remaining requests in +the prior pipeline and instead error the corresponding callback/promise/stream. + +Undici will abort all running requests in the pipeline when any of them are +aborted. + +* Refs: https://tools.ietf.org/html/rfc2616#section-8.1.2.2 +* Refs: https://tools.ietf.org/html/rfc7230#section-6.3.2 + +### Manual Redirect + +Since it is not possible to manually follow an HTTP redirect on the server-side, +Undici returns the actual response instead of an `opaqueredirect` filtered one +when invoked with a `manual` redirect. This aligns `fetch()` with the other +implementations in Deno and Cloudflare Workers. + +Refs: https://fetch.spec.whatwg.org/#atomic-http-redirect-handling + +## Workarounds + +### Network address family autoselection. + +If you experience problems when connecting to a remote server that is resolved by your DNS servers to an IPv6 (AAAA record) +first, chances are that your local router or ISP has problems connecting to IPv6 networks. In that case +undici will throw an error with code `UND_ERR_CONNECT_TIMEOUT`. + +If the target server resolves to both an IPv6 and an IPv4 (A records) address and you are using a compatible Node version +(18.3.0 and above), you can fix the problem by providing the `autoSelectFamily` option (supported by both `undici.request` +and `undici.Agent`), which will enable the family autoselection algorithm when establishing the connection. + +## Collaborators + +* [__Daniele Belardi__](https://github.com/dnlup), +* [__Ethan Arrowood__](https://github.com/ethan-arrowood), +* [__Matteo Collina__](https://github.com/mcollina), +* [__Matthew Aitken__](https://github.com/KhafraDev), +* [__Robert Nagy__](https://github.com/ronag), +* [__Szymon Marczak__](https://github.com/szmarczak), +* [__Tomas Della Vedova__](https://github.com/delvedor), + +### Releasers + +* [__Ethan Arrowood__](https://github.com/ethan-arrowood), +* [__Matteo Collina__](https://github.com/mcollina), +* [__Robert Nagy__](https://github.com/ronag), +* [__Matthew Aitken__](https://github.com/KhafraDev), + +## License + +MIT diff --git a/node_modules/undici/docs/api/Agent.md b/node_modules/undici/docs/api/Agent.md new file mode 100644 index 0000000..dd5d99b --- /dev/null +++ b/node_modules/undici/docs/api/Agent.md @@ -0,0 +1,80 @@ +# Agent + +Extends: `undici.Dispatcher` + +Agent allows dispatching requests against multiple different origins. 
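+ +A minimal usage sketch (the origin and path are illustrative; any origin works, since an Agent keeps one pool per origin): + +```js +import { Agent, request } from 'undici' + +const agent = new Agent({ connections: 10 }) + +// Dispatch through this Agent instead of the global dispatcher. +const { statusCode, body } = await request('http://localhost:3000/foo', { dispatcher: agent }) +console.log(statusCode, await body.text()) + +await agent.close() +```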
+ +Requests are not guaranteed to be dispatched in order of invocation. + +## `new undici.Agent([options])` + +Arguments: + +* **options** `AgentOptions` (optional) + +Returns: `Agent` + +### Parameter: `AgentOptions` + +Extends: [`PoolOptions`](Pool.md#parameter-pooloptions) + +* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)` +* **maxRedirections** `Integer` - Default: `0`. The number of HTTP redirection to follow unless otherwise specified in `DispatchOptions`. +* **interceptors** `{ Agent: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time. + +## Instance Properties + +### `Agent.closed` + +Implements [Client.closed](Client.md#clientclosed) + +### `Agent.destroyed` + +Implements [Client.destroyed](Client.md#clientdestroyed) + +## Instance Methods + +### `Agent.close([callback])` + +Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise). + +### `Agent.destroy([error, callback])` + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). + +### `Agent.dispatch(options, handler: AgentDispatchOptions)` + +Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +#### Parameter: `AgentDispatchOptions` + +Extends: [`DispatchOptions`](Dispatcher.md#parameter-dispatchoptions) + +* **origin** `string | URL` +* **maxRedirections** `Integer`. + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). + +### `Agent.connect(options[, callback])` + +See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback). + +### `Agent.dispatch(options, handler)` + +Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `Agent.pipeline(options, handler)` + +See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler). + +### `Agent.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +### `Agent.stream(options, factory[, callback])` + +See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback). + +### `Agent.upgrade(options[, callback])` + +See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback). diff --git a/node_modules/undici/docs/api/BalancedPool.md b/node_modules/undici/docs/api/BalancedPool.md new file mode 100644 index 0000000..290c734 --- /dev/null +++ b/node_modules/undici/docs/api/BalancedPool.md @@ -0,0 +1,99 @@ +# Class: BalancedPool + +Extends: `undici.Dispatcher` + +A pool of [Pool](Pool.md) instances connected to multiple upstreams. + +Requests are not guaranteed to be dispatched in order of invocation. + +## `new BalancedPool(upstreams [, options])` + +Arguments: + +* **upstreams** `URL | string | string[]` - It should only include the **protocol, hostname, and port**. 
+* **options** `BalancedPoolOptions` (optional) + +### Parameter: `BalancedPoolOptions` + +Extends: [`PoolOptions`](Pool.md#parameter-pooloptions) + +* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)` + +The `PoolOptions` are passed to each of the `Pool` instances being created. +## Instance Properties + +### `BalancedPool.upstreams` + +Returns an array of upstreams that were previously added. + +### `BalancedPool.closed` + +Implements [Client.closed](Client.md#clientclosed) + +### `BalancedPool.destroyed` + +Implements [Client.destroyed](Client.md#clientdestroyed) + +### `Pool.stats` + +Returns [`PoolStats`](PoolStats.md) instance for this pool. + +## Instance Methods + +### `BalancedPool.addUpstream(upstream)` + +Add an upstream. + +Arguments: + +* **upstream** `string` - It should only include the **protocol, hostname, and port**. + +### `BalancedPool.removeUpstream(upstream)` + +Removes an upstream that was previously addded. + +### `BalancedPool.close([callback])` + +Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise). + +### `BalancedPool.destroy([error, callback])` + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). + +### `BalancedPool.connect(options[, callback])` + +See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback). + +### `BalancedPool.dispatch(options, handlers)` + +Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `BalancedPool.pipeline(options, handler)` + +See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler). + +### `BalancedPool.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +### `BalancedPool.stream(options, factory[, callback])` + +See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback). + +### `BalancedPool.upgrade(options[, callback])` + +See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback). + +## Instance Events + +### Event: `'connect'` + +See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect). + +### Event: `'disconnect'` + +See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect). + +### Event: `'drain'` + +See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain). diff --git a/node_modules/undici/docs/api/CacheStorage.md b/node_modules/undici/docs/api/CacheStorage.md new file mode 100644 index 0000000..08ee99f --- /dev/null +++ b/node_modules/undici/docs/api/CacheStorage.md @@ -0,0 +1,30 @@ +# CacheStorage + +Undici exposes a W3C spec-compliant implementation of [CacheStorage](https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage) and [Cache](https://developer.mozilla.org/en-US/docs/Web/API/Cache). + +## Opening a Cache + +Undici exports a top-level CacheStorage instance. You can open a new Cache, or duplicate a Cache with an existing name, by using `CacheStorage.prototype.open`. If you open a Cache with the same name as an already-existing Cache, its list of cached Responses will be shared between both instances. 
+ +```mjs +import { caches } from 'undici' + +const cache_1 = await caches.open('v1') +const cache_2 = await caches.open('v1') + +// Although .open() creates a new instance, +assert(cache_1 !== cache_2) +// The same Response is matched in both. +assert.deepStrictEqual(await cache_1.match('/req'), await cache_2.match('/req')) +``` + +## Deleting a Cache + +If a Cache is deleted, the cached Responses/Requests can still be used. + +```mjs +const response = await cache_1.match('/req') +await caches.delete('v1') + +await response.text() // the Response's body +``` diff --git a/node_modules/undici/docs/api/Client.md b/node_modules/undici/docs/api/Client.md new file mode 100644 index 0000000..b9e26f0 --- /dev/null +++ b/node_modules/undici/docs/api/Client.md @@ -0,0 +1,273 @@ +# Class: Client + +Extends: `undici.Dispatcher` + +A basic HTTP/1.1 client, mapped on top a single TCP/TLS connection. Pipelining is disabled by default. + +Requests are not guaranteed to be dispatched in order of invocation. + +## `new Client(url[, options])` + +Arguments: + +* **url** `URL | string` - Should only include the **protocol, hostname, and port**. +* **options** `ClientOptions` (optional) + +Returns: `Client` + +### Parameter: `ClientOptions` + +> ⚠️ Warning: The `H2` support is experimental. + +* **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds. +* **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds. +* **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Defaults to 10 minutes. +* **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds. +* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `1e3` - A number of milliseconds subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 1 second. +* **maxHeaderSize** `number | null` (optional) - Default: `--max-http-header-size` or `16384` - The maximum length of request headers in bytes. Defaults to Node.js' --max-http-header-size or 16KiB. +* **maxResponseSize** `number | null` (optional) - Default: `-1` - The maximum length of response body in bytes. Set to `-1` to disable. +* **pipelining** `number | null` (optional) - Default: `1` - The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Carefully consider your workload and environment before enabling concurrent requests as pipelining may reduce performance if used incorrectly. Pipelining is sensitive to network stack settings as well as head of line blocking caused by e.g. long running requests. Set to `0` to disable keep-alive connections. 
+* **connect** `ConnectOptions | Function | null` (optional) - Default: `null`. +* **strictContentLength** `Boolean` (optional) - Default: `true` - Whether to treat request content length mismatches as errors. If true, an error is thrown when the request content-length header doesn't match the length of the request body. +* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time. +* **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version. +* **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. +* **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation. +* **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame. + +#### Parameter: `ConnectOptions` + +Every Tls option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback). +Furthermore, the following options can be passed: + +* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe. +* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: 100. +* **timeout** `number | null` (optional) - In milliseconds, Default `10e3`. +* **servername** `string | null` (optional) +* **keepAlive** `boolean | null` (optional) - Default: `true` - TCP keep-alive enabled +* **keepAliveInitialDelay** `number | null` (optional) - Default: `60000` - TCP keep-alive interval for the socket in milliseconds + +### Example - Basic Client instantiation + +This will instantiate the undici Client, but it will not connect to the origin until something is queued. Consider using `client.connect` to prematurely connect to the origin, or just call `client.request`. + +```js +'use strict' +import { Client } from 'undici' + +const client = new Client('http://localhost:3000') +``` + +### Example - Custom connector + +This will allow you to perform some additional check on the socket that will be used for the next request. 
+ +```js +'use strict' +import { Client, buildConnector } from 'undici' + +const connector = buildConnector({ rejectUnauthorized: false }) +const client = new Client('https://localhost:3000', { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if (/* assertion */) { + socket.destroy() + cb(new Error('kaboom')) + } else { + cb(null, socket) + } + }) + } +}) +``` + +## Instance Methods + +### `Client.close([callback])` + +Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise). + +### `Client.destroy([error, callback])` + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). + +Waits until socket is closed before invoking the callback (or returning a promise if no callback is provided). + +### `Client.connect(options[, callback])` + +See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback). + +### `Client.dispatch(options, handlers)` + +Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `Client.pipeline(options, handler)` + +See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler). + +### `Client.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +### `Client.stream(options, factory[, callback])` + +See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback). + +### `Client.upgrade(options[, callback])` + +See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback). + +## Instance Properties + +### `Client.closed` + +* `boolean` + +`true` after `client.close()` has been called. + +### `Client.destroyed` + +* `boolean` + +`true` after `client.destroyed()` has been called or `client.close()` has been called and the client shutdown has completed. + +### `Client.pipelining` + +* `number` + +Property to get and set the pipelining factor. + +## Instance Events + +### Event: `'connect'` + +See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect). + +Parameters: + +* **origin** `URL` +* **targets** `Array` + +Emitted when a socket has been created and connected. The client will connect once `client.size > 0`. + +#### Example - Client connect event + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +client.on('connect', (origin) => { + console.log(`Connected to ${origin}`) // should print before the request body statement +}) + +try { + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.setEncoding('utf-8') + body.on('data', console.log) + client.close() + server.close() +} catch (error) { + console.error(error) + client.close() + server.close() +} +``` + +### Event: `'disconnect'` + +See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect). + +Parameters: + +* **origin** `URL` +* **targets** `Array` +* **error** `Error` + +Emitted when socket has disconnected. The error argument of the event is the error which caused the socket to disconnect. The client will reconnect if or once `client.size > 0`. 
+ +#### Example - Client disconnect event + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.destroy() +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +client.on('disconnect', (origin) => { + console.log(`Disconnected from ${origin}`) +}) + +try { + await client.request({ + path: '/', + method: 'GET' + }) +} catch (error) { + console.error(error.message) + client.close() + server.close() +} +``` + +### Event: `'drain'` + +Emitted when pipeline is no longer busy. + +See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain). + +#### Example - Client drain event + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +client.on('drain', () => { + console.log('drain event') + client.close() + server.close() +}) + +const requests = [ + client.request({ path: '/', method: 'GET' }), + client.request({ path: '/', method: 'GET' }), + client.request({ path: '/', method: 'GET' }) +] + +await Promise.all(requests) + +console.log('requests completed') +``` + +### Event: `'error'` + +Invoked for users errors such as throwing in the `onError` handler. diff --git a/node_modules/undici/docs/api/Connector.md b/node_modules/undici/docs/api/Connector.md new file mode 100644 index 0000000..56821bd --- /dev/null +++ b/node_modules/undici/docs/api/Connector.md @@ -0,0 +1,115 @@ +# Connector + +Undici creates the underlying socket via the connector builder. +Normally, this happens automatically and you don't need to care about this, +but if you need to perform some additional check over the currently used socket, +this is the right place. + +If you want to create a custom connector, you must import the `buildConnector` utility. + +#### Parameter: `buildConnector.BuildOptions` + +Every Tls option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback). +Furthermore, the following options can be passed: + +* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe. +* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: `100`. +* **timeout** `number | null` (optional) - In milliseconds. Default `10e3`. +* **servername** `string | null` (optional) + +Once you call `buildConnector`, it will return a connector function, which takes the following parameters. + +#### Parameter: `connector.Options` + +* **hostname** `string` (required) +* **host** `string` (optional) +* **protocol** `string` (required) +* **port** `string` (required) +* **servername** `string` (optional) +* **localAddress** `string | null` (optional) Local address the socket should connect from. +* **httpSocket** `Socket` (optional) Establish secure connection on a given socket rather than creating a new socket. It can only be sent on TLS update. 
+ +### Basic example + +```js +'use strict' + +import { Client, buildConnector } from 'undici' + +const connector = buildConnector({ rejectUnauthorized: false }) +const client = new Client('https://localhost:3000', { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if (/* assertion */) { + socket.destroy() + cb(new Error('kaboom')) + } else { + cb(null, socket) + } + }) + } +}) +``` + +### Example: validate the CA fingerprint + +```js +'use strict' + +import { Client, buildConnector } from 'undici' + +const caFingerprint = 'FO:OB:AR' +const connector = buildConnector({ rejectUnauthorized: false }) +const client = new Client('https://localhost:3000', { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if (getIssuerCertificate(socket).fingerprint256 !== caFingerprint) { + socket.destroy() + cb(new Error('Fingerprint does not match or malformed certificate')) + } else { + cb(null, socket) + } + }) + } +}) + +client.request({ + path: '/', + method: 'GET' +}, (err, data) => { + if (err) throw err + + const bufs = [] + data.body.on('data', (buf) => { + bufs.push(buf) + }) + data.body.on('end', () => { + console.log(Buffer.concat(bufs).toString('utf8')) + client.close() + }) +}) + +function getIssuerCertificate (socket) { + let certificate = socket.getPeerCertificate(true) + while (certificate && Object.keys(certificate).length > 0) { + // invalid certificate + if (certificate.issuerCertificate == null) { + return null + } + + // We have reached the root certificate. + // In case of self-signed certificates, `issuerCertificate` may be a circular reference. + if (certificate.fingerprint256 === certificate.issuerCertificate.fingerprint256) { + break + } + + // continue the loop + certificate = certificate.issuerCertificate + } + return certificate +} +``` diff --git a/node_modules/undici/docs/api/ContentType.md b/node_modules/undici/docs/api/ContentType.md new file mode 100644 index 0000000..2bcc9f7 --- /dev/null +++ b/node_modules/undici/docs/api/ContentType.md @@ -0,0 +1,57 @@ +# MIME Type Parsing + +## `MIMEType` interface + +* **type** `string` +* **subtype** `string` +* **parameters** `Map` +* **essence** `string` + +## `parseMIMEType(input)` + +Implements [parse a MIME type](https://mimesniff.spec.whatwg.org/#parse-a-mime-type). + +Parses a MIME type, returning its type, subtype, and any associated parameters. If the parser can't parse an input it returns the string literal `'failure'`. + +```js +import { parseMIMEType } from 'undici' + +parseMIMEType('text/html; charset=gbk') +// { +// type: 'text', +// subtype: 'html', +// parameters: Map(1) { 'charset' => 'gbk' }, +// essence: 'text/html' +// } +``` + +Arguments: + +* **input** `string` + +Returns: `MIMEType|'failure'` + +## `serializeAMimeType(input)` + +Implements [serialize a MIME type](https://mimesniff.spec.whatwg.org/#serialize-a-mime-type). + +Serializes a MIMEType object. 
+ +```js +import { serializeAMimeType } from 'undici' + +serializeAMimeType({ + type: 'text', + subtype: 'html', + parameters: new Map([['charset', 'gbk']]), + essence: 'text/html' +}) +// text/html;charset=gbk + +``` + +Arguments: + +* **mimeType** `MIMEType` + +Returns: `string` diff --git a/node_modules/undici/docs/api/Cookies.md b/node_modules/undici/docs/api/Cookies.md new file mode 100644 index 0000000..0cad379 --- /dev/null +++ b/node_modules/undici/docs/api/Cookies.md @@ -0,0 +1,101 @@ +# Cookie Handling + +## `Cookie` interface + +* **name** `string` +* **value** `string` +* **expires** `Date|number` (optional) +* **maxAge** `number` (optional) +* **domain** `string` (optional) +* **path** `string` (optional) +* **secure** `boolean` (optional) +* **httpOnly** `boolean` (optional) +* **sameSite** `'String'|'Lax'|'None'` (optional) +* **unparsed** `string[]` (optional) Left over attributes that weren't parsed. + +## `deleteCookie(headers, name[, attributes])` + +Sets the expiry time of the cookie to the unix epoch, causing browsers to delete it when received. + +```js +import { deleteCookie, Headers } from 'undici' + +const headers = new Headers() +deleteCookie(headers, 'name') + +console.log(headers.get('set-cookie')) // name=; Expires=Thu, 01 Jan 1970 00:00:00 GMT +``` + +Arguments: + +* **headers** `Headers` +* **name** `string` +* **attributes** `{ path?: string, domain?: string }` (optional) + +Returns: `void` + +## `getCookies(headers)` + +Parses the `Cookie` header and returns a list of attributes and values. + +```js +import { getCookies, Headers } from 'undici' + +const headers = new Headers({ + cookie: 'get=cookies; and=attributes' +}) + +console.log(getCookies(headers)) // { get: 'cookies', and: 'attributes' } +``` + +Arguments: + +* **headers** `Headers` + +Returns: `Record` + +## `getSetCookies(headers)` + +Parses all `Set-Cookie` headers. + +```js +import { getSetCookies, Headers } from 'undici' + +const headers = new Headers({ 'set-cookie': 'undici=getSetCookies; Secure' }) + +console.log(getSetCookies(headers)) +// [ +// { +// name: 'undici', +// value: 'getSetCookies', +// secure: true +// } +// ] + +``` + +Arguments: + +* **headers** `Headers` + +Returns: `Cookie[]` + +## `setCookie(headers, cookie)` + +Appends a cookie to the `Set-Cookie` header. + +```js +import { setCookie, Headers } from 'undici' + +const headers = new Headers() +setCookie(headers, { name: 'undici', value: 'setCookie' }) + +console.log(headers.get('Set-Cookie')) // undici=setCookie +``` + +Arguments: + +* **headers** `Headers` +* **cookie** `Cookie` + +Returns: `void` diff --git a/node_modules/undici/docs/api/DiagnosticsChannel.md b/node_modules/undici/docs/api/DiagnosticsChannel.md new file mode 100644 index 0000000..0aa0b9a --- /dev/null +++ b/node_modules/undici/docs/api/DiagnosticsChannel.md @@ -0,0 +1,204 @@ +# Diagnostics Channel Support + +Stability: Experimental. + +Undici supports the [`diagnostics_channel`](https://nodejs.org/api/diagnostics_channel.html) (currently available only on Node.js v16+). +It is the preferred way to instrument Undici and retrieve internal information. + +The channels available are the following. + +## `undici:request:create` + +This message is published when a new outgoing request is created. 
+ +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => { + console.log('origin', request.origin) + console.log('completed', request.completed) + console.log('method', request.method) + console.log('path', request.path) + console.log('headers') // raw text, e.g: 'bar: bar\r\n' + request.addHeader('hello', 'world') + console.log('headers', request.headers) // e.g. 'bar: bar\r\nhello: world\r\n' +}) +``` + +Note: a request is only loosely completed to a given socket. + + +## `undici:request:bodySent` + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:bodySent').subscribe(({ request }) => { + // request is the same object undici:request:create +}) +``` + +## `undici:request:headers` + +This message is published after the response headers have been received, i.e. the response has been completed. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => { + // request is the same object undici:request:create + console.log('statusCode', response.statusCode) + console.log(response.statusText) + // response.headers are buffers. + console.log(response.headers.map((x) => x.toString())) +}) +``` + +## `undici:request:trailers` + +This message is published after the response body and trailers have been received, i.e. the response has been completed. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request, trailers }) => { + // request is the same object undici:request:create + console.log('completed', request.completed) + // trailers are buffers. + console.log(trailers.map((x) => x.toString())) +}) +``` + +## `undici:request:error` + +This message is published if the request is going to error, but it has not errored yet. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:error').subscribe(({ request, error }) => { + // request is the same object undici:request:create +}) +``` + +## `undici:client:sendHeaders` + +This message is published right before the first byte of the request is written to the socket. + +*Note*: It will publish the exact headers that will be sent to the server in raw format. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(({ request, headers, socket }) => { + // request is the same object undici:request:create + console.log(`Full headers list ${headers.split('\r\n')}`); +}) +``` + +## `undici:client:beforeConnect` + +This message is published before creating a new connection for **any** request. +You can not assume that this event is related to any specific request. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => { + // const { host, hostname, protocol, port, servername } = connectParams + // connector is a function that creates the socket +}) +``` + +## `undici:client:connected` + +This message is published after a connection is established. 
+ +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:client:connected').subscribe(({ socket, connectParams, connector }) => { + // const { host, hostname, protocol, port, servername } = connectParams + // connector is a function that creates the socket +}) +``` + +## `undici:client:connectError` + +This message is published if it did not succeed to create new connection + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error, socket, connectParams, connector }) => { + // const { host, hostname, protocol, port, servername } = connectParams + // connector is a function that creates the socket + console.log(`Connect failed with ${error.message}`) +}) +``` + +## `undici:websocket:open` + +This message is published after the client has successfully connected to a server. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:open').subscribe(({ address, protocol, extensions }) => { + console.log(address) // address, family, and port + console.log(protocol) // negotiated subprotocols + console.log(extensions) // negotiated extensions +}) +``` + +## `undici:websocket:close` + +This message is published after the connection has closed. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:close').subscribe(({ websocket, code, reason }) => { + console.log(websocket) // the WebSocket object + console.log(code) // the closing status code + console.log(reason) // the closing reason +}) +``` + +## `undici:websocket:socket_error` + +This message is published if the socket experiences an error. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:socket_error').subscribe((error) => { + console.log(error) +}) +``` + +## `undici:websocket:ping` + +This message is published after the client receives a ping frame, if the connection is not closing. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:ping').subscribe(({ payload }) => { + // a Buffer or undefined, containing the optional application data of the frame + console.log(payload) +}) +``` + +## `undici:websocket:pong` + +This message is published after the client receives a pong frame. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:pong').subscribe(({ payload }) => { + // a Buffer or undefined, containing the optional application data of the frame + console.log(payload) +}) +``` diff --git a/node_modules/undici/docs/api/DispatchInterceptor.md b/node_modules/undici/docs/api/DispatchInterceptor.md new file mode 100644 index 0000000..7dfc260 --- /dev/null +++ b/node_modules/undici/docs/api/DispatchInterceptor.md @@ -0,0 +1,60 @@ +# Interface: DispatchInterceptor + +Extends: `Function` + +A function that can be applied to the `Dispatcher.Dispatch` function before it is invoked with a dispatch request. + +This allows one to write logic to intercept both the outgoing request, and the incoming response. + +### Parameter: `Dispatcher.Dispatch` + +The base dispatch function you are decorating. 
+ +### ReturnType: `Dispatcher.Dispatch` + +A dispatch function that has been altered to provide additional logic + +### Basic Example + +Here is an example of an interceptor being used to provide a JWT bearer token + +```js +'use strict' + +const insertHeaderInterceptor = dispatch => { + return function InterceptedDispatch(opts, handler){ + opts.headers.push('Authorization', 'Bearer [Some token]') + return dispatch(opts, handler) + } +} + +const client = new Client('https://localhost:3000', { + interceptors: { Client: [insertHeaderInterceptor] } +}) + +``` + +### Basic Example 2 + +Here is a contrived example of an interceptor stripping the headers from a response. + +```js +'use strict' + +const clearHeadersInterceptor = dispatch => { + const { DecoratorHandler } = require('undici') + class ResultInterceptor extends DecoratorHandler { + onHeaders (statusCode, headers, resume) { + return super.onHeaders(statusCode, [], resume) + } + } + return function InterceptedDispatch(opts, handler){ + return dispatch(opts, new ResultInterceptor(handler)) + } +} + +const client = new Client('https://localhost:3000', { + interceptors: { Client: [clearHeadersInterceptor] } +}) + +``` diff --git a/node_modules/undici/docs/api/Dispatcher.md b/node_modules/undici/docs/api/Dispatcher.md new file mode 100644 index 0000000..fd463bf --- /dev/null +++ b/node_modules/undici/docs/api/Dispatcher.md @@ -0,0 +1,887 @@ +# Dispatcher + +Extends: `events.EventEmitter` + +Dispatcher is the core API used to dispatch requests. + +Requests are not guaranteed to be dispatched in order of invocation. + +## Instance Methods + +### `Dispatcher.close([callback]): Promise` + +Closes the dispatcher and gracefully waits for enqueued requests to complete before resolving. + +Arguments: + +* **callback** `(error: Error | null, data: null) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +```js +dispatcher.close() // -> Promise +dispatcher.close(() => {}) // -> void +``` + +#### Example - Request resolves before Client closes + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('undici') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.setEncoding('utf8') + body.on('data', console.log) +} catch (error) {} + +await client.close() + +console.log('Client closed') +server.close() +``` + +### `Dispatcher.connect(options[, callback])` + +Starts two-way communications with the requested resource using [HTTP CONNECT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/CONNECT). 
+ +Arguments: + +* **options** `ConnectOptions` +* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +#### Parameter: `ConnectOptions` + +* **path** `string` +* **headers** `UndiciHeaders` (optional) - Default: `null` +* **signal** `AbortSignal | events.EventEmitter | null` (optional) - Default: `null` +* **opaque** `unknown` (optional) - This argument parameter is passed through to `ConnectData` + +#### Parameter: `ConnectData` + +* **statusCode** `number` +* **headers** `Record` +* **socket** `stream.Duplex` +* **opaque** `unknown` + +#### Example - Connect request with echo + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + throw Error('should never get here') +}).listen() + +server.on('connect', (req, socket, head) => { + socket.write('HTTP/1.1 200 Connection established\r\n\r\n') + + let data = head.toString() + socket.on('data', (buf) => { + data += buf.toString() + }) + + socket.on('end', () => { + socket.end(data) + }) +}) + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { socket } = await client.connect({ + path: '/' + }) + const wanted = 'Body' + let data = '' + socket.on('data', d => { data += d }) + socket.on('end', () => { + console.log(`Data received: ${data.toString()} | Data wanted: ${wanted}`) + client.close() + server.close() + }) + socket.write(wanted) + socket.end() +} catch (error) { } +``` + +### `Dispatcher.destroy([error, callback]): Promise` + +Destroy the dispatcher abruptly with the given error. All the pending and running requests will be asynchronously aborted and error. Since this operation is asynchronously dispatched there might still be some progress on dispatched requests. + +Both arguments are optional; the method can be called in four different ways: + +Arguments: + +* **error** `Error | null` (optional) +* **callback** `(error: Error | null, data: null) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +```js +dispatcher.destroy() // -> Promise +dispatcher.destroy(new Error()) // -> Promise +dispatcher.destroy(() => {}) // -> void +dispatcher.destroy(new Error(), () => {}) // -> void +``` + +#### Example - Request is aborted when Client is destroyed + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end() +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const request = client.request({ + path: '/', + method: 'GET' + }) + client.destroy() + .then(() => { + console.log('Client destroyed') + server.close() + }) + await request +} catch (error) { + console.error(error) +} +``` + +### `Dispatcher.dispatch(options, handler)` + +This is the low level API which all the preceding APIs are implemented on top of. +This API is expected to evolve through semver-major versions and is less stable than the preceding higher level APIs. +It is primarily intended for library developers who implement higher level APIs on top of this. 
+ +Arguments: + +* **options** `DispatchOptions` +* **handler** `DispatchHandler` + +Returns: `Boolean` - `false` if dispatcher is busy and further dispatch calls won't make any progress until the `'drain'` event has been emitted. + +#### Parameter: `DispatchOptions` + +* **origin** `string | URL` +* **path** `string` +* **method** `string` +* **reset** `boolean` (optional) - Default: `false` - If `false`, the request will attempt to create a long-living connection by sending the `connection: keep-alive` header,otherwise will attempt to close it immediately after response by sending `connection: close` within the request and closing the socket afterwards. +* **body** `string | Buffer | Uint8Array | stream.Readable | Iterable | AsyncIterable | null` (optional) - Default: `null` +* **headers** `UndiciHeaders | string[]` (optional) - Default: `null`. +* **query** `Record | null` (optional) - Default: `null` - Query string params to be embedded in the request URL. Note that both keys and values of query are encoded using `encodeURIComponent`. If for some reason you need to send them unencoded, embed query params into path directly instead. +* **idempotent** `boolean` (optional) - Default: `true` if `method` is `'HEAD'` or `'GET'` - Whether the requests can be safely retried or not. If `false` the request won't be sent until all preceding requests in the pipeline has completed. +* **blocking** `boolean` (optional) - Default: `false` - Whether the response is expected to take a long time and would end up blocking the pipeline. When this is set to `true` further pipelining will be avoided on the same connection until headers have been received. +* **upgrade** `string | null` (optional) - Default: `null` - Upgrade the request. Should be used to specify the kind of upgrade i.e. `'Websocket'`. +* **bodyTimeout** `number | null` (optional) - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds. +* **headersTimeout** `number | null` (optional) - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds. +* **throwOnError** `boolean` (optional) - Default: `false` - Whether Undici should throw an error upon receiving a 4xx or 5xx response from the server. +* **expectContinue** `boolean` (optional) - Default: `false` - For H2, it appends the expect: 100-continue header, and halts the request body until a 100-continue is received from the remote server + +#### Parameter: `DispatchHandler` + +* **onConnect** `(abort: () => void, context: object) => void` - Invoked before request is dispatched on socket. May be invoked multiple times when a request is retried when the request at the head of the pipeline fails. +* **onError** `(error: Error) => void` - Invoked when an error has occurred. May not throw. +* **onUpgrade** `(statusCode: number, headers: Buffer[], socket: Duplex) => void` (optional) - Invoked when request is upgraded. Required if `DispatchOptions.upgrade` is defined or `DispatchOptions.method === 'CONNECT'`. +* **onHeaders** `(statusCode: number, headers: Buffer[], resume: () => void, statusText: string) => boolean` - Invoked when statusCode and headers have been received. May be invoked multiple times due to 1xx informational headers. Not required for `upgrade` requests. +* **onData** `(chunk: Buffer) => boolean` - Invoked when response payload data is received. 
Not required for `upgrade` requests. +* **onComplete** `(trailers: Buffer[]) => void` - Invoked when response payload and trailers have been received and the request has completed. Not required for `upgrade` requests. +* **onBodySent** `(chunk: string | Buffer | Uint8Array) => void` - Invoked when a body chunk is sent to the server. Not required. For a stream or iterable body this will be invoked for every chunk. For other body types, it will be invoked once after the body is sent. + +#### Example 1 - Dispatch GET request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +const data = [] + +client.dispatch({ + path: '/', + method: 'GET', + headers: { + 'x-foo': 'bar' + } +}, { + onConnect: () => { + console.log('Connected!') + }, + onError: (error) => { + console.error(error) + }, + onHeaders: (statusCode, headers) => { + console.log(`onHeaders | statusCode: ${statusCode} | headers: ${headers}`) + }, + onData: (chunk) => { + console.log('onData: chunk received') + data.push(chunk) + }, + onComplete: (trailers) => { + console.log(`onComplete | trailers: ${trailers}`) + const res = Buffer.concat(data).toString('utf8') + console.log(`Data: ${res}`) + client.close() + server.close() + } +}) +``` + +#### Example 2 - Dispatch Upgrade Request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end() +}).listen() + +await once(server, 'listening') + +server.on('upgrade', (request, socket, head) => { + console.log('Node.js Server - upgrade event') + socket.write('HTTP/1.1 101 Web Socket Protocol Handshake\r\n') + socket.write('Upgrade: WebSocket\r\n') + socket.write('Connection: Upgrade\r\n') + socket.write('\r\n') + socket.end() +}) + +const client = new Client(`http://localhost:${server.address().port}`) + +client.dispatch({ + path: '/', + method: 'GET', + upgrade: 'websocket' +}, { + onConnect: () => { + console.log('Undici Client - onConnect') + }, + onError: (error) => { + console.log('onError') // shouldn't print + }, + onUpgrade: (statusCode, headers, socket) => { + console.log('Undici Client - onUpgrade') + console.log(`onUpgrade Headers: ${headers}`) + socket.on('data', buffer => { + console.log(buffer.toString('utf8')) + }) + socket.on('end', () => { + client.close() + server.close() + }) + socket.end() + } +}) +``` + +#### Example 3 - Dispatch POST request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + request.on('data', (data) => { + console.log(`Request Data: ${data.toString('utf8')}`) + const body = JSON.parse(data) + body.message = 'World' + response.end(JSON.stringify(body)) + }) +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +const data = [] + +client.dispatch({ + path: '/', + method: 'POST', + headers: { + 'content-type': 'application/json' + }, + body: JSON.stringify({ message: 'Hello' }) +}, { + onConnect: () => { + console.log('Connected!') + }, + onError: (error) => { + console.error(error) + }, + onHeaders: (statusCode, headers) => { + console.log(`onHeaders | statusCode: ${statusCode} | 
headers: ${headers}`) + }, + onData: (chunk) => { + console.log('onData: chunk received') + data.push(chunk) + }, + onComplete: (trailers) => { + console.log(`onComplete | trailers: ${trailers}`) + const res = Buffer.concat(data).toString('utf8') + console.log(`Response Data: ${res}`) + client.close() + server.close() + } +}) +``` + +### `Dispatcher.pipeline(options, handler)` + +For easy use with [stream.pipeline](https://nodejs.org/api/stream.html#stream_stream_pipeline_source_transforms_destination_callback). The `handler` argument should return a `Readable` from which the result will be read. Usually it should just return the `body` argument unless some kind of transformation needs to be performed based on e.g. `headers` or `statusCode`. The `handler` should validate the response and save any required state. If there is an error, it should be thrown. The function returns a `Duplex` which writes to the request and reads from the response. + +Arguments: + +* **options** `PipelineOptions` +* **handler** `(data: PipelineHandlerData) => stream.Readable` + +Returns: `stream.Duplex` + +#### Parameter: PipelineOptions + +Extends: [`RequestOptions`](#parameter-requestoptions) + +* **objectMode** `boolean` (optional) - Default: `false` - Set to `true` if the `handler` will return an object stream. + +#### Parameter: PipelineHandlerData + +* **statusCode** `number` +* **headers** `Record` +* **opaque** `unknown` +* **body** `stream.Readable` +* **context** `object` +* **onInfo** `({statusCode: number, headers: Record}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received. + +#### Example 1 - Pipeline Echo + +```js +import { Readable, Writable, PassThrough, pipeline } from 'stream' +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + request.pipe(response) +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +let res = '' + +pipeline( + new Readable({ + read () { + this.push(Buffer.from('undici')) + this.push(null) + } + }), + client.pipeline({ + path: '/', + method: 'GET' + }, ({ statusCode, headers, body }) => { + console.log(`response received ${statusCode}`) + console.log('headers', headers) + return pipeline(body, new PassThrough(), () => {}) + }), + new Writable({ + write (chunk, _, callback) { + res += chunk.toString() + callback() + }, + final (callback) { + console.log(`Response pipelined to writable: ${res}`) + callback() + } + }), + error => { + if (error) { + console.error(error) + } + + client.close() + server.close() + } +) +``` + +### `Dispatcher.request(options[, callback])` + +Performs a HTTP request. + +Non-idempotent requests will not be pipelined in order +to avoid indirect failures. + +Idempotent requests will be automatically retried if +they fail due to indirect failure from the request +at the head of the pipeline. This does not apply to +idempotent requests with a stream request body. + +All response bodies must always be fully consumed or destroyed. + +Arguments: + +* **options** `RequestOptions` +* **callback** `(error: Error | null, data: ResponseData) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed. 
+ +#### Parameter: `RequestOptions` + +Extends: [`DispatchOptions`](#parameter-dispatchoptions) + +* **opaque** `unknown` (optional) - Default: `null` - Used for passing through context to `ResponseData`. +* **signal** `AbortSignal | events.EventEmitter | null` (optional) - Default: `null`. +* **onInfo** `({statusCode: number, headers: Record}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received. + +The `RequestOptions.method` property should not be value `'CONNECT'`. + +#### Parameter: `ResponseData` + +* **statusCode** `number` +* **headers** `Record` - Note that all header keys are lower-cased, e. g. `content-type`. +* **body** `stream.Readable` which also implements [the body mixin from the Fetch Standard](https://fetch.spec.whatwg.org/#body-mixin). +* **trailers** `Record` - This object starts out + as empty and will be mutated to contain trailers after `body` has emitted `'end'`. +* **opaque** `unknown` +* **context** `object` + +`body` contains the following additional [body mixin](https://fetch.spec.whatwg.org/#body-mixin) methods and properties: + +- `text()` +- `json()` +- `arrayBuffer()` +- `body` +- `bodyUsed` + +`body` can not be consumed twice. For example, calling `text()` after `json()` throws `TypeError`. + +`body` contains the following additional extensions: + +- `dump({ limit: Integer })`, dump the response by reading up to `limit` bytes without killing the socket (optional) - Default: 262144. + +Note that body will still be a `Readable` even if it is empty, but attempting to deserialize it with `json()` will result in an exception. Recommended way to ensure there is a body to deserialize is to check if status code is not 204, and `content-type` header starts with `application/json`. 
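+
+#### Example - Checking for a deserializable body
+
+A minimal sketch of the recommendation above (not part of the original examples; it assumes a `client` created against a local server as in Example 1 below):
+
+```js
+const { statusCode, headers, body } = await client.request({
+  path: '/',
+  method: 'GET'
+})
+
+// Only call json() when a JSON body can reasonably be expected.
+if (statusCode !== 204 && String(headers['content-type']).startsWith('application/json')) {
+  console.log(await body.json())
+} else {
+  // The body must still be consumed (or destroyed) even when it is not deserialized.
+  await body.dump()
+}
+```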
+ +#### Example 1 - Basic GET Request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { body, headers, statusCode, trailers } = await client.request({ + path: '/', + method: 'GET' + }) + console.log(`response received ${statusCode}`) + console.log('headers', headers) + body.setEncoding('utf8') + body.on('data', console.log) + body.on('end', () => { + console.log('trailers', trailers) + }) + + client.close() + server.close() +} catch (error) { + console.error(error) +} +``` + +#### Example 2 - Aborting a request + +> Node.js v15+ is required to run this example + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) +const abortController = new AbortController() + +try { + client.request({ + path: '/', + method: 'GET', + signal: abortController.signal + }) +} catch (error) { + console.error(error) // should print an RequestAbortedError + client.close() + server.close() +} + +abortController.abort() +``` + +Alternatively, any `EventEmitter` that emits an `'abort'` event may be used as an abort controller: + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import EventEmitter, { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) +const ee = new EventEmitter() + +try { + client.request({ + path: '/', + method: 'GET', + signal: ee + }) +} catch (error) { + console.error(error) // should print an RequestAbortedError + client.close() + server.close() +} + +ee.emit('abort') +``` + +Destroying the request or response body will have the same effect. + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.destroy() +} catch (error) { + console.error(error) // should print an RequestAbortedError + client.close() + server.close() +} +``` + +### `Dispatcher.stream(options, factory[, callback])` + +A faster version of `Dispatcher.request`. This method expects the second argument `factory` to return a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable) stream which the response will be written to. This improves performance by avoiding creating an intermediate [`stream.Readable`](https://nodejs.org/api/stream.html#stream_readable_streams) stream when the user expects to directly pipe the response body to a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable) stream. 
+ +As demonstrated in [Example 1 - Basic GET stream request](#example-1---basic-get-stream-request), it is recommended to use the `option.opaque` property to avoid creating a closure for the `factory` method. This pattern works well with Node.js Web Frameworks such as [Fastify](https://fastify.io). See [Example 2 - Stream to Fastify Response](#example-2---stream-to-fastify-response) for more details. + +Arguments: + +* **options** `RequestOptions` +* **factory** `(data: StreamFactoryData) => stream.Writable` +* **callback** `(error: Error | null, data: StreamData) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +#### Parameter: `StreamFactoryData` + +* **statusCode** `number` +* **headers** `Record` +* **opaque** `unknown` +* **onInfo** `({statusCode: number, headers: Record}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received. + +#### Parameter: `StreamData` + +* **opaque** `unknown` +* **trailers** `Record` +* **context** `object` + +#### Example 1 - Basic GET stream request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' +import { Writable } from 'stream' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +const bufs = [] + +try { + await client.stream({ + path: '/', + method: 'GET', + opaque: { bufs } + }, ({ statusCode, headers, opaque: { bufs } }) => { + console.log(`response received ${statusCode}`) + console.log('headers', headers) + return new Writable({ + write (chunk, encoding, callback) { + bufs.push(chunk) + callback() + } + }) + }) + + console.log(Buffer.concat(bufs).toString('utf-8')) + + client.close() + server.close() +} catch (error) { + console.error(error) +} +``` + +#### Example 2 - Stream to Fastify Response + +In this example, a (fake) request is made to the fastify server using `fastify.inject()`. This request then executes the fastify route handler which makes a subsequent request to the raw Node.js http server using `undici.dispatcher.stream()`. The fastify response is passed to the `opaque` option so that undici can tap into the underlying writable stream using `response.raw`. This methodology demonstrates how one could use undici and fastify together to create fast-as-possible requests from one backend server to another. + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' +import fastify from 'fastify' + +const nodeServer = createServer((request, response) => { + response.end('Hello, World! 
From Node.js HTTP Server') +}).listen() + +await once(nodeServer, 'listening') + +console.log('Node Server listening') + +const nodeServerUndiciClient = new Client(`http://localhost:${nodeServer.address().port}`) + +const fastifyServer = fastify() + +fastifyServer.route({ + url: '/', + method: 'GET', + handler: (request, response) => { + nodeServerUndiciClient.stream({ + path: '/', + method: 'GET', + opaque: response + }, ({ opaque }) => opaque.raw) + } +}) + +await fastifyServer.listen() + +console.log('Fastify Server listening') + +const fastifyServerUndiciClient = new Client(`http://localhost:${fastifyServer.server.address().port}`) + +try { + const { statusCode, body } = await fastifyServerUndiciClient.request({ + path: '/', + method: 'GET' + }) + + console.log(`response received ${statusCode}`) + body.setEncoding('utf8') + body.on('data', console.log) + + nodeServerUndiciClient.close() + fastifyServerUndiciClient.close() + fastifyServer.close() + nodeServer.close() +} catch (error) { } +``` + +### `Dispatcher.upgrade(options[, callback])` + +Upgrade to a different protocol. Visit [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details. + +Arguments: + +* **options** `UpgradeOptions` + +* **callback** `(error: Error | null, data: UpgradeData) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +#### Parameter: `UpgradeOptions` + +* **path** `string` +* **method** `string` (optional) - Default: `'GET'` +* **headers** `UndiciHeaders` (optional) - Default: `null` +* **protocol** `string` (optional) - Default: `'Websocket'` - A string of comma separated protocols, in descending preference order. +* **signal** `AbortSignal | EventEmitter | null` (optional) - Default: `null` + +#### Parameter: `UpgradeData` + +* **headers** `http.IncomingHeaders` +* **socket** `stream.Duplex` +* **opaque** `unknown` + +#### Example 1 - Basic Upgrade Request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.statusCode = 101 + response.setHeader('connection', 'upgrade') + response.setHeader('upgrade', request.headers.upgrade) + response.end() +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { headers, socket } = await client.upgrade({ + path: '/', + }) + socket.on('end', () => { + console.log(`upgrade: ${headers.upgrade}`) // upgrade: Websocket + client.close() + server.close() + }) + socket.end() +} catch (error) { + console.error(error) + client.close() + server.close() +} +``` + +## Instance Events + +### Event: `'connect'` + +Parameters: + +* **origin** `URL` +* **targets** `Array` + +### Event: `'disconnect'` + +Parameters: + +* **origin** `URL` +* **targets** `Array` +* **error** `Error` + +### Event: `'connectionError'` + +Parameters: + +* **origin** `URL` +* **targets** `Array` +* **error** `Error` + +Emitted when dispatcher fails to connect to +origin. + +### Event: `'drain'` + +Parameters: + +* **origin** `URL` + +Emitted when dispatcher is no longer busy. 
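+
+### Example - Listening to dispatcher events
+
+A minimal sketch (assuming a `Client` pointed at a hypothetical `http://localhost:3000`) that subscribes to the events documented above; since `Dispatcher` extends `events.EventEmitter`, the usual `on` API applies:
+
+```js
+import { Client } from 'undici'
+
+const client = new Client('http://localhost:3000')
+
+client.on('connect', (origin, targets) => {
+  console.log('connected to', origin.href)
+})
+
+client.on('disconnect', (origin, targets, error) => {
+  console.log('disconnected from', origin.href, error.message)
+})
+
+client.on('connectionError', (origin, targets, error) => {
+  console.error('could not connect to', origin.href, error.message)
+})
+
+client.on('drain', (origin) => {
+  console.log('no longer busy for', origin.href)
+})
+```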
+ +## Parameter: `UndiciHeaders` + +* `Record | string[] | null` + +Header arguments such as `options.headers` in [`Client.dispatch`](Client.md#clientdispatchoptions-handlers) can be specified in two forms; either as an object specified by the `Record` (`IncomingHttpHeaders`) type, or an array of strings. An array representation of a header list must have an even length or an `InvalidArgumentError` will be thrown. + +Keys are lowercase and values are not modified. + +Response headers will derive a `host` from the `url` of the [Client](Client.md#class-client) instance if no `host` header was previously specified. + +### Example 1 - Object + +```js +{ + 'content-length': '123', + 'content-type': 'text/plain', + connection: 'keep-alive', + host: 'mysite.com', + accept: '*/*' +} +``` + +### Example 2 - Array + +```js +[ + 'content-length', '123', + 'content-type', 'text/plain', + 'connection', 'keep-alive', + 'host', 'mysite.com', + 'accept', '*/*' +] +``` diff --git a/node_modules/undici/docs/api/Errors.md b/node_modules/undici/docs/api/Errors.md new file mode 100644 index 0000000..917e45d --- /dev/null +++ b/node_modules/undici/docs/api/Errors.md @@ -0,0 +1,47 @@ +# Errors + +Undici exposes a variety of error objects that you can use to enhance your error handling. +You can find all the error objects inside the `errors` key. + +```js +import { errors } from 'undici' +``` + +| Error | Error Codes | Description | +| ------------------------------------ | ------------------------------------- | ------------------------------------------------------------------------- | +| `UndiciError` | `UND_ERR` | all errors below are extended from `UndiciError`. | +| `ConnectTimeoutError` | `UND_ERR_CONNECT_TIMEOUT` | socket is destroyed due to connect timeout. | +| `HeadersTimeoutError` | `UND_ERR_HEADERS_TIMEOUT` | socket is destroyed due to headers timeout. | +| `HeadersOverflowError` | `UND_ERR_HEADERS_OVERFLOW` | socket is destroyed due to headers' max size being exceeded. | +| `BodyTimeoutError` | `UND_ERR_BODY_TIMEOUT` | socket is destroyed due to body timeout. | +| `ResponseStatusCodeError` | `UND_ERR_RESPONSE_STATUS_CODE` | an error is thrown when `throwOnError` is `true` for status codes >= 400. | +| `InvalidArgumentError` | `UND_ERR_INVALID_ARG` | passed an invalid argument. | +| `InvalidReturnValueError` | `UND_ERR_INVALID_RETURN_VALUE` | returned an invalid value. | +| `RequestAbortedError` | `UND_ERR_ABORTED` | the request has been aborted by the user | +| `ClientDestroyedError` | `UND_ERR_DESTROYED` | trying to use a destroyed client. | +| `ClientClosedError` | `UND_ERR_CLOSED` | trying to use a closed client. | +| `SocketError` | `UND_ERR_SOCKET` | there is an error with the socket. | +| `NotSupportedError` | `UND_ERR_NOT_SUPPORTED` | encountered unsupported functionality. 
| +| `RequestContentLengthMismatchError` | `UND_ERR_REQ_CONTENT_LENGTH_MISMATCH` | request body does not match content-length header | +| `ResponseContentLengthMismatchError` | `UND_ERR_RES_CONTENT_LENGTH_MISMATCH` | response body does not match content-length header | +| `InformationalError` | `UND_ERR_INFO` | expected error with reason | +| `ResponseExceededMaxSizeError` | `UND_ERR_RES_EXCEEDED_MAX_SIZE` | response body exceed the max size allowed | + +### `SocketError` + +The `SocketError` has a `.socket` property which holds socket metadata: + +```ts +interface SocketInfo { + localAddress?: string + localPort?: number + remoteAddress?: string + remotePort?: number + remoteFamily?: string + timeout?: number + bytesWritten?: number + bytesRead?: number +} +``` + +Be aware that in some cases the `.socket` property can be `null`. diff --git a/node_modules/undici/docs/api/Fetch.md b/node_modules/undici/docs/api/Fetch.md new file mode 100644 index 0000000..b5a6242 --- /dev/null +++ b/node_modules/undici/docs/api/Fetch.md @@ -0,0 +1,27 @@ +# Fetch + +Undici exposes a fetch() method starts the process of fetching a resource from the network. + +Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch). + +## File + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/File) + +In Node versions v18.13.0 and above and v19.2.0 and above, undici will default to using Node's [File](https://nodejs.org/api/buffer.html#class-file) class. In versions where it's not available, it will default to the undici one. + +## FormData + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData) + +## Response + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response) + +## Request + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request) + +## Header + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers) diff --git a/node_modules/undici/docs/api/MockAgent.md b/node_modules/undici/docs/api/MockAgent.md new file mode 100644 index 0000000..85ae690 --- /dev/null +++ b/node_modules/undici/docs/api/MockAgent.md @@ -0,0 +1,540 @@ +# Class: MockAgent + +Extends: `undici.Dispatcher` + +A mocked Agent class that implements the Agent API. It allows one to intercept HTTP requests made through undici and return mocked responses instead. + +## `new MockAgent([options])` + +Arguments: + +* **options** `MockAgentOptions` (optional) - It extends the `Agent` options. + +Returns: `MockAgent` + +### Parameter: `MockAgentOptions` + +Extends: [`AgentOptions`](Agent.md#parameter-agentoptions) + +* **agent** `Agent` (optional) - Default: `new Agent([options])` - a custom agent encapsulated by the MockAgent. + +### Example - Basic MockAgent instantiation + +This will instantiate the MockAgent. It will not do anything until registered as the agent to use with requests and mock interceptions are added. 
+ +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() +``` + +### Example - Basic MockAgent instantiation with custom agent + +```js +import { Agent, MockAgent } from 'undici' + +const agent = new Agent() + +const mockAgent = new MockAgent({ agent }) +``` + +## Instance Methods + +### `MockAgent.get(origin)` + +This method creates and retrieves MockPool or MockClient instances which can then be used to intercept HTTP requests. If the number of connections on the mock agent is set to 1, a MockClient instance is returned. Otherwise a MockPool instance is returned. + +For subsequent `MockAgent.get` calls on the same origin, the same mock instance will be returned. + +Arguments: + +* **origin** `string | RegExp | (value) => boolean` - a matcher for the pool origin to be retrieved from the MockAgent. + +| Matcher type | Condition to pass | +|:------------:| -------------------------- | +| `string` | Exact match against string | +| `RegExp` | Regex must pass | +| `Function` | Function must return true | + +Returns: `MockClient | MockPool`. + +| `MockAgentOptions` | Mock instance returned | +| -------------------- | ---------------------- | +| `connections === 1` | `MockClient` | +| `connections` > `1` | `MockPool` | + +#### Example - Basic Mocked Request + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { statusCode, body } = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Mocked Request with local mock agent dispatcher + +```js +import { MockAgent, request } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { dispatcher: mockAgent }) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Mocked Request with local mock pool dispatcher + +```js +import { MockAgent, request } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { dispatcher: mockPool }) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Mocked Request with local mock client dispatcher + +```js +import { MockAgent, request } from 'undici' + +const mockAgent = new MockAgent({ connections: 1 }) + +const mockClient = mockAgent.get('http://localhost:3000') +mockClient.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { dispatcher: mockClient }) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Mocked requests with 
multiple intercepts + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') +mockPool.intercept({ path: '/hello'}).reply(200, 'hello') + +const result1 = await request('http://localhost:3000/foo') + +console.log('response received', result1.statusCode) // response received 200 + +for await (const data of result1.body) { + console.log('data', data.toString('utf8')) // data foo +} + +const result2 = await request('http://localhost:3000/hello') + +console.log('response received', result2.statusCode) // response received 200 + +for await (const data of result2.body) { + console.log('data', data.toString('utf8')) // data hello +} +``` +#### Example - Mock different requests within the same file +```js +const { MockAgent, setGlobalDispatcher } = require('undici'); +const agent = new MockAgent(); +agent.disableNetConnect(); +setGlobalDispatcher(agent); +describe('Test', () => { + it('200', async () => { + const mockAgent = agent.get('http://test.com'); + // your test + }); + it('200', async () => { + const mockAgent = agent.get('http://testing.com'); + // your test + }); +}); +``` + +#### Example - Mocked request with query body, headers and trailers + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' +}).reply(200, { foo: 'bar' }, { + headers: { 'content-type': 'application/json' }, + trailers: { 'Content-MD5': 'test' } +}) + +const { + statusCode, + headers, + trailers, + body +} = await request('http://localhost:3000/foo?hello=there&see=ya', { + method: 'POST', + body: 'form1=data1&form2=data2' +}) + +console.log('response received', statusCode) // response received 200 +console.log('headers', headers) // { 'content-type': 'application/json' } + +for await (const data of body) { + console.log('data', data.toString('utf8')) // '{"foo":"bar"}' +} + +console.log('trailers', trailers) // { 'content-md5': 'test' } +``` + +#### Example - Mocked request with origin regex + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get(new RegExp('http://localhost:3000')) +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Mocked request with origin function + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get((origin) => origin === 'http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +### `MockAgent.close()` + +Closes the mock agent and waits for registered mock 
pools and clients to also close before resolving. + +Returns: `Promise` + +#### Example - clean up after tests are complete + +```js +import { MockAgent, setGlobalDispatcher } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +await mockAgent.close() +``` + +### `MockAgent.dispatch(options, handlers)` + +Implements [`Agent.dispatch(options, handlers)`](Agent.md#parameter-agentdispatchoptions). + +### `MockAgent.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +#### Example - MockAgent request + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await mockAgent.request({ + origin: 'http://localhost:3000', + path: '/foo', + method: 'GET' +}) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +### `MockAgent.deactivate()` + +This method disables mocking in MockAgent. + +Returns: `void` + +#### Example - Deactivate Mocking + +```js +import { MockAgent, setGlobalDispatcher } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.deactivate() +``` + +### `MockAgent.activate()` + +This method enables mocking in a MockAgent instance. When instantiated, a MockAgent is automatically activated. Therefore, this method is only effective after `MockAgent.deactivate` has been called. + +Returns: `void` + +#### Example - Activate Mocking + +```js +import { MockAgent, setGlobalDispatcher } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.deactivate() +// No mocking will occur + +// Later +mockAgent.activate() +``` + +### `MockAgent.enableNetConnect([host])` + +When requests are not matched in a MockAgent intercept, a real HTTP request is attempted. We can control this further through the use of `enableNetConnect`. This is achieved by defining host matchers so only matching requests will be attempted. + +When using a string, it should only include the **hostname and optionally, the port**. In addition, calling this method multiple times with a string will allow all HTTP requests that match these values. 
+ +Arguments: + +* **host** `string | RegExp | (value) => boolean` - (optional) + +Returns: `void` + +#### Example - Allow all non-matching urls to be dispatched in a real HTTP request + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.enableNetConnect() + +await request('http://example.com') +// A real request is made +``` + +#### Example - Allow requests matching a host string to make real requests + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.enableNetConnect('example-1.com') +mockAgent.enableNetConnect('example-2.com:8080') + +await request('http://example-1.com') +// A real request is made + +await request('http://example-2.com:8080') +// A real request is made + +await request('http://example-3.com') +// Will throw +``` + +#### Example - Allow requests matching a host regex to make real requests + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.enableNetConnect(new RegExp('example.com')) + +await request('http://example.com') +// A real request is made +``` + +#### Example - Allow requests matching a host function to make real requests + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.enableNetConnect((value) => value === 'example.com') + +await request('http://example.com') +// A real request is made +``` + +### `MockAgent.disableNetConnect()` + +This method causes all requests to throw when requests are not matched in a MockAgent intercept. + +Returns: `void` + +#### Example - Disable all non-matching requests by throwing an error for each + +```js +import { MockAgent, request } from 'undici' + +const mockAgent = new MockAgent() + +mockAgent.disableNetConnect() + +await request('http://example.com') +// Will throw +``` + +### `MockAgent.pendingInterceptors()` + +This method returns any pending interceptors registered on a mock agent. A pending interceptor meets one of the following criteria: + +- Is registered with neither `.times()` nor `.persist()`, and has not been invoked; +- Is persistent (i.e., registered with `.persist()`) and has not been invoked; +- Is registered with `.times()` and has not been invoked `` of times. + +Returns: `PendingInterceptor[]` (where `PendingInterceptor` is a `MockDispatch` with an additional `origin: string`) + +#### Example - List all pending inteceptors + +```js +const agent = new MockAgent() +agent.disableNetConnect() + +agent + .get('https://example.com') + .intercept({ method: 'GET', path: '/' }) + .reply(200) + +const pendingInterceptors = agent.pendingInterceptors() +// Returns [ +// { +// timesInvoked: 0, +// times: 1, +// persist: false, +// consumed: false, +// pending: true, +// path: '/', +// method: 'GET', +// body: undefined, +// headers: undefined, +// data: { +// error: null, +// statusCode: 200, +// data: '', +// headers: {}, +// trailers: {} +// }, +// origin: 'https://example.com' +// } +// ] +``` + +### `MockAgent.assertNoPendingInterceptors([options])` + +This method throws if the mock agent has any pending interceptors. 
A pending interceptor meets one of the following criteria: + +- Is registered with neither `.times()` nor `.persist()`, and has not been invoked; +- Is persistent (i.e., registered with `.persist()`) and has not been invoked; +- Is registered with `.times()` and has not been invoked `` of times. + +#### Example - Check that there are no pending interceptors + +```js +const agent = new MockAgent() +agent.disableNetConnect() + +agent + .get('https://example.com') + .intercept({ method: 'GET', path: '/' }) + .reply(200) + +agent.assertNoPendingInterceptors() +// Throws an UndiciError with the following message: +// +// 1 interceptor is pending: +// +// ┌─────────┬────────┬───────────────────────┬──────┬─────────────┬────────────┬─────────────┬───────────┐ +// │ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │ +// ├─────────┼────────┼───────────────────────┼──────┼─────────────┼────────────┼─────────────┼───────────┤ +// │ 0 │ 'GET' │ 'https://example.com' │ '/' │ 200 │ '❌' │ 0 │ 1 │ +// └─────────┴────────┴───────────────────────┴──────┴─────────────┴────────────┴─────────────┴───────────┘ +``` diff --git a/node_modules/undici/docs/api/MockClient.md b/node_modules/undici/docs/api/MockClient.md new file mode 100644 index 0000000..ac54691 --- /dev/null +++ b/node_modules/undici/docs/api/MockClient.md @@ -0,0 +1,77 @@ +# Class: MockClient + +Extends: `undici.Client` + +A mock client class that implements the same api as [MockPool](MockPool.md). + +## `new MockClient(origin, [options])` + +Arguments: + +* **origin** `string` - It should only include the **protocol, hostname, and port**. +* **options** `MockClientOptions` - It extends the `Client` options. + +Returns: `MockClient` + +### Parameter: `MockClientOptions` + +Extends: `ClientOptions` + +* **agent** `Agent` - the agent to associate this MockClient with. + +### Example - Basic MockClient instantiation + +We can use MockAgent to instantiate a MockClient ready to be used to intercept specified requests. It will not do anything until registered as the agent to use and any mock request are registered. + +```js +import { MockAgent } from 'undici' + +// Connections must be set to 1 to return a MockClient instance +const mockAgent = new MockAgent({ connections: 1 }) + +const mockClient = mockAgent.get('http://localhost:3000') +``` + +## Instance Methods + +### `MockClient.intercept(options)` + +Implements: [`MockPool.intercept(options)`](MockPool.md#mockpoolinterceptoptions) + +### `MockClient.close()` + +Implements: [`MockPool.close()`](MockPool.md#mockpoolclose) + +### `MockClient.dispatch(options, handlers)` + +Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `MockClient.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). 
+ +#### Example - MockClient request + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent({ connections: 1 }) + +const mockClient = mockAgent.get('http://localhost:3000') +mockClient.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await mockClient.request({ + origin: 'http://localhost:3000', + path: '/foo', + method: 'GET' +}) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` diff --git a/node_modules/undici/docs/api/MockErrors.md b/node_modules/undici/docs/api/MockErrors.md new file mode 100644 index 0000000..c1aa3db --- /dev/null +++ b/node_modules/undici/docs/api/MockErrors.md @@ -0,0 +1,12 @@ +# MockErrors + +Undici exposes a variety of mock error objects that you can use to enhance your mock error handling. +You can find all the mock error objects inside the `mockErrors` key. + +```js +import { mockErrors } from 'undici' +``` + +| Mock Error | Mock Error Codes | Description | +| --------------------- | ------------------------------- | ---------------------------------------------------------- | +| `MockNotMatchedError` | `UND_MOCK_ERR_MOCK_NOT_MATCHED` | The request does not match any registered mock dispatches. | diff --git a/node_modules/undici/docs/api/MockPool.md b/node_modules/undici/docs/api/MockPool.md new file mode 100644 index 0000000..96a986f --- /dev/null +++ b/node_modules/undici/docs/api/MockPool.md @@ -0,0 +1,547 @@ +# Class: MockPool + +Extends: `undici.Pool` + +A mock Pool class that implements the Pool API and is used by MockAgent to intercept real requests and return mocked responses. + +## `new MockPool(origin, [options])` + +Arguments: + +* **origin** `string` - It should only include the **protocol, hostname, and port**. +* **options** `MockPoolOptions` - It extends the `Pool` options. + +Returns: `MockPool` + +### Parameter: `MockPoolOptions` + +Extends: `PoolOptions` + +* **agent** `Agent` - the agent to associate this MockPool with. + +### Example - Basic MockPool instantiation + +We can use MockAgent to instantiate a MockPool ready to be used to intercept specified requests. It will not do anything until registered as the agent to use and any mock request are registered. + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +``` + +## Instance Methods + +### `MockPool.intercept(options)` + +This method defines the interception rules for matching against requests for a MockPool or MockPool. We can intercept multiple times on a single instance, but each intercept is only used once. For example if you expect to make 2 requests inside a test, you need to call `intercept()` twice. Assuming you use `disableNetConnect()` you will get `MockNotMatchedError` on the second request when you only call `intercept()` once. + +When defining interception rules, all the rules must pass for a request to be intercepted. If a request is not intercepted, a real request will be attempted. + +| Matcher type | Condition to pass | +|:------------:| -------------------------- | +| `string` | Exact match against string | +| `RegExp` | Regex must pass | +| `Function` | Function must return true | + +Arguments: + +* **options** `MockPoolInterceptOptions` - Interception options. + +Returns: `MockInterceptor` corresponding to the input options. 
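+
+#### Example - One intercept per expected request
+
+A minimal sketch (assuming `disableNetConnect()` and the `http://localhost:3000` origin used throughout these docs) of the point above: every intercept is consumed once, so a test that makes two identical requests registers two intercepts:
+
+```js
+import { MockAgent, setGlobalDispatcher, request } from 'undici'
+
+const mockAgent = new MockAgent()
+mockAgent.disableNetConnect()
+setGlobalDispatcher(mockAgent)
+
+const mockPool = mockAgent.get('http://localhost:3000')
+
+// Two requests to /foo are expected, so the intercept is registered twice.
+mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
+mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
+
+await request('http://localhost:3000/foo') // consumes the first intercept
+await request('http://localhost:3000/foo') // consumes the second intercept
+// A third request would now throw MockNotMatchedError.
+```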
+ +### Parameter: `MockPoolInterceptOptions` + +* **path** `string | RegExp | (path: string) => boolean` - a matcher for the HTTP request path. When a `RegExp` or callback is used, it will match against the request path including all query parameters in alphabetical order. When a `string` is provided, the query parameters can be conveniently specified through the `MockPoolInterceptOptions.query` setting. +* **method** `string | RegExp | (method: string) => boolean` - (optional) - a matcher for the HTTP request method. Defaults to `GET`. +* **body** `string | RegExp | (body: string) => boolean` - (optional) - a matcher for the HTTP request body. +* **headers** `Record boolean`> - (optional) - a matcher for the HTTP request headers. To be intercepted, a request must match all defined headers. Extra headers not defined here may (or may not) be included in the request and do not affect the interception in any way. +* **query** `Record | null` - (optional) - a matcher for the HTTP request query string params. Only applies when a `string` was provided for `MockPoolInterceptOptions.path`. + +### Return: `MockInterceptor` + +We can define the behaviour of an intercepted request with the following options. + +* **reply** `(statusCode: number, replyData: string | Buffer | object | MockInterceptor.MockResponseDataHandler, responseOptions?: MockResponseOptions) => MockScope` - define a reply for a matching request. You can define the replyData as a callback to read incoming request data. Default for `responseOptions` is `{}`. +* **reply** `(callback: MockInterceptor.MockReplyOptionsCallback) => MockScope` - define a reply for a matching request, allowing dynamic mocking of all reply options rather than just the data. +* **replyWithError** `(error: Error) => MockScope` - define an error for a matching request to throw. +* **defaultReplyHeaders** `(headers: Record) => MockInterceptor` - define default headers to be included in subsequent replies. These are in addition to headers on a specific reply. +* **defaultReplyTrailers** `(trailers: Record) => MockInterceptor` - define default trailers to be included in subsequent replies. These are in addition to trailers on a specific reply. +* **replyContentLength** `() => MockInterceptor` - define automatically calculated `content-length` headers to be included in subsequent replies. + +The reply data of an intercepted request may either be a string, buffer, or JavaScript object. Objects are converted to JSON while strings and buffers are sent as-is. + +By default, `reply` and `replyWithError` define the behaviour for the first matching request only. Subsequent requests will not be affected (this can be changed using the returned `MockScope`). + +### Parameter: `MockResponseOptions` + +* **headers** `Record` - headers to be included on the mocked reply. +* **trailers** `Record` - trailers to be included on the mocked reply. + +### Return: `MockScope` + +A `MockScope` is associated with a single `MockInterceptor`. With this, we can configure the default behaviour of a intercepted reply. + +* **delay** `(waitInMs: number) => MockScope` - delay the associated reply by a set amount in ms. +* **persist** `() => MockScope` - any matching request will always reply with the defined response indefinitely. +* **times** `(repeatTimes: number) => MockScope` - any matching request will reply with the defined response a fixed amount of times. This is overridden by **persist**. 
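+
+#### Example - Controlling replies with MockScope
+
+A minimal sketch (assuming the usual `http://localhost:3000` origin) of the `MockScope` controls described above, chained onto the value returned by `reply`:
+
+```js
+import { MockAgent, setGlobalDispatcher, request } from 'undici'
+
+const mockAgent = new MockAgent()
+setGlobalDispatcher(mockAgent)
+
+const mockPool = mockAgent.get('http://localhost:3000')
+
+// Delay each matching reply by 100 ms and keep replying indefinitely.
+mockPool.intercept({ path: '/persistent' }).reply(200, 'persistent').delay(100).persist()
+
+// Reply to at most two matching requests.
+mockPool.intercept({ path: '/limited' }).reply(200, 'limited').times(2)
+
+const { statusCode } = await request('http://localhost:3000/persistent')
+console.log('response received', statusCode) // response received 200
+```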
+ +#### Example - Basic Mocked Request + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +// MockPool +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Mocked request using reply data callbacks + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/echo', + method: 'GET', + headers: { + 'User-Agent': 'undici', + Host: 'example.com' + } +}).reply(200, ({ headers }) => ({ message: headers.get('message') })) + +const { statusCode, body, headers } = await request('http://localhost:3000', { + headers: { + message: 'hello world!' + } +}) + +console.log('response received', statusCode) // response received 200 +console.log('headers', headers) // { 'content-type': 'application/json' } + +for await (const data of body) { + console.log('data', data.toString('utf8')) // { "message":"hello world!" } +} +``` + +#### Example - Mocked request using reply options callback + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/echo', + method: 'GET', + headers: { + 'User-Agent': 'undici', + Host: 'example.com' + } +}).reply(({ headers }) => ({ statusCode: 200, data: { message: headers.get('message') }}))) + +const { statusCode, body, headers } = await request('http://localhost:3000', { + headers: { + message: 'hello world!' + } +}) + +console.log('response received', statusCode) // response received 200 +console.log('headers', headers) // { 'content-type': 'application/json' } + +for await (const data of body) { + console.log('data', data.toString('utf8')) // { "message":"hello world!" 
} +} +``` + +#### Example - Basic Mocked requests with multiple intercepts + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).reply(200, 'foo') + +mockPool.intercept({ + path: '/hello', + method: 'GET', +}).reply(200, 'hello') + +const result1 = await request('http://localhost:3000/foo') + +console.log('response received', result1.statusCode) // response received 200 + +for await (const data of result1.body) { + console.log('data', data.toString('utf8')) // data foo +} + +const result2 = await request('http://localhost:3000/hello') + +console.log('response received', result2.statusCode) // response received 200 + +for await (const data of result2.body) { + console.log('data', data.toString('utf8')) // data hello +} +``` + +#### Example - Mocked request with query body, request headers and response headers and trailers + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2', + headers: { + 'User-Agent': 'undici', + Host: 'example.com' + } +}).reply(200, { foo: 'bar' }, { + headers: { 'content-type': 'application/json' }, + trailers: { 'Content-MD5': 'test' } +}) + +const { + statusCode, + headers, + trailers, + body +} = await request('http://localhost:3000/foo?hello=there&see=ya', { + method: 'POST', + body: 'form1=data1&form2=data2', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'example.com' + } + }) + +console.log('response received', statusCode) // response received 200 +console.log('headers', headers) // { 'content-type': 'application/json' } + +for await (const data of body) { + console.log('data', data.toString('utf8')) // '{"foo":"bar"}' +} + +console.log('trailers', trailers) // { 'content-md5': 'test' } +``` + +#### Example - Mocked request using different matchers + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: /^GET$/, + body: (value) => value === 'form=data', + headers: { + 'User-Agent': 'undici', + Host: /^example.com$/ + } +}).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { + method: 'GET', + body: 'form=data', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'example.com' + } +}) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Mocked request with reply with a defined error + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).replyWithError(new Error('kaboom')) + +try { + await request('http://localhost:3000/foo', { + method: 'GET' + }) +} catch (error) { + console.error(error) // Error: kaboom +} +``` + +#### Example - Mocked request with defaultReplyHeaders + +```js +import { MockAgent, 
setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).defaultReplyHeaders({ foo: 'bar' }) + .reply(200, 'foo') + +const { headers } = await request('http://localhost:3000/foo') + +console.log('headers', headers) // headers { foo: 'bar' } +``` + +#### Example - Mocked request with defaultReplyTrailers + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).defaultReplyTrailers({ foo: 'bar' }) + .reply(200, 'foo') + +const { trailers } = await request('http://localhost:3000/foo') + +console.log('trailers', trailers) // trailers { foo: 'bar' } +``` + +#### Example - Mocked request with automatic content-length calculation + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).replyContentLength().reply(200, 'foo') + +const { headers } = await request('http://localhost:3000/foo') + +console.log('headers', headers) // headers { 'content-length': '3' } +``` + +#### Example - Mocked request with automatic content-length calculation on an object + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).replyContentLength().reply(200, { foo: 'bar' }) + +const { headers } = await request('http://localhost:3000/foo') + +console.log('headers', headers) // headers { 'content-length': '13' } +``` + +#### Example - Mocked request with persist enabled + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).reply(200, 'foo').persist() + +const result1 = await request('http://localhost:3000/foo') +// Will match and return mocked data + +const result2 = await request('http://localhost:3000/foo') +// Will match and return mocked data + +// Etc +``` + +#### Example - Mocked request with times enabled + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).reply(200, 'foo').times(2) + +const result1 = await request('http://localhost:3000/foo') +// Will match and return mocked data + +const result2 = await request('http://localhost:3000/foo') +// Will match and return mocked data + +const result3 = await request('http://localhost:3000/foo') +// Will not match and make attempt a real request +``` + +#### Example - Mocked request with path callback + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' +import querystring from 'querystring' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +const matchPath = requestPath => { + 
const [pathname, search] = requestPath.split('?') + const requestQuery = querystring.parse(search) + + if (!pathname.startsWith('/foo')) { + return false + } + + if (!Object.keys(requestQuery).includes('foo') || requestQuery.foo !== 'bar') { + return false + } + + return true +} + +mockPool.intercept({ + path: matchPath, + method: 'GET' +}).reply(200, 'foo') + +const result = await request('http://localhost:3000/foo?foo=bar') +// Will match and return mocked data +``` + +### `MockPool.close()` + +Closes the mock pool and de-registers from associated MockAgent. + +Returns: `Promise` + +#### Example - clean up after tests are complete + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() +const mockPool = mockAgent.get('http://localhost:3000') + +await mockPool.close() +``` + +### `MockPool.dispatch(options, handlers)` + +Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `MockPool.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +#### Example - MockPool request + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ + path: '/foo', + method: 'GET', +}).reply(200, 'foo') + +const { + statusCode, + body +} = await mockPool.request({ + origin: 'http://localhost:3000', + path: '/foo', + method: 'GET' +}) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` diff --git a/node_modules/undici/docs/api/Pool.md b/node_modules/undici/docs/api/Pool.md new file mode 100644 index 0000000..8fcabac --- /dev/null +++ b/node_modules/undici/docs/api/Pool.md @@ -0,0 +1,84 @@ +# Class: Pool + +Extends: `undici.Dispatcher` + +A pool of [Client](Client.md) instances connected to the same upstream target. + +Requests are not guaranteed to be dispatched in order of invocation. + +## `new Pool(url[, options])` + +Arguments: + +* **url** `URL | string` - It should only include the **protocol, hostname, and port**. +* **options** `PoolOptions` (optional) + +### Parameter: `PoolOptions` + +Extends: [`ClientOptions`](Client.md#parameter-clientoptions) + +* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Client(origin, opts)` +* **connections** `number | null` (optional) - Default: `null` - The number of `Client` instances to create. When set to `null`, the `Pool` instance will create an unlimited amount of `Client` instances. +* **interceptors** `{ Pool: DispatchInterceptor[] } }` - Default: `{ Pool: [] }` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). + +## Instance Properties + +### `Pool.closed` + +Implements [Client.closed](Client.md#clientclosed) + +### `Pool.destroyed` + +Implements [Client.destroyed](Client.md#clientdestroyed) + +### `Pool.stats` + +Returns [`PoolStats`](PoolStats.md) instance for this pool. + +## Instance Methods + +### `Pool.close([callback])` + +Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise). + +### `Pool.destroy([error, callback])` + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). 
+ +### `Pool.connect(options[, callback])` + +See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback). + +### `Pool.dispatch(options, handler)` + +Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `Pool.pipeline(options, handler)` + +See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler). + +### `Pool.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +### `Pool.stream(options, factory[, callback])` + +See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback). + +### `Pool.upgrade(options[, callback])` + +See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback). + +## Instance Events + +### Event: `'connect'` + +See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect). + +### Event: `'disconnect'` + +See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect). + +### Event: `'drain'` + +See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain). diff --git a/node_modules/undici/docs/api/PoolStats.md b/node_modules/undici/docs/api/PoolStats.md new file mode 100644 index 0000000..16b6dc2 --- /dev/null +++ b/node_modules/undici/docs/api/PoolStats.md @@ -0,0 +1,35 @@ +# Class: PoolStats + +Aggregate stats for a [Pool](Pool.md) or [BalancedPool](BalancedPool.md). + +## `new PoolStats(pool)` + +Arguments: + +* **pool** `Pool` - Pool or BalancedPool from which to return stats. + +## Instance Properties + +### `PoolStats.connected` + +Number of open socket connections in this pool. + +### `PoolStats.free` + +Number of open socket connections in this pool that do not have an active request. + +### `PoolStats.pending` + +Number of pending requests across all clients in this pool. + +### `PoolStats.queued` + +Number of queued requests across all clients in this pool. + +### `PoolStats.running` + +Number of currently active requests across all clients in this pool. + +### `PoolStats.size` + +Number of active, pending, or queued requests across all clients in this pool. diff --git a/node_modules/undici/docs/api/ProxyAgent.md b/node_modules/undici/docs/api/ProxyAgent.md new file mode 100644 index 0000000..cebfe68 --- /dev/null +++ b/node_modules/undici/docs/api/ProxyAgent.md @@ -0,0 +1,126 @@ +# Class: ProxyAgent + +Extends: `undici.Dispatcher` + +A Proxy Agent class that implements the Agent API. It allows the connection through proxy in a simple way. + +## `new ProxyAgent([options])` + +Arguments: + +* **options** `ProxyAgentOptions` (required) - It extends the `Agent` options. + +Returns: `ProxyAgent` + +### Parameter: `ProxyAgentOptions` + +Extends: [`AgentOptions`](Agent.md#parameter-agentoptions) + +* **uri** `string` (required) - It can be passed either by a string or a object containing `uri` as string. +* **token** `string` (optional) - It can be passed by a string of token for authentication. +* **auth** `string` (**deprecated**) - Use token. +* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` (optional) - Default: `(origin, opts) => new Pool(origin, opts)` +* **requestTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the request. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback). 
+* **proxyTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the proxy server. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback). + +Examples: + +```js +import { ProxyAgent } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') +// or +const proxyAgent = new ProxyAgent({ uri: 'my.proxy.server' }) +``` + +#### Example - Basic ProxyAgent instantiation + +This will instantiate the ProxyAgent. It will not do anything until registered as the agent to use with requests. + +```js +import { ProxyAgent } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') +``` + +#### Example - Basic Proxy Request with global agent dispatcher + +```js +import { setGlobalDispatcher, request, ProxyAgent } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') +setGlobalDispatcher(proxyAgent) + +const { statusCode, body } = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Proxy Request with local agent dispatcher + +```js +import { ProxyAgent, request } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { dispatcher: proxyAgent }) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Proxy Request with authentication + +```js +import { setGlobalDispatcher, request, ProxyAgent } from 'undici'; + +const proxyAgent = new ProxyAgent({ + uri: 'my.proxy.server', + // token: 'Bearer xxxx' + token: `Basic ${Buffer.from('username:password').toString('base64')}` +}); +setGlobalDispatcher(proxyAgent); + +const { statusCode, body } = await request('http://localhost:3000/foo'); + +console.log('response received', statusCode); // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')); // data foo +} +``` + +### `ProxyAgent.close()` + +Closes the proxy agent and waits for registered pools and clients to also close before resolving. + +Returns: `Promise` + +#### Example - clean up after tests are complete + +```js +import { ProxyAgent, setGlobalDispatcher } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') +setGlobalDispatcher(proxyAgent) + +await proxyAgent.close() +``` + +### `ProxyAgent.dispatch(options, handlers)` + +Implements [`Agent.dispatch(options, handlers)`](Agent.md#parameter-agentdispatchoptions). + +### `ProxyAgent.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). diff --git a/node_modules/undici/docs/api/RetryHandler.md b/node_modules/undici/docs/api/RetryHandler.md new file mode 100644 index 0000000..2323ce4 --- /dev/null +++ b/node_modules/undici/docs/api/RetryHandler.md @@ -0,0 +1,108 @@ +# Class: RetryHandler + +Extends: `undici.DispatcherHandlers` + +A handler class that implements the retry logic for a request. + +## `new RetryHandler(dispatchOptions, retryHandlers, [retryOptions])` + +Arguments: + +- **options** `Dispatch.DispatchOptions & RetryOptions` (required) - It is an intersection of `Dispatcher.DispatchOptions` and `RetryOptions`. 
+- **retryHandlers** `RetryHandlers` (required) - Object containing the `dispatch` to be used on every retry, and `handler` for handling the `dispatch` lifecycle.
+
+Returns: `retryHandler`
+
+### Parameter: `Dispatch.DispatchOptions & RetryOptions`
+
+Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions).
+
+#### `RetryOptions`
+
+- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass an error if no more retries should be performed.
+- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
+- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
+- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
+- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
+- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
+- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
+- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
+- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN',
+
+**`RetryContext`**
+
+- `state`: `RetryState` - Current retry state. It can be mutated.
+- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
+
+### Parameter: `RetryHandlers`
+
+- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise` (required) - Dispatch function to be called after every retry.
+- **handler** Extends [`Dispatch.DispatchHandlers`](Dispatcher.md#dispatcherdispatchoptions-handler) (required) - Handler function to be called after the request is successful or the retries are exhausted.
+ +Examples: + +```js +const client = new Client(`http://localhost:${server.address().port}`); +const chunks = []; +const handler = new RetryHandler( + { + ...dispatchOptions, + retryOptions: { + // custom retry function + retry: function (err, state, callback) { + counter++; + + if (err.code && err.code === "UND_ERR_DESTROYED") { + callback(err); + return; + } + + if (err.statusCode === 206) { + callback(err); + return; + } + + setTimeout(() => callback(null), 1000); + }, + }, + }, + { + dispatch: (...args) => { + return client.dispatch(...args); + }, + handler: { + onConnect() {}, + onBodySent() {}, + onHeaders(status, _rawHeaders, resume, _statusMessage) { + // do something with headers + }, + onData(chunk) { + chunks.push(chunk); + return true; + }, + onComplete() {}, + onError() { + // handle error properly + }, + }, + } +); +``` + +#### Example - Basic RetryHandler with defaults + +```js +const client = new Client(`http://localhost:${server.address().port}`); +const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: { + onConnect() {}, + onBodySent() {}, + onHeaders(status, _rawHeaders, resume, _statusMessage) {}, + onData(chunk) {}, + onComplete() {}, + onError(err) {}, + }, +}); +``` diff --git a/node_modules/undici/docs/api/WebSocket.md b/node_modules/undici/docs/api/WebSocket.md new file mode 100644 index 0000000..9d374f4 --- /dev/null +++ b/node_modules/undici/docs/api/WebSocket.md @@ -0,0 +1,43 @@ +# Class: WebSocket + +> ⚠️ Warning: the WebSocket API is experimental. + +Extends: [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget) + +The WebSocket object provides a way to manage a WebSocket connection to a server, allowing bidirectional communication. The API follows the [WebSocket spec](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) and [RFC 6455](https://datatracker.ietf.org/doc/html/rfc6455). + +## `new WebSocket(url[, protocol])` + +Arguments: + +* **url** `URL | string` - The url's protocol *must* be `ws` or `wss`. +* **protocol** `string | string[] | WebSocketInit` (optional) - Subprotocol(s) to request the server use, or a [`Dispatcher`](./Dispatcher.md). + +### Example: + +This example will not work in browsers or other platforms that don't allow passing an object. + +```mjs +import { WebSocket, ProxyAgent } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') + +const ws = new WebSocket('wss://echo.websocket.events', { + dispatcher: proxyAgent, + protocols: ['echo', 'chat'] +}) +``` + +If you do not need a custom Dispatcher, it's recommended to use the following pattern: + +```mjs +import { WebSocket } from 'undici' + +const ws = new WebSocket('wss://echo.websocket.events', ['echo', 'chat']) +``` + +## Read More + +- [MDN - WebSocket](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) +- [The WebSocket Specification](https://www.rfc-editor.org/rfc/rfc6455) +- [The WHATWG WebSocket Specification](https://websockets.spec.whatwg.org/) diff --git a/node_modules/undici/docs/api/api-lifecycle.md b/node_modules/undici/docs/api/api-lifecycle.md new file mode 100644 index 0000000..d158126 --- /dev/null +++ b/node_modules/undici/docs/api/api-lifecycle.md @@ -0,0 +1,62 @@ +# Client Lifecycle + +An Undici [Client](Client.md) can be best described as a state machine. The following list is a summary of the various state transitions the `Client` will go through in its lifecycle. This document also contains detailed breakdowns of each state. 
+ +> This diagram is not a perfect representation of the undici Client. Since the Client class is not actually implemented as a state-machine, actual execution may deviate slightly from what is described below. Consider this as a general resource for understanding the inner workings of the Undici client rather than some kind of formal specification. + +## State Transition Overview + +* A `Client` begins in the **idle** state with no socket connection and no requests in queue. + * The *connect* event transitions the `Client` to the **pending** state where requests can be queued prior to processing. + * The *close* and *destroy* events transition the `Client` to the **destroyed** state. Since there are no requests in the queue, the *close* event immediately transitions to the **destroyed** state. +* The **pending** state indicates the underlying socket connection has been successfully established and requests are queueing. + * The *process* event transitions the `Client` to the **processing** state where requests are processed. + * If requests are queued, the *close* event transitions to the **processing** state; otherwise, it transitions to the **destroyed** state. + * The *destroy* event transitions to the **destroyed** state. +* The **processing** state initializes to the **processing.running** state. + * If the current request requires draining, the *needDrain* event transitions the `Client` into the **processing.busy** state which will return to the **processing.running** state with the *drainComplete* event. + * After all queued requests are completed, the *keepalive* event transitions the `Client` back to the **pending** state. If no requests are queued during the timeout, the **close** event transitions the `Client` to the **destroyed** state. + * If the *close* event is fired while the `Client` still has queued requests, the `Client` transitions to the **process.closing** state where it will complete all existing requests before firing the *done* event. + * The *done* event gracefully transitions the `Client` to the **destroyed** state. + * At any point in time, the *destroy* event will transition the `Client` from the **processing** state to the **destroyed** state, destroying any queued requests. +* The **destroyed** state is a final state and the `Client` is no longer functional. + +![A state diagram representing an Undici Client instance](../assets/lifecycle-diagram.png) + +> The diagram was generated using Mermaid.js Live Editor. 
Modify the state diagram [here](https://mermaid-js.github.io/mermaid-live-editor/#/edit/eyJjb2RlIjoic3RhdGVEaWFncmFtLXYyXG4gICAgWypdIC0tPiBpZGxlXG4gICAgaWRsZSAtLT4gcGVuZGluZyA6IGNvbm5lY3RcbiAgICBpZGxlIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95L2Nsb3NlXG4gICAgXG4gICAgcGVuZGluZyAtLT4gaWRsZSA6IHRpbWVvdXRcbiAgICBwZW5kaW5nIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95XG5cbiAgICBzdGF0ZSBjbG9zZV9mb3JrIDw8Zm9yaz4-XG4gICAgcGVuZGluZyAtLT4gY2xvc2VfZm9yayA6IGNsb3NlXG4gICAgY2xvc2VfZm9yayAtLT4gcHJvY2Vzc2luZ1xuICAgIGNsb3NlX2ZvcmsgLS0-IGRlc3Ryb3llZFxuXG4gICAgcGVuZGluZyAtLT4gcHJvY2Vzc2luZyA6IHByb2Nlc3NcblxuICAgIHByb2Nlc3NpbmcgLS0-IHBlbmRpbmcgOiBrZWVwYWxpdmVcbiAgICBwcm9jZXNzaW5nIC0tPiBkZXN0cm95ZWQgOiBkb25lXG4gICAgcHJvY2Vzc2luZyAtLT4gZGVzdHJveWVkIDogZGVzdHJveVxuXG4gICAgc3RhdGUgcHJvY2Vzc2luZyB7XG4gICAgICAgIHJ1bm5pbmcgLS0-IGJ1c3kgOiBuZWVkRHJhaW5cbiAgICAgICAgYnVzeSAtLT4gcnVubmluZyA6IGRyYWluQ29tcGxldGVcbiAgICAgICAgcnVubmluZyAtLT4gWypdIDoga2VlcGFsaXZlXG4gICAgICAgIHJ1bm5pbmcgLS0-IGNsb3NpbmcgOiBjbG9zZVxuICAgICAgICBjbG9zaW5nIC0tPiBbKl0gOiBkb25lXG4gICAgICAgIFsqXSAtLT4gcnVubmluZ1xuICAgIH1cbiAgICAiLCJtZXJtYWlkIjp7InRoZW1lIjoiYmFzZSJ9LCJ1cGRhdGVFZGl0b3IiOmZhbHNlfQ) + +## State details + +### idle + +The **idle** state is the initial state of a `Client` instance. While an `origin` is required for instantiating a `Client` instance, the underlying socket connection will not be established until a request is queued using [`Client.dispatch()`](Client.md#clientdispatchoptions-handlers). By calling `Client.dispatch()` directly or using one of the multiple implementations ([`Client.connect()`](Client.md#clientconnectoptions-callback), [`Client.pipeline()`](Client.md#clientpipelineoptions-handler), [`Client.request()`](Client.md#clientrequestoptions-callback), [`Client.stream()`](Client.md#clientstreamoptions-factory-callback), and [`Client.upgrade()`](Client.md#clientupgradeoptions-callback)), the `Client` instance will transition from **idle** to [**pending**](#pending) and then most likely directly to [**processing**](#processing). + +Calling [`Client.close()`](Client.md#clientclosecallback) or [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](#destroyed) state since the `Client` instance will have no queued requests in this state. + +### pending + +The **pending** state signifies a non-processing `Client`. Upon entering this state, the `Client` establishes a socket connection and emits the [`'connect'`](Client.md#event-connect) event signalling a connection was successfully established with the `origin` provided during `Client` instantiation. The internal queue is initially empty, and requests can start queueing. + +Calling [`Client.close()`](Client.md#clientclosecallback) with queued requests, transitions the `Client` to the [**processing**](#processing) state. Without queued requests, it transitions to the [**destroyed**](#destroyed) state. + +Calling [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](#destroyed) state regardless of existing requests. + +### processing + +The **processing** state is a state machine within itself. It initializes to the [**processing.running**](#running) state. The [`Client.dispatch()`](Client.md#clientdispatchoptions-handlers), [`Client.close()`](Client.md#clientclosecallback), and [`Client.destroy()`](Client.md#clientdestroyerror-callback) can be called at any time while the `Client` is in this state. `Client.dispatch()` will add more requests to the queue while existing requests continue to be processed. 
`Client.close()` will transition to the [**processing.closing**](#closing) state, and `Client.destroy()` will transition to [**destroyed**](#destroyed).
+
+#### running
+
+In the **processing.running** sub-state, queued requests are processed in FIFO order. If a request body requires draining, the *needDrain* event transitions to the [**processing.busy**](#busy) sub-state. The *close* event transitions the `Client` to the [**processing.closing**](#closing) sub-state. If all queued requests are processed and neither [`Client.close()`](Client.md#clientclosecallback) nor [`Client.destroy()`](Client.md#clientdestroyerror-callback) are called, then the [**processing**](#processing) machine will trigger a *keepalive* event transitioning the `Client` back to the [**pending**](#pending) state. During this time, the `Client` is waiting for the socket connection to time out, and once it does, it triggers the *timeout* event and transitions to the [**idle**](#idle) state.
+
+#### busy
+
+This sub-state is only entered when a request body is an instance of [Stream](https://nodejs.org/api/stream.html) and requires draining. The `Client` cannot process additional requests while in this state and must wait until the currently processing request body is completely drained before transitioning back to [**processing.running**](#running).
+
+#### closing
+
+This sub-state is only entered when a `Client` instance has queued requests and the [`Client.close()`](Client.md#clientclosecallback) method is called. In this state, the `Client` instance continues to process requests as usual, with the one exception that no additional requests can be queued. Once all of the queued requests are processed, the `Client` will trigger the *done* event, gracefully entering the [**destroyed**](#destroyed) state without an error.
+
+### destroyed
+
+The **destroyed** state is a final state for the `Client` instance. Once in this state, a `Client` is nonfunctional. Calling any other `Client` methods will result in a `ClientDestroyedError`.
diff --git a/node_modules/undici/docs/assets/lifecycle-diagram.png b/node_modules/undici/docs/assets/lifecycle-diagram.png
new file mode 100644
index 0000000..4ef17b5
Binary files /dev/null and b/node_modules/undici/docs/assets/lifecycle-diagram.png differ
diff --git a/node_modules/undici/docs/best-practices/client-certificate.md b/node_modules/undici/docs/best-practices/client-certificate.md
new file mode 100644
index 0000000..4fc84ec
--- /dev/null
+++ b/node_modules/undici/docs/best-practices/client-certificate.md
@@ -0,0 +1,64 @@
+# Client certificate
+
+Client certificate authentication can be configured with the `Client`; the required options are passed along through the `connect` option.
+
+The client certificates must be signed by a trusted CA. The Node.js default is to trust the well-known CAs curated by Mozilla.
+
+Setting the server option `requestCert: true` tells the server to request the client certificate.
+
+The server option `rejectUnauthorized: false` allows us to handle any invalid certificate errors in client code. The `authorized` property on the socket of the incoming request will show if the client certificate was valid. The `authorizationError` property will give the reason if the certificate was not valid.
+ +### Client Certificate Authentication + +```js +const { readFileSync } = require('fs') +const { join } = require('path') +const { createServer } = require('https') +const { Client } = require('undici') + +const serverOptions = { + ca: [ + readFileSync(join(__dirname, 'client-ca-crt.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'server-key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'server-crt.pem'), 'utf8'), + requestCert: true, + rejectUnauthorized: false +} + +const server = createServer(serverOptions, (req, res) => { + // true if client cert is valid + if(req.client.authorized === true) { + console.log('valid') + } else { + console.error(req.client.authorizationError) + } + res.end() +}) + +server.listen(0, function () { + const tls = { + ca: [ + readFileSync(join(__dirname, 'server-ca-crt.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'client-key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'client-crt.pem'), 'utf8'), + rejectUnauthorized: false, + servername: 'agent1' + } + const client = new Client(`https://localhost:${server.address().port}`, { + connect: tls + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + body.on('data', (buf) => {}) + body.on('end', () => { + client.close() + server.close() + }) + }) +}) +``` diff --git a/node_modules/undici/docs/best-practices/mocking-request.md b/node_modules/undici/docs/best-practices/mocking-request.md new file mode 100644 index 0000000..6954392 --- /dev/null +++ b/node_modules/undici/docs/best-practices/mocking-request.md @@ -0,0 +1,136 @@ +# Mocking Request + +Undici has its own mocking [utility](../api/MockAgent.md). It allow us to intercept undici HTTP requests and return mocked values instead. It can be useful for testing purposes. + +Example: + +```js +// bank.mjs +import { request } from 'undici' + +export async function bankTransfer(recipient, amount) { + const { body } = await request('http://localhost:3000/bank-transfer', + { + method: 'POST', + headers: { + 'X-TOKEN-SECRET': 'SuperSecretToken', + }, + body: JSON.stringify({ + recipient, + amount + }) + } + ) + return await body.json() +} +``` + +And this is what the test file looks like: + +```js +// index.test.mjs +import { strict as assert } from 'assert' +import { MockAgent, setGlobalDispatcher, } from 'undici' +import { bankTransfer } from './bank.mjs' + +const mockAgent = new MockAgent(); + +setGlobalDispatcher(mockAgent); + +// Provide the base url to the request +const mockPool = mockAgent.get('http://localhost:3000'); + +// intercept the request +mockPool.intercept({ + path: '/bank-transfer', + method: 'POST', + headers: { + 'X-TOKEN-SECRET': 'SuperSecretToken', + }, + body: JSON.stringify({ + recipient: '1234567890', + amount: '100' + }) +}).reply(200, { + message: 'transaction processed' +}) + +const success = await bankTransfer('1234567890', '100') + +assert.deepEqual(success, { message: 'transaction processed' }) + +// if you dont want to check whether the body or the headers contain the same value +// just remove it from interceptor +mockPool.intercept({ + path: '/bank-transfer', + method: 'POST', +}).reply(400, { + message: 'bank account not found' +}) + +const badRequest = await bankTransfer('1234567890', '100') + +assert.deepEqual(badRequest, { message: 'bank account not found' }) +``` + +Explore other MockAgent functionality [here](../api/MockAgent.md) + +## Debug Mock Value + +When the interceptor and the request options are not the same, undici will automatically make a real HTTP request. 
To prevent real requests from being made, use `mockAgent.disableNetConnect()`: + +```js +const mockAgent = new MockAgent(); + +setGlobalDispatcher(mockAgent); +mockAgent.disableNetConnect() + +// Provide the base url to the request +const mockPool = mockAgent.get('http://localhost:3000'); + +mockPool.intercept({ + path: '/bank-transfer', + method: 'POST', +}).reply(200, { + message: 'transaction processed' +}) + +const badRequest = await bankTransfer('1234567890', '100') +// Will throw an error +// MockNotMatchedError: Mock dispatch not matched for path '/bank-transfer': +// subsequent request to origin http://localhost:3000 was not allowed (net.connect disabled) +``` + +## Reply with data based on request + +If the mocked response needs to be dynamically derived from the request parameters, you can provide a function instead of an object to `reply`: + +```js +mockPool.intercept({ + path: '/bank-transfer', + method: 'POST', + headers: { + 'X-TOKEN-SECRET': 'SuperSecretToken', + }, + body: JSON.stringify({ + recipient: '1234567890', + amount: '100' + }) +}).reply(200, (opts) => { + // do something with opts + + return { message: 'transaction processed' } +}) +``` + +in this case opts will be + +``` +{ + method: 'POST', + headers: { 'X-TOKEN-SECRET': 'SuperSecretToken' }, + body: '{"recipient":"1234567890","amount":"100"}', + origin: 'http://localhost:3000', + path: '/bank-transfer' +} +``` diff --git a/node_modules/undici/docs/best-practices/proxy.md b/node_modules/undici/docs/best-practices/proxy.md new file mode 100644 index 0000000..bf10295 --- /dev/null +++ b/node_modules/undici/docs/best-practices/proxy.md @@ -0,0 +1,127 @@ +# Connecting through a proxy + +Connecting through a proxy is possible by: + +- Using [AgentProxy](../api/ProxyAgent.md). +- Configuring `Client` or `Pool` constructor. + +The proxy url should be passed to the `Client` or `Pool` constructor, while the upstream server url +should be added to every request call in the `path`. +For instance, if you need to send a request to the `/hello` route of your upstream server, +the `path` should be `path: 'http://upstream.server:port/hello?foo=bar'`. + +If you proxy requires basic authentication, you can send it via the `proxy-authorization` header. 
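+
+### Connect using a ProxyAgent (sketch)
+
+For the first option listed above, here is a minimal sketch using [ProxyAgent](../api/ProxyAgent.md) as the request dispatcher; the proxy URL is a placeholder and the upstream server is assumed to be reachable at `localhost:3000`.
+
+```js
+import { ProxyAgent, request } from 'undici'
+
+// Placeholder proxy URL - replace with your own proxy server.
+const proxyAgent = new ProxyAgent('http://my.proxy.server:8080')
+
+// With a ProxyAgent dispatcher the upstream URL is requested directly;
+// there is no need to put the absolute URL in `path`.
+const { statusCode, body } = await request('http://localhost:3000/hello?foo=bar', {
+  dispatcher: proxyAgent
+})
+
+console.log(statusCode) // 200
+
+for await (const chunk of body) {
+  console.log(chunk.toString('utf8'))
+}
+```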
+ +### Connect without authentication + +```js +import { Client } from 'undici' +import { createServer } from 'http' +import proxy from 'proxy' + +const server = await buildServer() +const proxyServer = await buildProxy() + +const serverUrl = `http://localhost:${server.address().port}` +const proxyUrl = `http://localhost:${proxyServer.address().port}` + +server.on('request', (req, res) => { + console.log(req.url) // '/hello?foo=bar' + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) +}) + +const client = new Client(proxyUrl) + +const response = await client.request({ + method: 'GET', + path: serverUrl + '/hello?foo=bar' +}) + +response.body.setEncoding('utf8') +let data = '' +for await (const chunk of response.body) { + data += chunk +} +console.log(response.statusCode) // 200 +console.log(JSON.parse(data)) // { hello: 'world' } + +server.close() +proxyServer.close() +client.close() + +function buildServer () { + return new Promise((resolve, reject) => { + const server = createServer() + server.listen(0, () => resolve(server)) + }) +} + +function buildProxy () { + return new Promise((resolve, reject) => { + const server = proxy(createServer()) + server.listen(0, () => resolve(server)) + }) +} +``` + +### Connect with authentication + +```js +import { Client } from 'undici' +import { createServer } from 'http' +import proxy from 'proxy' + +const server = await buildServer() +const proxyServer = await buildProxy() + +const serverUrl = `http://localhost:${server.address().port}` +const proxyUrl = `http://localhost:${proxyServer.address().port}` + +proxyServer.authenticate = function (req, fn) { + fn(null, req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`) +} + +server.on('request', (req, res) => { + console.log(req.url) // '/hello?foo=bar' + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) +}) + +const client = new Client(proxyUrl) + +const response = await client.request({ + method: 'GET', + path: serverUrl + '/hello?foo=bar', + headers: { + 'proxy-authorization': `Basic ${Buffer.from('user:pass').toString('base64')}` + } +}) + +response.body.setEncoding('utf8') +let data = '' +for await (const chunk of response.body) { + data += chunk +} +console.log(response.statusCode) // 200 +console.log(JSON.parse(data)) // { hello: 'world' } + +server.close() +proxyServer.close() +client.close() + +function buildServer () { + return new Promise((resolve, reject) => { + const server = createServer() + server.listen(0, () => resolve(server)) + }) +} + +function buildProxy () { + return new Promise((resolve, reject) => { + const server = proxy(createServer()) + server.listen(0, () => resolve(server)) + }) +} +``` + diff --git a/node_modules/undici/docs/best-practices/writing-tests.md b/node_modules/undici/docs/best-practices/writing-tests.md new file mode 100644 index 0000000..57549de --- /dev/null +++ b/node_modules/undici/docs/best-practices/writing-tests.md @@ -0,0 +1,20 @@ +# Writing tests + +Undici is tuned for a production use case and its default will keep +a socket open for a few seconds after an HTTP request is completed to +remove the overhead of opening up a new socket. These settings that makes +Undici shine in production are not a good fit for using Undici in automated +tests, as it will result in longer execution times. 
+ +The following are good defaults that will keep the socket open for only 10ms: + +```js +import { request, setGlobalDispatcher, Agent } from 'undici' + +const agent = new Agent({ + keepAliveTimeout: 10, // milliseconds + keepAliveMaxTimeout: 10 // milliseconds +}) + +setGlobalDispatcher(agent) +``` diff --git a/node_modules/undici/index-fetch.js b/node_modules/undici/index-fetch.js new file mode 100644 index 0000000..ba31a65 --- /dev/null +++ b/node_modules/undici/index-fetch.js @@ -0,0 +1,15 @@ +'use strict' + +const fetchImpl = require('./lib/fetch').fetch + +module.exports.fetch = function fetch (resource, init = undefined) { + return fetchImpl(resource, init).catch((err) => { + Error.captureStackTrace(err, this) + throw err + }) +} +module.exports.FormData = require('./lib/fetch/formdata').FormData +module.exports.Headers = require('./lib/fetch/headers').Headers +module.exports.Response = require('./lib/fetch/response').Response +module.exports.Request = require('./lib/fetch/request').Request +module.exports.WebSocket = require('./lib/websocket/websocket').WebSocket diff --git a/node_modules/undici/index.d.ts b/node_modules/undici/index.d.ts new file mode 100644 index 0000000..83a786d --- /dev/null +++ b/node_modules/undici/index.d.ts @@ -0,0 +1,3 @@ +export * from './types/index' +import Undici from './types/index' +export default Undici diff --git a/node_modules/undici/index.js b/node_modules/undici/index.js new file mode 100644 index 0000000..26302cc --- /dev/null +++ b/node_modules/undici/index.js @@ -0,0 +1,167 @@ +'use strict' + +const Client = require('./lib/client') +const Dispatcher = require('./lib/dispatcher') +const errors = require('./lib/core/errors') +const Pool = require('./lib/pool') +const BalancedPool = require('./lib/balanced-pool') +const Agent = require('./lib/agent') +const util = require('./lib/core/util') +const { InvalidArgumentError } = errors +const api = require('./lib/api') +const buildConnector = require('./lib/core/connect') +const MockClient = require('./lib/mock/mock-client') +const MockAgent = require('./lib/mock/mock-agent') +const MockPool = require('./lib/mock/mock-pool') +const mockErrors = require('./lib/mock/mock-errors') +const ProxyAgent = require('./lib/proxy-agent') +const RetryHandler = require('./lib/handler/RetryHandler') +const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global') +const DecoratorHandler = require('./lib/handler/DecoratorHandler') +const RedirectHandler = require('./lib/handler/RedirectHandler') +const createRedirectInterceptor = require('./lib/interceptor/redirectInterceptor') + +let hasCrypto +try { + require('crypto') + hasCrypto = true +} catch { + hasCrypto = false +} + +Object.assign(Dispatcher.prototype, api) + +module.exports.Dispatcher = Dispatcher +module.exports.Client = Client +module.exports.Pool = Pool +module.exports.BalancedPool = BalancedPool +module.exports.Agent = Agent +module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler + +module.exports.DecoratorHandler = DecoratorHandler +module.exports.RedirectHandler = RedirectHandler +module.exports.createRedirectInterceptor = createRedirectInterceptor + +module.exports.buildConnector = buildConnector +module.exports.errors = errors + +function makeDispatcher (fn) { + return (url, opts, handler) => { + if (typeof opts === 'function') { + handler = opts + opts = null + } + + if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { + throw new InvalidArgumentError('invalid url') 
+ } + + if (opts != null && typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (opts && opts.path != null) { + if (typeof opts.path !== 'string') { + throw new InvalidArgumentError('invalid opts.path') + } + + let path = opts.path + if (!opts.path.startsWith('/')) { + path = `/${path}` + } + + url = new URL(util.parseOrigin(url).origin + path) + } else { + if (!opts) { + opts = typeof url === 'object' ? url : {} + } + + url = util.parseURL(url) + } + + const { agent, dispatcher = getGlobalDispatcher() } = opts + + if (agent) { + throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') + } + + return fn.call(dispatcher, { + ...opts, + origin: url.origin, + path: url.search ? `${url.pathname}${url.search}` : url.pathname, + method: opts.method || (opts.body ? 'PUT' : 'GET') + }, handler) + } +} + +module.exports.setGlobalDispatcher = setGlobalDispatcher +module.exports.getGlobalDispatcher = getGlobalDispatcher + +if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { + let fetchImpl = null + module.exports.fetch = async function fetch (resource) { + if (!fetchImpl) { + fetchImpl = require('./lib/fetch').fetch + } + + try { + return await fetchImpl(...arguments) + } catch (err) { + if (typeof err === 'object') { + Error.captureStackTrace(err, this) + } + + throw err + } + } + module.exports.Headers = require('./lib/fetch/headers').Headers + module.exports.Response = require('./lib/fetch/response').Response + module.exports.Request = require('./lib/fetch/request').Request + module.exports.FormData = require('./lib/fetch/formdata').FormData + module.exports.File = require('./lib/fetch/file').File + module.exports.FileReader = require('./lib/fileapi/filereader').FileReader + + const { setGlobalOrigin, getGlobalOrigin } = require('./lib/fetch/global') + + module.exports.setGlobalOrigin = setGlobalOrigin + module.exports.getGlobalOrigin = getGlobalOrigin + + const { CacheStorage } = require('./lib/cache/cachestorage') + const { kConstruct } = require('./lib/cache/symbols') + + // Cache & CacheStorage are tightly coupled with fetch. Even if it may run + // in an older version of Node, it doesn't have any use without fetch. 
+ module.exports.caches = new CacheStorage(kConstruct) +} + +if (util.nodeMajor >= 16) { + const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/cookies') + + module.exports.deleteCookie = deleteCookie + module.exports.getCookies = getCookies + module.exports.getSetCookies = getSetCookies + module.exports.setCookie = setCookie + + const { parseMIMEType, serializeAMimeType } = require('./lib/fetch/dataURL') + + module.exports.parseMIMEType = parseMIMEType + module.exports.serializeAMimeType = serializeAMimeType +} + +if (util.nodeMajor >= 18 && hasCrypto) { + const { WebSocket } = require('./lib/websocket/websocket') + + module.exports.WebSocket = WebSocket +} + +module.exports.request = makeDispatcher(api.request) +module.exports.stream = makeDispatcher(api.stream) +module.exports.pipeline = makeDispatcher(api.pipeline) +module.exports.connect = makeDispatcher(api.connect) +module.exports.upgrade = makeDispatcher(api.upgrade) + +module.exports.MockClient = MockClient +module.exports.MockPool = MockPool +module.exports.MockAgent = MockAgent +module.exports.mockErrors = mockErrors diff --git a/node_modules/undici/lib/agent.js b/node_modules/undici/lib/agent.js new file mode 100644 index 0000000..0b18f2a --- /dev/null +++ b/node_modules/undici/lib/agent.js @@ -0,0 +1,148 @@ +'use strict' + +const { InvalidArgumentError } = require('./core/errors') +const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols') +const DispatcherBase = require('./dispatcher-base') +const Pool = require('./pool') +const Client = require('./client') +const util = require('./core/util') +const createRedirectInterceptor = require('./interceptor/redirectInterceptor') +const { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')() + +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kMaxRedirections = Symbol('maxRedirections') +const kOnDrain = Symbol('onDrain') +const kFactory = Symbol('factory') +const kFinalizer = Symbol('finalizer') +const kOptions = Symbol('options') + +function defaultFactory (origin, opts) { + return opts && opts.connections === 1 + ? new Client(origin, opts) + : new Pool(origin, opts) +} + +class Agent extends DispatcherBase { + constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { + super() + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + if (connect && typeof connect !== 'function') { + connect = { ...connect } + } + + this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) + ? options.interceptors.Agent + : [createRedirectInterceptor({ maxRedirections })] + + this[kOptions] = { ...util.deepClone(options), connect } + this[kOptions].interceptors = options.interceptors + ? 
{ ...options.interceptors } + : undefined + this[kMaxRedirections] = maxRedirections + this[kFactory] = factory + this[kClients] = new Map() + this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { + const ref = this[kClients].get(key) + if (ref !== undefined && ref.deref() === undefined) { + this[kClients].delete(key) + } + }) + + const agent = this + + this[kOnDrain] = (origin, targets) => { + agent.emit('drain', origin, [agent, ...targets]) + } + + this[kOnConnect] = (origin, targets) => { + agent.emit('connect', origin, [agent, ...targets]) + } + + this[kOnDisconnect] = (origin, targets, err) => { + agent.emit('disconnect', origin, [agent, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + agent.emit('connectionError', origin, [agent, ...targets], err) + } + } + + get [kRunning] () { + let ret = 0 + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore next: gc is undeterministic */ + if (client) { + ret += client[kRunning] + } + } + return ret + } + + [kDispatch] (opts, handler) { + let key + if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { + key = String(opts.origin) + } else { + throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') + } + + const ref = this[kClients].get(key) + + let dispatcher = ref ? ref.deref() : null + if (!dispatcher) { + dispatcher = this[kFactory](opts.origin, this[kOptions]) + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].set(key, new WeakRef(dispatcher)) + this[kFinalizer].register(dispatcher, key) + } + + return dispatcher.dispatch(opts, handler) + } + + async [kClose] () { + const closePromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + closePromises.push(client.close()) + } + } + + await Promise.all(closePromises) + } + + async [kDestroy] (err) { + const destroyPromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + destroyPromises.push(client.destroy(err)) + } + } + + await Promise.all(destroyPromises) + } +} + +module.exports = Agent diff --git a/node_modules/undici/lib/api/abort-signal.js b/node_modules/undici/lib/api/abort-signal.js new file mode 100644 index 0000000..2985c1e --- /dev/null +++ b/node_modules/undici/lib/api/abort-signal.js @@ -0,0 +1,54 @@ +const { addAbortListener } = require('../core/util') +const { RequestAbortedError } = require('../core/errors') + +const kListener = Symbol('kListener') +const kSignal = Symbol('kSignal') + +function abort (self) { + if (self.abort) { + self.abort() + } else { + self.onError(new RequestAbortedError()) + } +} + +function addSignal (self, signal) { + self[kSignal] = null + self[kListener] = null + + if (!signal) { + return + } + + if (signal.aborted) { + abort(self) + return + } + + self[kSignal] = signal + self[kListener] = () => { + abort(self) + } + + addAbortListener(self[kSignal], self[kListener]) +} + +function removeSignal (self) { + if (!self[kSignal]) { + return + } + + if ('removeEventListener' in self[kSignal]) { + self[kSignal].removeEventListener('abort', self[kListener]) + } else { + self[kSignal].removeListener('abort', self[kListener]) + } + + self[kSignal] = null + 
self[kListener] = null +} + +module.exports = { + addSignal, + removeSignal +} diff --git a/node_modules/undici/lib/api/api-connect.js b/node_modules/undici/lib/api/api-connect.js new file mode 100644 index 0000000..fd2b6ad --- /dev/null +++ b/node_modules/undici/lib/api/api-connect.js @@ -0,0 +1,104 @@ +'use strict' + +const { AsyncResource } = require('async_hooks') +const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors') +const util = require('../core/util') +const { addSignal, removeSignal } = require('./abort-signal') + +class ConnectHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + const { signal, opaque, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + super('UNDICI_CONNECT') + + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.callback = callback + this.abort = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders () { + throw new SocketError('bad connect', null) + } + + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this + + removeSignal(this) + + this.callback = null + + let headers = rawHeaders + // Indicates is an HTTP2Session + if (headers != null) { + headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + } + + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + socket, + opaque, + context + }) + } + + onError (err) { + const { callback, opaque } = this + + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + } +} + +function connect (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + connect.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + const connectHandler = new ConnectHandler(opts, callback) + this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = connect diff --git a/node_modules/undici/lib/api/api-pipeline.js b/node_modules/undici/lib/api/api-pipeline.js new file mode 100644 index 0000000..af4a180 --- /dev/null +++ b/node_modules/undici/lib/api/api-pipeline.js @@ -0,0 +1,249 @@ +'use strict' + +const { + Readable, + Duplex, + PassThrough +} = require('stream') +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = require('../core/errors') +const util = require('../core/util') +const { AsyncResource } = require('async_hooks') +const { addSignal, removeSignal } = require('./abort-signal') +const assert = require('assert') + +const kResume = Symbol('resume') + +class PipelineRequest extends Readable { + constructor () { + super({ autoDestroy: true }) + + this[kResume] = null + } + + _read () { + const { [kResume]: resume } = this + + if (resume) { + this[kResume] = null + resume() + } + } + + _destroy (err, callback) { + this._read() + + callback(err) + } +} + +class PipelineResponse extends Readable { + constructor (resume) { + super({ autoDestroy: true }) + this[kResume] = resume + } + + _read () { + this[kResume]() + } + + _destroy (err, callback) { + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } + + callback(err) + } +} + +class PipelineHandler extends AsyncResource { + constructor (opts, handler) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof handler !== 'function') { + throw new InvalidArgumentError('invalid handler') + } + + const { signal, method, opaque, onInfo, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_PIPELINE') + + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.handler = handler + this.abort = null + this.context = null + this.onInfo = onInfo || null + + this.req = new PipelineRequest().on('error', util.nop) + + this.ret = new Duplex({ + readableObjectMode: opts.objectMode, + autoDestroy: true, + read: () => { + const { body } = this + + if (body && body.resume) { + body.resume() + } + }, + write: (chunk, encoding, callback) => { + const { req } = this + + if (req.push(chunk, encoding) || req._readableState.destroyed) { + callback() + } else { + req[kResume] = callback + } + }, + destroy: (err, callback) => { + const { body, req, res, ret, abort } = this + + if (!err && !ret._readableState.endEmitted) { + err = new RequestAbortedError() + } + + if (abort && err) { + abort() + } + + util.destroy(body, err) + util.destroy(req, err) + util.destroy(res, err) + + removeSignal(this) + + callback(err) + } + }).on('prefinish', () => { + const { req } = this + + // Node < 15 does not call _final in same tick. 
+ req.push(null) + }) + + this.res = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + const { ret, res } = this + + assert(!res, 'pipeline cannot be retried') + + if (ret.destroyed) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume) { + const { opaque, handler, context } = this + + if (statusCode < 200) { + if (this.onInfo) { + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.onInfo({ statusCode, headers }) + } + return + } + + this.res = new PipelineResponse(resume) + + let body + try { + this.handler = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + body = this.runInAsyncScope(handler, null, { + statusCode, + headers, + opaque, + body: this.res, + context + }) + } catch (err) { + this.res.on('error', util.nop) + throw err + } + + if (!body || typeof body.on !== 'function') { + throw new InvalidReturnValueError('expected Readable') + } + + body + .on('data', (chunk) => { + const { ret, body } = this + + if (!ret.push(chunk) && body.pause) { + body.pause() + } + }) + .on('error', (err) => { + const { ret } = this + + util.destroy(ret, err) + }) + .on('end', () => { + const { ret } = this + + ret.push(null) + }) + .on('close', () => { + const { ret } = this + + if (!ret._readableState.ended) { + util.destroy(ret, new RequestAbortedError()) + } + }) + + this.body = body + } + + onData (chunk) { + const { res } = this + return res.push(chunk) + } + + onComplete (trailers) { + const { res } = this + res.push(null) + } + + onError (err) { + const { ret } = this + this.handler = null + util.destroy(ret, err) + } +} + +function pipeline (opts, handler) { + try { + const pipelineHandler = new PipelineHandler(opts, handler) + this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) + return pipelineHandler.ret + } catch (err) { + return new PassThrough().destroy(err) + } +} + +module.exports = pipeline diff --git a/node_modules/undici/lib/api/api-request.js b/node_modules/undici/lib/api/api-request.js new file mode 100644 index 0000000..d4281ce --- /dev/null +++ b/node_modules/undici/lib/api/api-request.js @@ -0,0 +1,180 @@ +'use strict' + +const Readable = require('./readable') +const { + InvalidArgumentError, + RequestAbortedError +} = require('../core/errors') +const util = require('../core/util') +const { getResolveErrorBodyCallback } = require('./util') +const { AsyncResource } = require('async_hooks') +const { addSignal, removeSignal } = require('./abort-signal') + +class RequestHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts + + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { + throw new InvalidArgumentError('invalid highWaterMark') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new 
InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_REQUEST') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.res = null + this.abort = null + this.body = body + this.trailers = {} + this.context = null + this.onInfo = onInfo || null + this.throwOnError = throwOnError + this.highWaterMark = highWaterMark + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this + + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } + + const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + const body = new Readable({ resume, abort, contentType, highWaterMark }) + + this.callback = null + this.res = body + if (callback !== null) { + if (this.throwOnError && statusCode >= 400) { + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body, contentType, statusCode, statusMessage, headers } + ) + } else { + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + trailers: this.trailers, + opaque, + body, + context + }) + } + } + } + + onData (chunk) { + const { res } = this + return res.push(chunk) + } + + onComplete (trailers) { + const { res } = this + + removeSignal(this) + + util.parseHeaders(trailers, this.trailers) + + res.push(null) + } + + onError (err) { + const { res, callback, body, opaque } = this + + removeSignal(this) + + if (callback) { + // TODO: Does this need queueMicrotask? + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + + if (res) { + this.res = null + // Ensure all queued handlers are invoked before destroying res. + queueMicrotask(() => { + util.destroy(res, err) + }) + } + + if (body) { + this.body = null + util.destroy(body, err) + } + } +} + +function request (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + request.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new RequestHandler(opts, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = request +module.exports.RequestHandler = RequestHandler diff --git a/node_modules/undici/lib/api/api-stream.js b/node_modules/undici/lib/api/api-stream.js new file mode 100644 index 0000000..c571a6f --- /dev/null +++ b/node_modules/undici/lib/api/api-stream.js @@ -0,0 +1,220 @@ +'use strict' + +const { finished, PassThrough } = require('stream') +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = require('../core/errors') +const util = require('../core/util') +const { getResolveErrorBodyCallback } = require('./util') +const { AsyncResource } = require('async_hooks') +const { addSignal, removeSignal } = require('./abort-signal') + +class StreamHandler extends AsyncResource { + constructor (opts, factory, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts + + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('invalid factory') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_STREAM') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.factory = factory + this.callback = callback + this.res = null + this.abort = null + this.context = null + this.trailers = null + this.body = body + this.onInfo = onInfo || null + this.throwOnError = throwOnError || false + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { factory, opaque, context, callback, responseHeaders } = this + + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } + + this.factory = null + + let res + + if (this.throwOnError && statusCode >= 400) { + const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + res = new PassThrough() + + this.callback = null + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body: res, contentType, statusCode, statusMessage, headers } + ) + } else { + if (factory === null) { + return + } + + res = this.runInAsyncScope(factory, null, { + statusCode, + headers, + opaque, + context + }) + + if ( + !res || + typeof res.write !== 'function' || + typeof res.end !== 'function' || + typeof res.on !== 'function' + ) { + throw new InvalidReturnValueError('expected Writable') + } + + // TODO: Avoid finished. It registers an unnecessary amount of listeners. + finished(res, { readable: false }, (err) => { + const { callback, res, opaque, trailers, abort } = this + + this.res = null + if (err || !res.readable) { + util.destroy(res, err) + } + + this.callback = null + this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) + + if (err) { + abort() + } + }) + } + + res.on('drain', resume) + + this.res = res + + const needDrain = res.writableNeedDrain !== undefined + ? res.writableNeedDrain + : res._writableState && res._writableState.needDrain + + return needDrain !== true + } + + onData (chunk) { + const { res } = this + + return res ? res.write(chunk) : true + } + + onComplete (trailers) { + const { res } = this + + removeSignal(this) + + if (!res) { + return + } + + this.trailers = util.parseHeaders(trailers) + + res.end() + } + + onError (err) { + const { res, callback, opaque, body } = this + + removeSignal(this) + + this.factory = null + + if (res) { + this.res = null + util.destroy(res, err) + } else if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + + if (body) { + this.body = null + util.destroy(body, err) + } + } +} + +function stream (opts, factory, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new StreamHandler(opts, factory, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = stream diff --git a/node_modules/undici/lib/api/api-upgrade.js b/node_modules/undici/lib/api/api-upgrade.js new file mode 100644 index 0000000..ef783e8 --- /dev/null +++ b/node_modules/undici/lib/api/api-upgrade.js @@ -0,0 +1,105 @@ +'use strict' + +const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors') +const { AsyncResource } = require('async_hooks') +const util = require('../core/util') +const { addSignal, removeSignal } = require('./abort-signal') +const assert = require('assert') + +class UpgradeHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + const { signal, opaque, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + super('UNDICI_UPGRADE') + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.abort = null + this.context = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = null + } + + onHeaders () { + throw new SocketError('bad upgrade', null) + } + + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this + + assert.strictEqual(statusCode, 101) + + removeSignal(this) + + this.callback = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.runInAsyncScope(callback, null, null, { + headers, + socket, + opaque, + context + }) + } + + onError (err) { + const { callback, opaque } = this + + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + } +} + +function upgrade (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + upgrade.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + const upgradeHandler = new UpgradeHandler(opts, callback) + this.dispatch({ + ...opts, + method: opts.method || 'GET', + upgrade: opts.protocol || 'Websocket' + }, upgradeHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = upgrade diff --git a/node_modules/undici/lib/api/index.js b/node_modules/undici/lib/api/index.js new file mode 100644 index 0000000..8983a5e --- /dev/null +++ b/node_modules/undici/lib/api/index.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports.request = require('./api-request') +module.exports.stream = require('./api-stream') +module.exports.pipeline = require('./api-pipeline') +module.exports.upgrade = require('./api-upgrade') +module.exports.connect = require('./api-connect') diff --git a/node_modules/undici/lib/api/readable.js b/node_modules/undici/lib/api/readable.js new file mode 100644 index 0000000..5269dfa --- /dev/null +++ b/node_modules/undici/lib/api/readable.js @@ -0,0 +1,322 @@ +// Ported from https://github.com/nodejs/undici/pull/907 + +'use strict' + +const assert = require('assert') +const { Readable } = require('stream') +const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors') +const util = require('../core/util') +const { ReadableStreamFrom, toUSVString } = require('../core/util') + +let Blob + +const kConsume = Symbol('kConsume') +const kReading = Symbol('kReading') +const kBody = Symbol('kBody') +const kAbort = Symbol('abort') +const kContentType = Symbol('kContentType') + +const noop = () => {} + +module.exports = class BodyReadable extends Readable { + constructor ({ + resume, + abort, + contentType = '', + highWaterMark = 64 * 1024 // Same as nodejs fs streams. + }) { + super({ + autoDestroy: true, + read: resume, + highWaterMark + }) + + this._readableState.dataEmitted = false + + this[kAbort] = abort + this[kConsume] = null + this[kBody] = null + this[kContentType] = contentType + + // Is stream being consumed through Readable API? + // This is an optimization so that we avoid checking + // for 'data' and 'readable' listeners in the hot path + // inside push(). + this[kReading] = false + } + + destroy (err) { + if (this.destroyed) { + // Node < 16 + return this + } + + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } + + if (err) { + this[kAbort]() + } + + return super.destroy(err) + } + + emit (ev, ...args) { + if (ev === 'data') { + // Node < 16.7 + this._readableState.dataEmitted = true + } else if (ev === 'error') { + // Node < 16 + this._readableState.errorEmitted = true + } + return super.emit(ev, ...args) + } + + on (ev, ...args) { + if (ev === 'data' || ev === 'readable') { + this[kReading] = true + } + return super.on(ev, ...args) + } + + addListener (ev, ...args) { + return this.on(ev, ...args) + } + + off (ev, ...args) { + const ret = super.off(ev, ...args) + if (ev === 'data' || ev === 'readable') { + this[kReading] = ( + this.listenerCount('data') > 0 || + this.listenerCount('readable') > 0 + ) + } + return ret + } + + removeListener (ev, ...args) { + return this.off(ev, ...args) + } + + push (chunk) { + if (this[kConsume] && chunk !== null && this.readableLength === 0) { + consumePush(this[kConsume], chunk) + return this[kReading] ? 
super.push(chunk) : true + } + return super.push(chunk) + } + + // https://fetch.spec.whatwg.org/#dom-body-text + async text () { + return consume(this, 'text') + } + + // https://fetch.spec.whatwg.org/#dom-body-json + async json () { + return consume(this, 'json') + } + + // https://fetch.spec.whatwg.org/#dom-body-blob + async blob () { + return consume(this, 'blob') + } + + // https://fetch.spec.whatwg.org/#dom-body-arraybuffer + async arrayBuffer () { + return consume(this, 'arrayBuffer') + } + + // https://fetch.spec.whatwg.org/#dom-body-formdata + async formData () { + // TODO: Implement. + throw new NotSupportedError() + } + + // https://fetch.spec.whatwg.org/#dom-body-bodyused + get bodyUsed () { + return util.isDisturbed(this) + } + + // https://fetch.spec.whatwg.org/#dom-body-body + get body () { + if (!this[kBody]) { + this[kBody] = ReadableStreamFrom(this) + if (this[kConsume]) { + // TODO: Is this the best way to force a lock? + this[kBody].getReader() // Ensure stream is locked. + assert(this[kBody].locked) + } + } + return this[kBody] + } + + dump (opts) { + let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 + const signal = opts && opts.signal + + if (signal) { + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') + } + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) + } + } + + if (this.closed) { + return Promise.resolve(null) + } + + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? util.addAbortListener(signal, () => { + this.destroy() + }) + : noop + + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) + } +} + +// https://streams.spec.whatwg.org/#readablestream-locked +function isLocked (self) { + // Consume is an implicit lock. 
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume] +} + +// https://fetch.spec.whatwg.org/#body-unusable +function isUnusable (self) { + return util.isDisturbed(self) || isLocked(self) +} + +async function consume (stream, type) { + if (isUnusable(stream)) { + throw new TypeError('unusable') + } + + assert(!stream[kConsume]) + + return new Promise((resolve, reject) => { + stream[kConsume] = { + type, + stream, + resolve, + reject, + length: 0, + body: [] + } + + stream + .on('error', function (err) { + consumeFinish(this[kConsume], err) + }) + .on('close', function () { + if (this[kConsume].body !== null) { + consumeFinish(this[kConsume], new RequestAbortedError()) + } + }) + + process.nextTick(consumeStart, stream[kConsume]) + }) +} + +function consumeStart (consume) { + if (consume.body === null) { + return + } + + const { _readableState: state } = consume.stream + + for (const chunk of state.buffer) { + consumePush(consume, chunk) + } + + if (state.endEmitted) { + consumeEnd(this[kConsume]) + } else { + consume.stream.on('end', function () { + consumeEnd(this[kConsume]) + }) + } + + consume.stream.resume() + + while (consume.stream.read() != null) { + // Loop + } +} + +function consumeEnd (consume) { + const { type, body, resolve, stream, length } = consume + + try { + if (type === 'text') { + resolve(toUSVString(Buffer.concat(body))) + } else if (type === 'json') { + resolve(JSON.parse(Buffer.concat(body))) + } else if (type === 'arrayBuffer') { + const dst = new Uint8Array(length) + + let pos = 0 + for (const buf of body) { + dst.set(buf, pos) + pos += buf.byteLength + } + + resolve(dst.buffer) + } else if (type === 'blob') { + if (!Blob) { + Blob = require('buffer').Blob + } + resolve(new Blob(body, { type: stream[kContentType] })) + } + + consumeFinish(consume) + } catch (err) { + stream.destroy(err) + } +} + +function consumePush (consume, chunk) { + consume.length += chunk.length + consume.body.push(chunk) +} + +function consumeFinish (consume, err) { + if (consume.body === null) { + return + } + + if (err) { + consume.reject(err) + } else { + consume.resolve() + } + + consume.type = null + consume.stream = null + consume.resolve = null + consume.reject = null + consume.length = 0 + consume.body = null +} diff --git a/node_modules/undici/lib/api/util.js b/node_modules/undici/lib/api/util.js new file mode 100644 index 0000000..bffd702 --- /dev/null +++ b/node_modules/undici/lib/api/util.js @@ -0,0 +1,46 @@ +const assert = require('assert') +const { + ResponseStatusCodeError +} = require('../core/errors') +const { toUSVString } = require('../core/util') + +async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { + assert(body) + + let chunks = [] + let limit = 0 + + for await (const chunk of body) { + chunks.push(chunk) + limit += chunk.length + if (limit > 128 * 1024) { + chunks = null + break + } + } + + if (statusCode === 204 || !contentType || !chunks) { + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) + return + } + + try { + if (contentType.startsWith('application/json')) { + const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + + if (contentType.startsWith('text/')) { + const payload = toUSVString(Buffer.concat(chunks)) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + } catch (err) { + // Process in a fallback if error + } + + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) +} + +module.exports = { getResolveErrorBodyCallback } diff --git a/node_modules/undici/lib/balanced-pool.js b/node_modules/undici/lib/balanced-pool.js new file mode 100644 index 0000000..10bc6a4 --- /dev/null +++ b/node_modules/undici/lib/balanced-pool.js @@ -0,0 +1,190 @@ +'use strict' + +const { + BalancedPoolMissingUpstreamError, + InvalidArgumentError +} = require('./core/errors') +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} = require('./pool-base') +const Pool = require('./pool') +const { kUrl, kInterceptors } = require('./core/symbols') +const { parseOrigin } = require('./core/util') +const kFactory = Symbol('factory') + +const kOptions = Symbol('options') +const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') +const kCurrentWeight = Symbol('kCurrentWeight') +const kIndex = Symbol('kIndex') +const kWeight = Symbol('kWeight') +const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') +const kErrorPenalty = Symbol('kErrorPenalty') + +function getGreatestCommonDivisor (a, b) { + if (b === 0) return a + return getGreatestCommonDivisor(b, a % b) +} + +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} + +class BalancedPool extends PoolBase { + constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { + super() + + this[kOptions] = opts + this[kIndex] = -1 + this[kCurrentWeight] = 0 + + this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 + this[kErrorPenalty] = this[kOptions].errorPenalty || 15 + + if (!Array.isArray(upstreams)) { + upstreams = [upstreams] + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) + ? opts.interceptors.BalancedPool + : [] + this[kFactory] = factory + + for (const upstream of upstreams) { + this.addUpstream(upstream) + } + this._updateBalancedPoolStats() + } + + addUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin + + if (this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + ))) { + return this + } + const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) + + this[kAddClient](pool) + pool.on('connect', () => { + pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) + }) + + pool.on('connectionError', () => { + pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + }) + + pool.on('disconnect', (...args) => { + const err = args[2] + if (err && err.code === 'UND_ERR_SOCKET') { + // decrease the weight of the pool. 
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + } + }) + + for (const client of this[kClients]) { + client[kWeight] = this[kMaxWeightPerServer] + } + + this._updateBalancedPoolStats() + + return this + } + + _updateBalancedPoolStats () { + this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) + } + + removeUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin + + const pool = this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + )) + + if (pool) { + this[kRemoveClient](pool) + } + + return this + } + + get upstreams () { + return this[kClients] + .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) + .map((p) => p[kUrl].origin) + } + + [kGetDispatcher] () { + // We validate that pools is greater than 0, + // otherwise we would have to wait until an upstream + // is added, which might never happen. + if (this[kClients].length === 0) { + throw new BalancedPoolMissingUpstreamError() + } + + const dispatcher = this[kClients].find(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) + + if (!dispatcher) { + return + } + + const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) + + if (allClientsBusy) { + return + } + + let counter = 0 + + let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) + + while (counter++ < this[kClients].length) { + this[kIndex] = (this[kIndex] + 1) % this[kClients].length + const pool = this[kClients][this[kIndex]] + + // find pool index with the largest weight + if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { + maxWeightIndex = this[kIndex] + } + + // decrease the current weight every `this[kClients].length`. + if (this[kIndex] === 0) { + // Set the current weight to the next lower weight. 
+ this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] + + if (this[kCurrentWeight] <= 0) { + this[kCurrentWeight] = this[kMaxWeightPerServer] + } + } + if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { + return pool + } + } + + this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] + this[kIndex] = maxWeightIndex + return this[kClients][maxWeightIndex] + } +} + +module.exports = BalancedPool diff --git a/node_modules/undici/lib/cache/cache.js b/node_modules/undici/lib/cache/cache.js new file mode 100644 index 0000000..9b31108 --- /dev/null +++ b/node_modules/undici/lib/cache/cache.js @@ -0,0 +1,838 @@ +'use strict' + +const { kConstruct } = require('./symbols') +const { urlEquals, fieldValues: getFieldValues } = require('./util') +const { kEnumerableProperty, isDisturbed } = require('../core/util') +const { kHeadersList } = require('../core/symbols') +const { webidl } = require('../fetch/webidl') +const { Response, cloneResponse } = require('../fetch/response') +const { Request } = require('../fetch/request') +const { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols') +const { fetching } = require('../fetch/index') +const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util') +const assert = require('assert') +const { getGlobalDispatcher } = require('../global') + +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation + * @typedef {Object} CacheBatchOperation + * @property {'delete' | 'put'} type + * @property {any} request + * @property {any} response + * @property {import('../../types/cache').CacheQueryOptions} options + */ + +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list + * @typedef {[any, any][]} requestResponseList + */ + +class Cache { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list + * @type {requestResponseList} + */ + #relevantRequestResponseList + + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() + } + + this.#relevantRequestResponseList = arguments[1] + } + + async match (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) + + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + const p = await this.matchAll(request, options) + + if (p.length === 0) { + return + } + + return p[0] + } + + async matchAll (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) + + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + // 1. + let r = null + + // 2. + if (request !== undefined) { + if (request instanceof Request) { + // 2.1.1 + r = request[kState] + + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { + // 2.2.1 + r = new Request(request)[kState] + } + } + + // 5. + // 5.1 + const responses = [] + + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + responses.push(requestResponse[1]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + responses.push(requestResponse[1]) + } + } + + // 5.4 + // We don't implement CORs so we don't need to loop over the responses, yay! 
+ + // 5.5.1 + const responseList = [] + + // 5.5.2 + for (const response of responses) { + // 5.5.2.1 + const responseObject = new Response(response.body?.source ?? null) + const body = responseObject[kState].body + responseObject[kState] = response + responseObject[kState].body = body + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + + responseList.push(responseObject) + } + + // 6. + return Object.freeze(responseList) + } + + async add (request) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) + + request = webidl.converters.RequestInfo(request) + + // 1. + const requests = [request] + + // 2. + const responseArrayPromise = this.addAll(requests) + + // 3. + return await responseArrayPromise + } + + async addAll (requests) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) + + requests = webidl.converters['sequence'](requests) + + // 1. + const responsePromises = [] + + // 2. + const requestList = [] + + // 3. + for (const request of requests) { + if (typeof request === 'string') { + continue + } + + // 3.1 + const r = request[kState] + + // 3.2 + if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme when method is not GET.' + }) + } + } + + // 4. + /** @type {ReturnType[]} */ + const fetchControllers = [] + + // 5. + for (const request of requests) { + // 5.1 + const r = new Request(request)[kState] + + // 5.2 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme.' + }) + } + + // 5.4 + r.initiator = 'fetch' + r.destination = 'subresource' + + // 5.5 + requestList.push(r) + + // 5.6 + const responsePromise = createDeferredPromise() + + // 5.7 + fetchControllers.push(fetching({ + request: r, + dispatcher: getGlobalDispatcher(), + processResponse (response) { + // 1. + if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Received an invalid status code or the request failed.' + })) + } else if (response.headersList.contains('vary')) { // 2. + // 2.1 + const fieldValues = getFieldValues(response.headersList.get('vary')) + + // 2.2 + for (const fieldValue of fieldValues) { + // 2.2.1 + if (fieldValue === '*') { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'invalid vary field value' + })) + + for (const controller of fetchControllers) { + controller.abort() + } + + return + } + } + } + }, + processResponseEndOfBody (response) { + // 1. + if (response.aborted) { + responsePromise.reject(new DOMException('aborted', 'AbortError')) + return + } + + // 2. + responsePromise.resolve(response) + } + })) + + // 5.8 + responsePromises.push(responsePromise.promise) + } + + // 6. + const p = Promise.all(responsePromises) + + // 7. 
+ const responses = await p + + // 7.1 + const operations = [] + + // 7.2 + let index = 0 + + // 7.3 + for (const response of responses) { + // 7.3.1 + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 7.3.2 + request: requestList[index], // 7.3.3 + response // 7.3.4 + } + + operations.push(operation) // 7.3.5 + + index++ // 7.3.6 + } + + // 7.5 + const cacheJobPromise = createDeferredPromise() + + // 7.6.1 + let errorData = null + + // 7.6.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + // 7.6.3 + queueMicrotask(() => { + // 7.6.3.1 + if (errorData === null) { + cacheJobPromise.resolve(undefined) + } else { + // 7.6.3.2 + cacheJobPromise.reject(errorData) + } + }) + + // 7.7 + return cacheJobPromise.promise + } + + async put (request, response) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) + + request = webidl.converters.RequestInfo(request) + response = webidl.converters.Response(response) + + // 1. + let innerRequest = null + + // 2. + if (request instanceof Request) { + innerRequest = request[kState] + } else { // 3. + innerRequest = new Request(request)[kState] + } + + // 4. + if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Expected an http/s scheme when method is not GET' + }) + } + + // 5. + const innerResponse = response[kState] + + // 6. + if (innerResponse.status === 206) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got 206 status' + }) + } + + // 7. + if (innerResponse.headersList.contains('vary')) { + // 7.1. + const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) + + // 7.2. + for (const fieldValue of fieldValues) { + // 7.2.1 + if (fieldValue === '*') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got * vary field value' + }) + } + } + } + + // 8. + if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Response body is locked or disturbed' + }) + } + + // 9. + const clonedResponse = cloneResponse(innerResponse) + + // 10. + const bodyReadPromise = createDeferredPromise() + + // 11. + if (innerResponse.body != null) { + // 11.1 + const stream = innerResponse.body.stream + + // 11.2 + const reader = stream.getReader() + + // 11.3 + readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) + } else { + bodyReadPromise.resolve(undefined) + } + + // 12. + /** @type {CacheBatchOperation[]} */ + const operations = [] + + // 13. + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 14. + request: innerRequest, // 15. + response: clonedResponse // 16. + } + + // 17. + operations.push(operation) + + // 19. 
+ const bytes = await bodyReadPromise.promise + + if (clonedResponse.body != null) { + clonedResponse.body.source = bytes + } + + // 19.1 + const cacheJobPromise = createDeferredPromise() + + // 19.2.1 + let errorData = null + + // 19.2.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + // 19.2.3 + queueMicrotask(() => { + // 19.2.3.1 + if (errorData === null) { + cacheJobPromise.resolve() + } else { // 19.2.3.2 + cacheJobPromise.reject(errorData) + } + }) + + return cacheJobPromise.promise + } + + async delete (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) + + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + /** + * @type {Request} + */ + let r = null + + if (request instanceof Request) { + r = request[kState] + + if (r.method !== 'GET' && !options.ignoreMethod) { + return false + } + } else { + assert(typeof request === 'string') + + r = new Request(request)[kState] + } + + /** @type {CacheBatchOperation[]} */ + const operations = [] + + /** @type {CacheBatchOperation} */ + const operation = { + type: 'delete', + request: r, + options + } + + operations.push(operation) + + const cacheJobPromise = createDeferredPromise() + + let errorData = null + let requestResponses + + try { + requestResponses = this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + queueMicrotask(() => { + if (errorData === null) { + cacheJobPromise.resolve(!!requestResponses?.length) + } else { + cacheJobPromise.reject(errorData) + } + }) + + return cacheJobPromise.promise + } + + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys + * @param {any} request + * @param {import('../../types/cache').CacheQueryOptions} options + * @returns {readonly Request[]} + */ + async keys (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) + + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + // 1. + let r = null + + // 2. + if (request !== undefined) { + // 2.1 + if (request instanceof Request) { + // 2.1.1 + r = request[kState] + + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { // 2.2 + r = new Request(request)[kState] + } + } + + // 4. + const promise = createDeferredPromise() + + // 5. 
+ // 5.1 + const requests = [] + + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + // 5.2.1.1 + requests.push(requestResponse[0]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + // 5.3.2.1 + requests.push(requestResponse[0]) + } + } + + // 5.4 + queueMicrotask(() => { + // 5.4.1 + const requestList = [] + + // 5.4.2 + for (const request of requests) { + const requestObject = new Request('https://a') + requestObject[kState] = request + requestObject[kHeaders][kHeadersList] = request.headersList + requestObject[kHeaders][kGuard] = 'immutable' + requestObject[kRealm] = request.client + + // 5.4.2.1 + requestList.push(requestObject) + } + + // 5.4.3 + promise.resolve(Object.freeze(requestList)) + }) + + return promise.promise + } + + /** + * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm + * @param {CacheBatchOperation[]} operations + * @returns {requestResponseList} + */ + #batchCacheOperations (operations) { + // 1. + const cache = this.#relevantRequestResponseList + + // 2. + const backupCache = [...cache] + + // 3. + const addedItems = [] + + // 4.1 + const resultList = [] + + try { + // 4.2 + for (const operation of operations) { + // 4.2.1 + if (operation.type !== 'delete' && operation.type !== 'put') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'operation type does not match "delete" or "put"' + }) + } + + // 4.2.2 + if (operation.type === 'delete' && operation.response != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'delete operation should not have an associated response' + }) + } + + // 4.2.3 + if (this.#queryCache(operation.request, operation.options, addedItems).length) { + throw new DOMException('???', 'InvalidStateError') + } + + // 4.2.4 + let requestResponses + + // 4.2.5 + if (operation.type === 'delete') { + // 4.2.5.1 + requestResponses = this.#queryCache(operation.request, operation.options) + + // TODO: the spec is wrong, this is needed to pass WPTs + if (requestResponses.length === 0) { + return [] + } + + // 4.2.5.2 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) + + // 4.2.5.2.1 + cache.splice(idx, 1) + } + } else if (operation.type === 'put') { // 4.2.6 + // 4.2.6.1 + if (operation.response == null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'put operation should have an associated response' + }) + } + + // 4.2.6.2 + const r = operation.request + + // 4.2.6.3 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'expected http or https scheme' + }) + } + + // 4.2.6.4 + if (r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'not get method' + }) + } + + // 4.2.6.5 + if (operation.options != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'options must not be defined' + }) + } + + // 4.2.6.6 + requestResponses = this.#queryCache(operation.request) + + // 4.2.6.7 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) + + // 4.2.6.7.1 + cache.splice(idx, 1) + } + + // 4.2.6.8 + cache.push([operation.request, operation.response]) + + 
// 4.2.6.10 + addedItems.push([operation.request, operation.response]) + } + + // 4.2.7 + resultList.push([operation.request, operation.response]) + } + + // 4.3 + return resultList + } catch (e) { // 5. + // 5.1 + this.#relevantRequestResponseList.length = 0 + + // 5.2 + this.#relevantRequestResponseList = backupCache + + // 5.3 + throw e + } + } + + /** + * @see https://w3c.github.io/ServiceWorker/#query-cache + * @param {any} requestQuery + * @param {import('../../types/cache').CacheQueryOptions} options + * @param {requestResponseList} targetStorage + * @returns {requestResponseList} + */ + #queryCache (requestQuery, options, targetStorage) { + /** @type {requestResponseList} */ + const resultList = [] + + const storage = targetStorage ?? this.#relevantRequestResponseList + + for (const requestResponse of storage) { + const [cachedRequest, cachedResponse] = requestResponse + if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { + resultList.push(requestResponse) + } + } + + return resultList + } + + /** + * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm + * @param {any} requestQuery + * @param {any} request + * @param {any | null} response + * @param {import('../../types/cache').CacheQueryOptions | undefined} options + * @returns {boolean} + */ + #requestMatchesCachedItem (requestQuery, request, response = null, options) { + // if (options?.ignoreMethod === false && request.method === 'GET') { + // return false + // } + + const queryURL = new URL(requestQuery.url) + + const cachedURL = new URL(request.url) + + if (options?.ignoreSearch) { + cachedURL.search = '' + + queryURL.search = '' + } + + if (!urlEquals(queryURL, cachedURL, true)) { + return false + } + + if ( + response == null || + options?.ignoreVary || + !response.headersList.contains('vary') + ) { + return true + } + + const fieldValues = getFieldValues(response.headersList.get('vary')) + + for (const fieldValue of fieldValues) { + if (fieldValue === '*') { + return false + } + + const requestValue = request.headersList.get(fieldValue) + const queryValue = requestQuery.headersList.get(fieldValue) + + // If one has the header and the other doesn't, or one has + // a different value than the other, return false + if (requestValue !== queryValue) { + return false + } + } + + return true + } +} + +Object.defineProperties(Cache.prototype, { + [Symbol.toStringTag]: { + value: 'Cache', + configurable: true + }, + match: kEnumerableProperty, + matchAll: kEnumerableProperty, + add: kEnumerableProperty, + addAll: kEnumerableProperty, + put: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +const cacheQueryOptionConverters = [ + { + key: 'ignoreSearch', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreMethod', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreVary', + converter: webidl.converters.boolean, + defaultValue: false + } +] + +webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) + +webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ + ...cacheQueryOptionConverters, + { + key: 'cacheName', + converter: webidl.converters.DOMString + } +]) + +webidl.converters.Response = webidl.interfaceConverter(Response) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.RequestInfo +) + +module.exports = { + Cache +} diff --git a/node_modules/undici/lib/cache/cachestorage.js 
b/node_modules/undici/lib/cache/cachestorage.js new file mode 100644 index 0000000..7e7f0cf --- /dev/null +++ b/node_modules/undici/lib/cache/cachestorage.js @@ -0,0 +1,144 @@ +'use strict' + +const { kConstruct } = require('./symbols') +const { Cache } = require('./cache') +const { webidl } = require('../fetch/webidl') +const { kEnumerableProperty } = require('../core/util') + +class CacheStorage { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map + * @type {Map} + */ + async has (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) + + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1.1 + // 2.2 + return this.#caches.has(cacheName) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open + * @param {string} cacheName + * @returns {Promise} + */ + async open (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) + + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1 + if (this.#caches.has(cacheName)) { + // await caches.open('v1') !== await caches.open('v1') + + // 2.1.1 + const cache = this.#caches.get(cacheName) + + // 2.1.1.1 + return new Cache(kConstruct, cache) + } + + // 2.2 + const cache = [] + + // 2.3 + this.#caches.set(cacheName, cache) + + // 2.4 + return new Cache(kConstruct, cache) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete + * @param {string} cacheName + * @returns {Promise} + */ + async delete (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) + + cacheName = webidl.converters.DOMString(cacheName) + + return this.#caches.delete(cacheName) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys + * @returns {string[]} + */ + async keys () { + webidl.brandCheck(this, CacheStorage) + + // 2.1 + const keys = this.#caches.keys() + + // 2.2 + return [...keys] + } +} + +Object.defineProperties(CacheStorage.prototype, { + [Symbol.toStringTag]: { + value: 'CacheStorage', + configurable: true + }, + match: kEnumerableProperty, + has: kEnumerableProperty, + open: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +module.exports = { + CacheStorage +} diff --git a/node_modules/undici/lib/cache/symbols.js b/node_modules/undici/lib/cache/symbols.js new file mode 100644 index 0000000..40448d6 --- /dev/null +++ b/node_modules/undici/lib/cache/symbols.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = { + kConstruct: require('../core/symbols').kConstruct +} diff --git a/node_modules/undici/lib/cache/util.js b/node_modules/undici/lib/cache/util.js new file mode 100644 index 0000000..44d52b7 --- /dev/null +++ b/node_modules/undici/lib/cache/util.js @@ -0,0 +1,49 @@ +'use strict' + +const assert = require('assert') +const { URLSerializer } = require('../fetch/dataURL') +const { isValidHeaderName } = require('../fetch/util') + +/** + * @see https://url.spec.whatwg.org/#concept-url-equals + * @param {URL} A + * @param {URL} B + * @param {boolean | undefined} excludeFragment + * @returns {boolean} + */ +function urlEquals (A, B, excludeFragment = false) { + const serializedA = URLSerializer(A, excludeFragment) + + const serializedB = URLSerializer(B, excludeFragment) + + return serializedA === serializedB +} + +/** + * @see 
https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 + * @param {string} header + */ +function fieldValues (header) { + assert(header !== null) + + const values = [] + + for (let value of header.split(',')) { + value = value.trim() + + if (!value.length) { + continue + } else if (!isValidHeaderName(value)) { + continue + } + + values.push(value) + } + + return values +} + +module.exports = { + urlEquals, + fieldValues +} diff --git a/node_modules/undici/lib/client.js b/node_modules/undici/lib/client.js new file mode 100644 index 0000000..22cb390 --- /dev/null +++ b/node_modules/undici/lib/client.js @@ -0,0 +1,2283 @@ +// @ts-check + +'use strict' + +/* global WebAssembly */ + +const assert = require('assert') +const net = require('net') +const http = require('http') +const { pipeline } = require('stream') +const util = require('./core/util') +const timers = require('./timers') +const Request = require('./core/request') +const DispatcherBase = require('./dispatcher-base') +const { + RequestContentLengthMismatchError, + ResponseContentLengthMismatchError, + InvalidArgumentError, + RequestAbortedError, + HeadersTimeoutError, + HeadersOverflowError, + SocketError, + InformationalError, + BodyTimeoutError, + HTTPParserError, + ResponseExceededMaxSizeError, + ClientDestroyedError +} = require('./core/errors') +const buildConnector = require('./core/connect') +const { + kUrl, + kReset, + kServerName, + kClient, + kBusy, + kParser, + kConnect, + kBlocking, + kResuming, + kRunning, + kPending, + kSize, + kWriting, + kQueue, + kConnected, + kConnecting, + kNeedDrain, + kNoRef, + kKeepAliveDefaultTimeout, + kHostHeader, + kPendingIdx, + kRunningIdx, + kError, + kPipelining, + kSocket, + kKeepAliveTimeoutValue, + kMaxHeadersSize, + kKeepAliveMaxTimeout, + kKeepAliveTimeoutThreshold, + kHeadersTimeout, + kBodyTimeout, + kStrictContentLength, + kConnector, + kMaxRedirections, + kMaxRequests, + kCounter, + kClose, + kDestroy, + kDispatch, + kInterceptors, + kLocalAddress, + kMaxResponseSize, + kHTTPConnVersion, + // HTTP2 + kHost, + kHTTP2Session, + kHTTP2SessionState, + kHTTP2BuildRequest, + kHTTP2CopyHeaders, + kHTTP1BuildRequest +} = require('./core/symbols') + +/** @type {import('http2')} */ +let http2 +try { + http2 = require('http2') +} catch { + // @ts-ignore + http2 = { constants: {} } +} + +const { + constants: { + HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_HEADER_SCHEME, + HTTP2_HEADER_CONTENT_LENGTH, + HTTP2_HEADER_EXPECT, + HTTP2_HEADER_STATUS + } +} = http2 + +// Experimental +let h2ExperimentalWarned = false + +const FastBuffer = Buffer[Symbol.species] + +const kClosedResolve = Symbol('kClosedResolve') + +const channels = {} + +try { + const diagnosticsChannel = require('diagnostics_channel') + channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') + channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') + channels.connectError = diagnosticsChannel.channel('undici:client:connectError') + channels.connected = diagnosticsChannel.channel('undici:client:connected') +} catch { + channels.sendHeaders = { hasSubscribers: false } + channels.beforeConnect = { hasSubscribers: false } + channels.connectError = { hasSubscribers: false } + channels.connected = { hasSubscribers: false } +} + +/** + * @type {import('../types/client').default} + */ +class Client extends DispatcherBase { + /** + * + * @param {string|URL} url + * 
@param {import('../types/client').Client.Options} options + */ + constructor (url, { + interceptors, + maxHeaderSize, + headersTimeout, + socketTimeout, + requestTimeout, + connectTimeout, + bodyTimeout, + idleTimeout, + keepAlive, + keepAliveTimeout, + maxKeepAliveTimeout, + keepAliveMaxTimeout, + keepAliveTimeoutThreshold, + socketPath, + pipelining, + tls, + strictContentLength, + maxCachedSessions, + maxRedirections, + connect, + maxRequestsPerClient, + localAddress, + maxResponseSize, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + // h2 + allowH2, + maxConcurrentStreams + } = {}) { + super() + + if (keepAlive !== undefined) { + throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') + } + + if (socketTimeout !== undefined) { + throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') + } + + if (requestTimeout !== undefined) { + throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') + } + + if (idleTimeout !== undefined) { + throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') + } + + if (maxKeepAliveTimeout !== undefined) { + throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') + } + + if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { + throw new InvalidArgumentError('invalid maxHeaderSize') + } + + if (socketPath != null && typeof socketPath !== 'string') { + throw new InvalidArgumentError('invalid socketPath') + } + + if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { + throw new InvalidArgumentError('invalid connectTimeout') + } + + if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveTimeout') + } + + if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveMaxTimeout') + } + + if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) { + throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') + } + + if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') + } + + if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { + throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') + } + + if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { + throw new InvalidArgumentError('localAddress must be valid string IP address') + } + + if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { + throw new InvalidArgumentError('maxResponseSize must be a positive number') + } + + if ( + autoSelectFamilyAttemptTimeout != 
null && + (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) + ) { + throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') + } + + // h2 + if (allowH2 != null && typeof allowH2 !== 'boolean') { + throw new InvalidArgumentError('allowH2 must be a valid boolean value') + } + + if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) { + throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } + + this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) + ? interceptors.Client + : [createRedirectInterceptor({ maxRedirections })] + this[kUrl] = util.parseOrigin(url) + this[kConnector] = connect + this[kSocket] = null + this[kPipelining] = pipelining != null ? pipelining : 1 + this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize + this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout + this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout + this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold + this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] + this[kServerName] = null + this[kLocalAddress] = localAddress != null ? localAddress : null + this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` + this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 + this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 + this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength + this[kMaxRedirections] = maxRedirections + this[kMaxRequests] = maxRequestsPerClient + this[kClosedResolve] = null + this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 + this[kHTTPConnVersion] = 'h1' + + // HTTP/2 + this[kHTTP2Session] = null + this[kHTTP2SessionState] = !allowH2 + ? null + : { + // streams: null, // Fixed queue of streams - For future support of `push` + openStreams: 0, // Keep track of them to decide wether or not unref the session + maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server + } + this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}` + + // kQueue is built up of 3 sections separated by + // the kRunningIdx and kPendingIdx indices. + // | complete | running | pending | + // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length + // kRunningIdx points to the first running element. + // kPendingIdx points to the first pending element. + // This implements a fast queue with an amortized + // time of O(1). 
+ + this[kQueue] = [] + this[kRunningIdx] = 0 + this[kPendingIdx] = 0 + } + + get pipelining () { + return this[kPipelining] + } + + set pipelining (value) { + this[kPipelining] = value + resume(this, true) + } + + get [kPending] () { + return this[kQueue].length - this[kPendingIdx] + } + + get [kRunning] () { + return this[kPendingIdx] - this[kRunningIdx] + } + + get [kSize] () { + return this[kQueue].length - this[kRunningIdx] + } + + get [kConnected] () { + return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed + } + + get [kBusy] () { + const socket = this[kSocket] + return ( + (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || + (this[kSize] >= (this[kPipelining] || 1)) || + this[kPending] > 0 + ) + } + + /* istanbul ignore: only used for test */ + [kConnect] (cb) { + connect(this) + this.once('connect', cb) + } + + [kDispatch] (opts, handler) { + const origin = opts.origin || this[kUrl].origin + + const request = this[kHTTPConnVersion] === 'h2' + ? Request[kHTTP2BuildRequest](origin, opts, handler) + : Request[kHTTP1BuildRequest](origin, opts, handler) + + this[kQueue].push(request) + if (this[kResuming]) { + // Do nothing. + } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { + // Wait a tick in case stream/iterator is ended in the same tick. + this[kResuming] = 1 + process.nextTick(resume, this) + } else { + resume(this, true) + } + + if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { + this[kNeedDrain] = 2 + } + + return this[kNeedDrain] < 2 + } + + async [kClose] () { + // TODO: for H2 we need to gracefully flush the remaining enqueued + // request and close each stream. + return new Promise((resolve) => { + if (!this[kSize]) { + resolve(null) + } else { + this[kClosedResolve] = resolve + } + }) + } + + async [kDestroy] (err) { + return new Promise((resolve) => { + const requests = this[kQueue].splice(this[kPendingIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } + + const callback = () => { + if (this[kClosedResolve]) { + // TODO (fix): Should we error here with ClientDestroyedError? + this[kClosedResolve]() + this[kClosedResolve] = null + } + resolve() + } + + if (this[kHTTP2Session] != null) { + util.destroy(this[kHTTP2Session], err) + this[kHTTP2Session] = null + this[kHTTP2SessionState] = null + } + + if (!this[kSocket]) { + queueMicrotask(callback) + } else { + util.destroy(this[kSocket].on('close', callback), err) + } + + resume(this) + }) + } +} + +function onHttp2SessionError (err) { + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') + + this[kSocket][kError] = err + + onError(this[kClient], err) +} + +function onHttp2FrameError (type, code, id) { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + + if (id === 0) { + this[kSocket][kError] = err + onError(this[kClient], err) + } +} + +function onHttp2SessionEnd () { + util.destroy(this, new SocketError('other side closed')) + util.destroy(this[kSocket], new SocketError('other side closed')) +} + +function onHTTP2GoAway (code) { + const client = this[kClient] + const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) + client[kSocket] = null + client[kHTTP2Session] = null + + if (client.destroyed) { + assert(this[kPending] === 0) + + // Fail entire queue. 
+ const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } + } else if (client[kRunning] > 0) { + // Fail head of pipeline. + const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null + + errorRequest(client, request, err) + } + + client[kPendingIdx] = client[kRunningIdx] + + assert(client[kRunning] === 0) + + client.emit('disconnect', + client[kUrl], + [client], + err + ) + + resume(client) +} + +const constants = require('./llhttp/constants') +const createRedirectInterceptor = require('./interceptor/redirectInterceptor') +const EMPTY_BUF = Buffer.alloc(0) + +async function lazyllhttp () { + const llhttpWasmData = process.env.JEST_WORKER_ID ? require('./llhttp/llhttp-wasm.js') : undefined + + let mod + try { + mod = await WebAssembly.compile(Buffer.from(require('./llhttp/llhttp_simd-wasm.js'), 'base64')) + } catch (e) { + /* istanbul ignore next */ + + // We could check if the error was caused by the simd option not + // being enabled, but the occurring of this other error + // * https://github.com/emscripten-core/emscripten/issues/11495 + // got me to remove that check to avoid breaking Node 12. + mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || require('./llhttp/llhttp-wasm.js'), 'base64')) + } + + return await WebAssembly.instantiate(mod, { + env: { + /* eslint-disable camelcase */ + + wasm_on_url: (p, at, len) => { + /* istanbul ignore next */ + return 0 + }, + wasm_on_status: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_begin: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageBegin() || 0 + }, + wasm_on_header_field: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_header_value: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 + }, + wasm_on_body: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_complete: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageComplete() || 0 + } + + /* eslint-enable camelcase */ + } + }) +} + +let llhttpInstance = null +let llhttpPromise = lazyllhttp() +llhttpPromise.catch() + +let currentParser = null +let currentBufferRef = null +let currentBufferSize = 0 +let currentBufferPtr = null + +const TIMEOUT_HEADERS = 1 +const TIMEOUT_BODY = 2 +const TIMEOUT_IDLE = 3 + +class Parser { + constructor (client, socket, { exports }) { + assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) + + this.llhttp = exports + this.ptr = 
this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) + this.client = client + this.socket = socket + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + this.statusCode = null + this.statusText = '' + this.upgrade = false + this.headers = [] + this.headersSize = 0 + this.headersMaxSize = client[kMaxHeadersSize] + this.shouldKeepAlive = false + this.paused = false + this.resume = this.resume.bind(this) + + this.bytesRead = 0 + + this.keepAlive = '' + this.contentLength = '' + this.connection = '' + this.maxResponseSize = client[kMaxResponseSize] + } + + setTimeout (value, type) { + this.timeoutType = type + if (value !== this.timeoutValue) { + timers.clearTimeout(this.timeout) + if (value) { + this.timeout = timers.setTimeout(onParserTimeout, value, this) + // istanbul ignore else: only for jest + if (this.timeout.unref) { + this.timeout.unref() + } + } else { + this.timeout = null + } + this.timeoutValue = value + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + } + + resume () { + if (this.socket.destroyed || !this.paused) { + return + } + + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_resume(this.ptr) + + assert(this.timeoutType === TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + this.paused = false + this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. + this.readMore() + } + + readMore () { + while (!this.paused && this.ptr) { + const chunk = this.socket.read() + if (chunk === null) { + break + } + this.execute(chunk) + } + } + + execute (data) { + assert(this.ptr != null) + assert(currentParser == null) + assert(!this.paused) + + const { socket, llhttp } = this + + if (data.length > currentBufferSize) { + if (currentBufferPtr) { + llhttp.free(currentBufferPtr) + } + currentBufferSize = Math.ceil(data.length / 4096) * 4096 + currentBufferPtr = llhttp.malloc(currentBufferSize) + } + + new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) + + // Call `execute` on the wasm parser. + // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, + // and finally the length of bytes to parse. + // The return value is an error code or `constants.ERROR.OK`. 
+ try { + let ret + + try { + currentBufferRef = data + currentParser = this + ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) + /* eslint-disable-next-line no-useless-catch */ + } catch (err) { + /* istanbul ignore next: difficult to make a test case for */ + throw err + } finally { + currentParser = null + currentBufferRef = null + } + + const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr + + if (ret === constants.ERROR.PAUSED_UPGRADE) { + this.onUpgrade(data.slice(offset)) + } else if (ret === constants.ERROR.PAUSED) { + this.paused = true + socket.unshift(data.slice(offset)) + } else if (ret !== constants.ERROR.OK) { + const ptr = llhttp.llhttp_get_error_reason(this.ptr) + let message = '' + /* istanbul ignore else: difficult to make a test case for */ + if (ptr) { + const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) + message = + 'Response does not match the HTTP/1.1 protocol (' + + Buffer.from(llhttp.memory.buffer, ptr, len).toString() + + ')' + } + throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) + } + } catch (err) { + util.destroy(socket, err) + } + } + + destroy () { + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_free(this.ptr) + this.ptr = null + + timers.clearTimeout(this.timeout) + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + + this.paused = false + } + + onStatus (buf) { + this.statusText = buf.toString() + } + + onMessageBegin () { + const { socket, client } = this + + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + if (!request) { + return -1 + } + } + + onHeaderField (buf) { + const len = this.headers.length + + if ((len & 1) === 0) { + this.headers.push(buf) + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } + + this.trackHeader(buf.length) + } + + onHeaderValue (buf) { + let len = this.headers.length + + if ((len & 1) === 1) { + this.headers.push(buf) + len += 1 + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } + + const key = this.headers[len - 2] + if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { + this.keepAlive += buf.toString() + } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { + this.connection += buf.toString() + } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { + this.contentLength += buf.toString() + } + + this.trackHeader(buf.length) + } + + trackHeader (len) { + this.headersSize += len + if (this.headersSize >= this.headersMaxSize) { + util.destroy(this.socket, new HeadersOverflowError()) + } + } + + onUpgrade (head) { + const { upgrade, client, socket, headers, statusCode } = this + + assert(upgrade) + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert(!socket.destroyed) + assert(socket === client[kSocket]) + assert(!this.paused) + assert(request.upgrade || request.method === 'CONNECT') + + this.statusCode = null + this.statusText = '' + this.shouldKeepAlive = null + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + socket.unshift(head) + + socket[kParser].destroy() + socket[kParser] = null + + socket[kClient] = null + socket[kError] = null + socket + .removeListener('error', onSocketError) + .removeListener('readable', onSocketReadable) + .removeListener('end', onSocketEnd) + 
.removeListener('close', onSocketClose) + + client[kSocket] = null + client[kQueue][client[kRunningIdx]++] = null + client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade')) + + try { + request.onUpgrade(statusCode, headers, socket) + } catch (err) { + util.destroy(socket, err) + } + + resume(client) + } + + onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { + const { client, socket, headers, statusText } = this + + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + + /* istanbul ignore next: difficult to make a test case for */ + if (!request) { + return -1 + } + + assert(!this.upgrade) + assert(this.statusCode < 200) + + if (statusCode === 100) { + util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) + return -1 + } + + /* this can only happen if server is misbehaving */ + if (upgrade && !request.upgrade) { + util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) + return -1 + } + + assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) + + this.statusCode = statusCode + this.shouldKeepAlive = ( + shouldKeepAlive || + // Override llhttp value which does not allow keepAlive for HEAD. + (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') + ) + + if (this.statusCode >= 200) { + const bodyTimeout = request.bodyTimeout != null + ? request.bodyTimeout + : client[kBodyTimeout] + this.setTimeout(bodyTimeout, TIMEOUT_BODY) + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + if (request.method === 'CONNECT') { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + if (upgrade) { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (this.shouldKeepAlive && client[kPipelining]) { + const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null + + if (keepAliveTimeout != null) { + const timeout = Math.min( + keepAliveTimeout - client[kKeepAliveTimeoutThreshold], + client[kKeepAliveMaxTimeout] + ) + if (timeout <= 0) { + socket[kReset] = true + } else { + client[kKeepAliveTimeoutValue] = timeout + } + } else { + client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] + } + } else { + // Stop more requests from being dispatched. + socket[kReset] = true + } + + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false + + if (request.aborted) { + return -1 + } + + if (request.method === 'HEAD') { + return 1 + } + + if (statusCode < 200) { + return 1 + } + + if (socket[kBlocking]) { + socket[kBlocking] = false + resume(client) + } + + return pause ? 
constants.ERROR.PAUSED : 0 + } + + onBody (buf) { + const { client, socket, statusCode, maxResponseSize } = this + + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert.strictEqual(this.timeoutType, TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + assert(statusCode >= 200) + + if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { + util.destroy(socket, new ResponseExceededMaxSizeError()) + return -1 + } + + this.bytesRead += buf.length + + if (request.onData(buf) === false) { + return constants.ERROR.PAUSED + } + } + + onMessageComplete () { + const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this + + if (socket.destroyed && (!statusCode || shouldKeepAlive)) { + return -1 + } + + if (upgrade) { + return + } + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert(statusCode >= 100) + + this.statusCode = null + this.statusText = '' + this.bytesRead = 0 + this.contentLength = '' + this.keepAlive = '' + this.connection = '' + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (statusCode < 200) { + return + } + + /* istanbul ignore next: should be handled by llhttp? */ + if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { + util.destroy(socket, new ResponseContentLengthMismatchError()) + return -1 + } + + request.onComplete(headers) + + client[kQueue][client[kRunningIdx]++] = null + + if (socket[kWriting]) { + assert.strictEqual(client[kRunning], 0) + // Response completed before request. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (!shouldKeepAlive) { + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (socket[kReset] && client[kRunning] === 0) { + // Destroy socket once all requests have completed. + // The request at the tail of the pipeline is the one + // that requested reset and no further requests should + // have been queued since then. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (client[kPipelining] === 1) { + // We must wait a full event loop cycle to reuse this socket to make sure + // that non-spec compliant servers are not closing the connection even if they + // said they won't. 
+ setImmediate(resume, client) + } else { + resume(client) + } + } +} + +function onParserTimeout (parser) { + const { socket, timeoutType, client } = parser + + /* istanbul ignore else */ + if (timeoutType === TIMEOUT_HEADERS) { + if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { + assert(!parser.paused, 'cannot be paused while waiting for headers') + util.destroy(socket, new HeadersTimeoutError()) + } + } else if (timeoutType === TIMEOUT_BODY) { + if (!parser.paused) { + util.destroy(socket, new BodyTimeoutError()) + } + } else if (timeoutType === TIMEOUT_IDLE) { + assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) + util.destroy(socket, new InformationalError('socket idle timeout')) + } +} + +function onSocketReadable () { + const { [kParser]: parser } = this + if (parser) { + parser.readMore() + } +} + +function onSocketError (err) { + const { [kClient]: client, [kParser]: parser } = this + + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') + + if (client[kHTTPConnVersion] !== 'h2') { + // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded + // to the user. + if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so for as a valid response. + parser.onMessageComplete() + return + } + } + + this[kError] = err + + onError(this[kClient], err) +} + +function onError (client, err) { + if ( + client[kRunning] === 0 && + err.code !== 'UND_ERR_INFO' && + err.code !== 'UND_ERR_SOCKET' + ) { + // Error is not caused by running request and not a recoverable + // socket error. + + assert(client[kPendingIdx] === client[kRunningIdx]) + + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + assert(client[kSize] === 0) + } +} + +function onSocketEnd () { + const { [kParser]: parser, [kClient]: client } = this + + if (client[kHTTPConnVersion] !== 'h2') { + if (parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + return + } + } + + util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this))) +} + +function onSocketClose () { + const { [kClient]: client, [kParser]: parser } = this + + if (client[kHTTPConnVersion] === 'h1' && parser) { + if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + } + + this[kParser].destroy() + this[kParser] = null + } + + const err = this[kError] || new SocketError('closed', util.getSocketInfo(this)) + + client[kSocket] = null + + if (client.destroyed) { + assert(client[kPending] === 0) + + // Fail entire queue. + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') { + // Fail head of pipeline. 
+ const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null + + errorRequest(client, request, err) + } + + client[kPendingIdx] = client[kRunningIdx] + + assert(client[kRunning] === 0) + + client.emit('disconnect', client[kUrl], [client], err) + + resume(client) +} + +async function connect (client) { + assert(!client[kConnecting]) + assert(!client[kSocket]) + + let { host, hostname, protocol, port } = client[kUrl] + + // Resolve ipv6 + if (hostname[0] === '[') { + const idx = hostname.indexOf(']') + + assert(idx !== -1) + const ip = hostname.substring(1, idx) + + assert(net.isIP(ip)) + hostname = ip + } + + client[kConnecting] = true + + if (channels.beforeConnect.hasSubscribers) { + channels.beforeConnect.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector] + }) + } + + try { + const socket = await new Promise((resolve, reject) => { + client[kConnector]({ + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, (err, socket) => { + if (err) { + reject(err) + } else { + resolve(socket) + } + }) + }) + + if (client.destroyed) { + util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) + return + } + + client[kConnecting] = false + + assert(socket) + + const isH2 = socket.alpnProtocol === 'h2' + if (isH2) { + if (!h2ExperimentalWarned) { + h2ExperimentalWarned = true + process.emitWarning('H2 support is experimental, expect them to change at any time.', { + code: 'UNDICI-H2' + }) + } + + const session = http2.connect(client[kUrl], { + createConnection: () => socket, + peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams + }) + + client[kHTTPConnVersion] = 'h2' + session[kClient] = client + session[kSocket] = socket + session.on('error', onHttp2SessionError) + session.on('frameError', onHttp2FrameError) + session.on('end', onHttp2SessionEnd) + session.on('goaway', onHTTP2GoAway) + session.on('close', onSocketClose) + session.unref() + + client[kHTTP2Session] = session + socket[kHTTP2Session] = session + } else { + if (!llhttpInstance) { + llhttpInstance = await llhttpPromise + llhttpPromise = null + } + + socket[kNoRef] = false + socket[kWriting] = false + socket[kReset] = false + socket[kBlocking] = false + socket[kParser] = new Parser(client, socket, llhttpInstance) + } + + socket[kCounter] = 0 + socket[kMaxRequests] = client[kMaxRequests] + socket[kClient] = client + socket[kError] = null + + socket + .on('error', onSocketError) + .on('readable', onSocketReadable) + .on('end', onSocketEnd) + .on('close', onSocketClose) + + client[kSocket] = socket + + if (channels.connected.hasSubscribers) { + channels.connected.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + socket + }) + } + client.emit('connect', client[kUrl], [client]) + } catch (err) { + if (client.destroyed) { + return + } + + client[kConnecting] = false + + if (channels.connectError.hasSubscribers) { + channels.connectError.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + error: err + }) + } + + if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { + assert(client[kRunning] === 0) + while (client[kPending] > 0 && 
client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { + const request = client[kQueue][client[kPendingIdx]++] + errorRequest(client, request, err) + } + } else { + onError(client, err) + } + + client.emit('connectionError', client[kUrl], [client], err) + } + + resume(client) +} + +function emitDrain (client) { + client[kNeedDrain] = 0 + client.emit('drain', client[kUrl], [client]) +} + +function resume (client, sync) { + if (client[kResuming] === 2) { + return + } + + client[kResuming] = 2 + + _resume(client, sync) + client[kResuming] = 0 + + if (client[kRunningIdx] > 256) { + client[kQueue].splice(0, client[kRunningIdx]) + client[kPendingIdx] -= client[kRunningIdx] + client[kRunningIdx] = 0 + } +} + +function _resume (client, sync) { + while (true) { + if (client.destroyed) { + assert(client[kPending] === 0) + return + } + + if (client[kClosedResolve] && !client[kSize]) { + client[kClosedResolve]() + client[kClosedResolve] = null + return + } + + const socket = client[kSocket] + + if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { + if (client[kSize] === 0) { + if (!socket[kNoRef] && socket.unref) { + socket.unref() + socket[kNoRef] = true + } + } else if (socket[kNoRef] && socket.ref) { + socket.ref() + socket[kNoRef] = false + } + + if (client[kSize] === 0) { + if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { + socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) + } + } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { + if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { + const request = client[kQueue][client[kRunningIdx]] + const headersTimeout = request.headersTimeout != null + ? request.headersTimeout + : client[kHeadersTimeout] + socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) + } + } + } + + if (client[kBusy]) { + client[kNeedDrain] = 2 + } else if (client[kNeedDrain] === 2) { + if (sync) { + client[kNeedDrain] = 1 + process.nextTick(emitDrain, client) + } else { + emitDrain(client) + } + continue + } + + if (client[kPending] === 0) { + return + } + + if (client[kRunning] >= (client[kPipelining] || 1)) { + return + } + + const request = client[kQueue][client[kPendingIdx]] + + if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { + if (client[kRunning] > 0) { + return + } + + client[kServerName] = request.servername + + if (socket && socket.servername !== request.servername) { + util.destroy(socket, new InformationalError('servername changed')) + return + } + } + + if (client[kConnecting]) { + return + } + + if (!socket && !client[kHTTP2Session]) { + connect(client) + return + } + + if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { + return + } + + if (client[kRunning] > 0 && !request.idempotent) { + // Non-idempotent request cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { + // Don't dispatch an upgrade until all preceding requests have completed. + // A misbehaving server might upgrade the connection before all pipelined + // request has completed. + return + } + + if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && + (util.isStream(request.body) || util.isAsyncIterable(request.body))) { + // Request with stream or iterator body can error while other requests + // are inflight and indirectly error those as well. 
+ // Ensure this doesn't happen by waiting for inflight + // to complete before dispatching. + + // Request with stream or iterator body cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (!request.aborted && write(client, request)) { + client[kPendingIdx]++ + } else { + client[kQueue].splice(client[kPendingIdx], 1) + } + } +} + +// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 +function shouldSendContentLength (method) { + return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' +} + +function write (client, request) { + if (client[kHTTPConnVersion] === 'h2') { + writeH2(client, client[kHTTP2Session], request) + return + } + + const { body, method, path, host, upgrade, headers, blocking, reset } = request + + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 + + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. + + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. + body.read(0) + } + + const bodyLength = util.bodyLength(body) + + let contentLength = bodyLength + + if (contentLength === null) { + contentLength = request.contentLength + } + + if (contentLength === 0 && !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. + + contentLength = null + } + + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + const socket = client[kSocket] + + try { + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + errorRequest(client, request, err || new RequestAbortedError()) + + util.destroy(socket, new InformationalError('aborted')) + }) + } catch (err) { + errorRequest(client, request, err) + } + + if (request.aborted) { + return false + } + + if (method === 'HEAD') { + // https://github.com/mcollina/undici/issues/258 + // Close after a HEAD request to interop with misbehaving servers + // that may send a body in the response. + + socket[kReset] = true + } + + if (upgrade || method === 'CONNECT') { + // On CONNECT or upgrade, block pipeline from dispatching further + // requests on this connection. 
+ + socket[kReset] = true + } + + if (reset != null) { + socket[kReset] = reset + } + + if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { + socket[kReset] = true + } + + if (blocking) { + socket[kBlocking] = true + } + + let header = `${method} ${path} HTTP/1.1\r\n` + + if (typeof host === 'string') { + header += `host: ${host}\r\n` + } else { + header += client[kHostHeader] + } + + if (upgrade) { + header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` + } else if (client[kPipelining] && !socket[kReset]) { + header += 'connection: keep-alive\r\n' + } else { + header += 'connection: close\r\n' + } + + if (headers) { + header += headers + } + + if (channels.sendHeaders.hasSubscribers) { + channels.sendHeaders.publish({ request, headers: header, socket }) + } + + /* istanbul ignore else: assertion */ + if (!body || bodyLength === 0) { + if (contentLength === 0) { + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + assert(contentLength === null, 'no body must not have content length') + socket.write(`${header}\r\n`, 'latin1') + } + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(body) + socket.uncork() + request.onBodySent(body) + request.onRequestSent() + if (!expectsPayload) { + socket[kReset] = true + } + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload }) + } else { + writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) + } + } else if (util.isStream(body)) { + writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) + } else if (util.isIterable(body)) { + writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) + } else { + assert(false) + } + + return true +} + +function writeH2 (client, session, request) { + const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request + + let headers + if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()) + else headers = reqHeaders + + if (upgrade) { + errorRequest(client, request, new Error('Upgrade not supported for H2')) + return false + } + + try { + // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + errorRequest(client, request, err || new RequestAbortedError()) + }) + } catch (err) { + errorRequest(client, request, err) + } + + if (request.aborted) { + return false + } + + /** @type {import('node:http2').ClientHttp2Stream} */ + let stream + const h2State = client[kHTTP2SessionState] + + headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] + headers[HTTP2_HEADER_METHOD] = method + + if (method === 'CONNECT') { + session.ref() + // we are already connected, streams are pending, first request + // will create a new stream. 
We trigger a request to create the stream and wait until + // `ready` event is triggered + // We disabled endStream to allow the user to write to the stream + stream = session.request(headers, { endStream: false, signal }) + + if (stream.id && !stream.pending) { + request.onUpgrade(null, null, stream) + ++h2State.openStreams + } else { + stream.once('ready', () => { + request.onUpgrade(null, null, stream) + ++h2State.openStreams + }) + } + + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) session.unref() + }) + + return true + } + + // https://tools.ietf.org/html/rfc7540#section-8.3 + // :path and :scheme headers must be omited when sending CONNECT + + headers[HTTP2_HEADER_PATH] = path + headers[HTTP2_HEADER_SCHEME] = 'https' + + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 + + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. + + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. + body.read(0) + } + + let contentLength = util.bodyLength(body) + + if (contentLength == null) { + contentLength = request.contentLength + } + + if (contentLength === 0 || !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. + + contentLength = null + } + + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. 
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + if (contentLength != null) { + assert(body, 'no body must not have content length') + headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` + } + + session.ref() + + const shouldEndStream = method === 'GET' || method === 'HEAD' + if (expectContinue) { + headers[HTTP2_HEADER_EXPECT] = '100-continue' + stream = session.request(headers, { endStream: shouldEndStream, signal }) + + stream.once('continue', writeBodyH2) + } else { + stream = session.request(headers, { + endStream: shouldEndStream, + signal + }) + writeBodyH2() + } + + // Increment counter as we have new several streams open + ++h2State.openStreams + + stream.once('response', headers => { + const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers + + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { + stream.pause() + } + }) + + stream.once('end', () => { + request.onComplete([]) + }) + + stream.on('data', (chunk) => { + if (request.onData(chunk) === false) { + stream.pause() + } + }) + + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) { + session.unref() + } + }) + + stream.once('error', function (err) { + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) + + stream.once('frameError', (type, code) => { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + errorRequest(client, request, err) + + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) + + // stream.on('aborted', () => { + // // TODO(HTTP/2): Support aborted + // }) + + // stream.on('timeout', () => { + // // TODO(HTTP/2): Support timeout + // }) + + // stream.on('push', headers => { + // // TODO(HTTP/2): Suppor push + // }) + + // stream.on('trailers', headers => { + // // TODO(HTTP/2): Support trailers + // }) + + return true + + function writeBodyH2 () { + /* istanbul ignore else: assertion */ + if (!body) { + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + stream.cork() + stream.write(body) + stream.uncork() + stream.end() + request.onBodySent(body) + request.onRequestSent() + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ + client, + request, + contentLength, + h2stream: stream, + expectsPayload, + body: body.stream(), + socket: client[kSocket], + header: '' + }) + } else { + writeBlob({ + body, + client, + request, + contentLength, + expectsPayload, + h2stream: stream, + header: '', + socket: client[kSocket] + }) + } + } else if (util.isStream(body)) { + writeStream({ + body, + client, + request, + contentLength, + expectsPayload, + socket: client[kSocket], + h2stream: stream, + header: '' + }) + } else if (util.isIterable(body)) { + writeIterable({ + body, + client, + request, + contentLength, + expectsPayload, + header: '', + h2stream: stream, + socket: 
client[kSocket] + }) + } else { + assert(false) + } + } +} + +function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') + + if (client[kHTTPConnVersion] === 'h2') { + // For HTTP/2, is enough to pipe the stream + const pipe = pipeline( + body, + h2stream, + (err) => { + if (err) { + util.destroy(body, err) + util.destroy(h2stream, err) + } else { + request.onRequestSent() + } + } + ) + + pipe.on('data', onPipeData) + pipe.once('end', () => { + pipe.removeListener('data', onPipeData) + util.destroy(pipe) + }) + + function onPipeData (chunk) { + request.onBodySent(chunk) + } + + return + } + + let finished = false + + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + + const onData = function (chunk) { + if (finished) { + return + } + + try { + if (!writer.write(chunk) && this.pause) { + this.pause() + } + } catch (err) { + util.destroy(this, err) + } + } + const onDrain = function () { + if (finished) { + return + } + + if (body.resume) { + body.resume() + } + } + const onAbort = function () { + if (finished) { + return + } + const err = new RequestAbortedError() + queueMicrotask(() => onFinished(err)) + } + const onFinished = function (err) { + if (finished) { + return + } + + finished = true + + assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) + + socket + .off('drain', onDrain) + .off('error', onFinished) + + body + .removeListener('data', onData) + .removeListener('end', onFinished) + .removeListener('error', onFinished) + .removeListener('close', onAbort) + + if (!err) { + try { + writer.end() + } catch (er) { + err = er + } + } + + writer.destroy(err) + + if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { + util.destroy(body, err) + } else { + util.destroy(body) + } + } + + body + .on('data', onData) + .on('end', onFinished) + .on('error', onFinished) + .on('close', onAbort) + + if (body.resume) { + body.resume() + } + + socket + .on('drain', onDrain) + .on('error', onFinished) +} + +async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength === body.size, 'blob body must have content length') + + const isH2 = client[kHTTPConnVersion] === 'h2' + try { + if (contentLength != null && contentLength !== body.size) { + throw new RequestContentLengthMismatchError() + } + + const buffer = Buffer.from(await body.arrayBuffer()) + + if (isH2) { + h2stream.cork() + h2stream.write(buffer) + h2stream.uncork() + } else { + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(buffer) + socket.uncork() + } + + request.onBodySent(buffer) + request.onRequestSent() + + if (!expectsPayload) { + socket[kReset] = true + } + + resume(client) + } catch (err) { + util.destroy(isH2 ? 
h2stream : socket, err) + } +} + +async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') + + let callback = null + function onDrain () { + if (callback) { + const cb = callback + callback = null + cb() + } + } + + const waitForDrain = () => new Promise((resolve, reject) => { + assert(callback === null) + + if (socket[kError]) { + reject(socket[kError]) + } else { + callback = resolve + } + }) + + if (client[kHTTPConnVersion] === 'h2') { + h2stream + .on('close', onDrain) + .on('drain', onDrain) + + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] + } + + const res = h2stream.write(chunk) + request.onBodySent(chunk) + if (!res) { + await waitForDrain() + } + } + } catch (err) { + h2stream.destroy(err) + } finally { + request.onRequestSent() + h2stream.end() + h2stream + .off('close', onDrain) + .off('drain', onDrain) + } + + return + } + + socket + .on('close', onDrain) + .on('drain', onDrain) + + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] + } + + if (!writer.write(chunk)) { + await waitForDrain() + } + } + + writer.end() + } catch (err) { + writer.destroy(err) + } finally { + socket + .off('close', onDrain) + .off('drain', onDrain) + } +} + +class AsyncWriter { + constructor ({ socket, request, contentLength, client, expectsPayload, header }) { + this.socket = socket + this.request = request + this.contentLength = contentLength + this.client = client + this.bytesWritten = 0 + this.expectsPayload = expectsPayload + this.header = header + + socket[kWriting] = true + } + + write (chunk) { + const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this + + if (socket[kError]) { + throw socket[kError] + } + + if (socket.destroyed) { + return false + } + + const len = Buffer.byteLength(chunk) + if (!len) { + return true + } + + // We should defer writing chunks. 
+ if (contentLength !== null && bytesWritten + len > contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + socket.cork() + + if (bytesWritten === 0) { + if (!expectsPayload) { + socket[kReset] = true + } + + if (contentLength === null) { + socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') + } else { + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + } + } + + if (contentLength === null) { + socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') + } + + this.bytesWritten += len + + const ret = socket.write(chunk) + + socket.uncork() + + request.onBodySent(chunk) + + if (!ret) { + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() + } + } + } + + return ret + } + + end () { + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this + request.onRequestSent() + + socket[kWriting] = false + + if (socket[kError]) { + throw socket[kError] + } + + if (socket.destroyed) { + return + } + + if (bytesWritten === 0) { + if (expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD send a Content-Length in a request message when + // no Transfer-Encoding is sent and the request method defines a meaning + // for an enclosed payload body. + + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + socket.write(`${header}\r\n`, 'latin1') + } + } else if (contentLength === null) { + socket.write('\r\n0\r\n\r\n', 'latin1') + } + + if (contentLength !== null && bytesWritten !== contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } else { + process.emitWarning(new RequestContentLengthMismatchError()) + } + } + + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() + } + } + + resume(client) + } + + destroy (err) { + const { socket, client } = this + + socket[kWriting] = false + + if (err) { + assert(client[kRunning] <= 1, 'pipeline should only contain this request') + util.destroy(socket, err) + } + } +} + +function errorRequest (client, request, err) { + try { + request.onError(err) + assert(request.aborted) + } catch (err) { + client.emit('error', err) + } +} + +module.exports = Client diff --git a/node_modules/undici/lib/compat/dispatcher-weakref.js b/node_modules/undici/lib/compat/dispatcher-weakref.js new file mode 100644 index 0000000..8cb99e2 --- /dev/null +++ b/node_modules/undici/lib/compat/dispatcher-weakref.js @@ -0,0 +1,48 @@ +'use strict' + +/* istanbul ignore file: only for Node 12 */ + +const { kConnected, kSize } = require('../core/symbols') + +class CompatWeakRef { + constructor (value) { + this.value = value + } + + deref () { + return this.value[kConnected] === 0 && this.value[kSize] === 0 + ? 
undefined + : this.value + } +} + +class CompatFinalizer { + constructor (finalizer) { + this.finalizer = finalizer + } + + register (dispatcher, key) { + if (dispatcher.on) { + dispatcher.on('disconnect', () => { + if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { + this.finalizer(key) + } + }) + } + } +} + +module.exports = function () { + // FIXME: remove workaround when the Node bug is fixed + // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 + if (process.env.NODE_V8_COVERAGE) { + return { + WeakRef: CompatWeakRef, + FinalizationRegistry: CompatFinalizer + } + } + return { + WeakRef: global.WeakRef || CompatWeakRef, + FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer + } +} diff --git a/node_modules/undici/lib/cookies/constants.js b/node_modules/undici/lib/cookies/constants.js new file mode 100644 index 0000000..85f1fec --- /dev/null +++ b/node_modules/undici/lib/cookies/constants.js @@ -0,0 +1,12 @@ +'use strict' + +// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size +const maxAttributeValueSize = 1024 + +// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size +const maxNameValuePairSize = 4096 + +module.exports = { + maxAttributeValueSize, + maxNameValuePairSize +} diff --git a/node_modules/undici/lib/cookies/index.js b/node_modules/undici/lib/cookies/index.js new file mode 100644 index 0000000..c9c1f28 --- /dev/null +++ b/node_modules/undici/lib/cookies/index.js @@ -0,0 +1,184 @@ +'use strict' + +const { parseSetCookie } = require('./parse') +const { stringify, getHeadersList } = require('./util') +const { webidl } = require('../fetch/webidl') +const { Headers } = require('../fetch/headers') + +/** + * @typedef {Object} Cookie + * @property {string} name + * @property {string} value + * @property {Date|number|undefined} expires + * @property {number|undefined} maxAge + * @property {string|undefined} domain + * @property {string|undefined} path + * @property {boolean|undefined} secure + * @property {boolean|undefined} httpOnly + * @property {'Strict'|'Lax'|'None'} sameSite + * @property {string[]} unparsed + */ + +/** + * @param {Headers} headers + * @returns {Record} + */ +function getCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + const cookie = headers.get('cookie') + const out = {} + + if (!cookie) { + return out + } + + for (const piece of cookie.split(';')) { + const [name, ...value] = piece.split('=') + + out[name.trim()] = value.join('=') + } + + return out +} + +/** + * @param {Headers} headers + * @param {string} name + * @param {{ path?: string, domain?: string }|undefined} attributes + * @returns {void} + */ +function deleteCookie (headers, name, attributes) { + webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + name = webidl.converters.DOMString(name) + attributes = webidl.converters.DeleteCookieAttributes(attributes) + + // Matches behavior of + // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 + setCookie(headers, { + name, + value: '', + expires: new Date(0), + ...attributes + }) +} + +/** + * @param {Headers} headers + * @returns {Cookie[]} + */ +function getSetCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + const cookies = 
getHeadersList(headers).cookies + + if (!cookies) { + return [] + } + + // In older versions of undici, cookies is a list of name:value. + return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair)) +} + +/** + * @param {Headers} headers + * @param {Cookie} cookie + * @returns {void} + */ +function setCookie (headers, cookie) { + webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + cookie = webidl.converters.Cookie(cookie) + + const str = stringify(cookie) + + if (str) { + headers.append('Set-Cookie', stringify(cookie)) + } +} + +webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + } +]) + +webidl.converters.Cookie = webidl.dictionaryConverter([ + { + converter: webidl.converters.DOMString, + key: 'name' + }, + { + converter: webidl.converters.DOMString, + key: 'value' + }, + { + converter: webidl.nullableConverter((value) => { + if (typeof value === 'number') { + return webidl.converters['unsigned long long'](value) + } + + return new Date(value) + }), + key: 'expires', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters['long long']), + key: 'maxAge', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'secure', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'httpOnly', + defaultValue: null + }, + { + converter: webidl.converters.USVString, + key: 'sameSite', + allowedValues: ['Strict', 'Lax', 'None'] + }, + { + converter: webidl.sequenceConverter(webidl.converters.DOMString), + key: 'unparsed', + defaultValue: [] + } +]) + +module.exports = { + getCookies, + deleteCookie, + getSetCookies, + setCookie +} diff --git a/node_modules/undici/lib/cookies/parse.js b/node_modules/undici/lib/cookies/parse.js new file mode 100644 index 0000000..aae2750 --- /dev/null +++ b/node_modules/undici/lib/cookies/parse.js @@ -0,0 +1,317 @@ +'use strict' + +const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants') +const { isCTLExcludingHtab } = require('./util') +const { collectASequenceOfCodePointsFast } = require('../fetch/dataURL') +const assert = require('assert') + +/** + * @description Parses the field-value attributes of a set-cookie header string. + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} header + * @returns if the header is invalid, null will be returned + */ +function parseSetCookie (header) { + // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F + // character (CTL characters excluding HTAB): Abort these steps and + // ignore the set-cookie-string entirely. + if (isCTLExcludingHtab(header)) { + return null + } + + let nameValuePair = '' + let unparsedAttributes = '' + let name = '' + let value = '' + + // 2. If the set-cookie-string contains a %x3B (";") character: + if (header.includes(';')) { + // 1. 
The name-value-pair string consists of the characters up to, + // but not including, the first %x3B (";"), and the unparsed- + // attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question). + const position = { position: 0 } + + nameValuePair = collectASequenceOfCodePointsFast(';', header, position) + unparsedAttributes = header.slice(position.position) + } else { + // Otherwise: + + // 1. The name-value-pair string consists of all the characters + // contained in the set-cookie-string, and the unparsed- + // attributes is the empty string. + nameValuePair = header + } + + // 3. If the name-value-pair string lacks a %x3D ("=") character, then + // the name string is empty, and the value string is the value of + // name-value-pair. + if (!nameValuePair.includes('=')) { + value = nameValuePair + } else { + // Otherwise, the name string consists of the characters up to, but + // not including, the first %x3D ("=") character, and the (possibly + // empty) value string consists of the characters after the first + // %x3D ("=") character. + const position = { position: 0 } + name = collectASequenceOfCodePointsFast( + '=', + nameValuePair, + position + ) + value = nameValuePair.slice(position.position + 1) + } + + // 4. Remove any leading or trailing WSP characters from the name + // string and the value string. + name = name.trim() + value = value.trim() + + // 5. If the sum of the lengths of the name string and the value string + // is more than 4096 octets, abort these steps and ignore the set- + // cookie-string entirely. + if (name.length + value.length > maxNameValuePairSize) { + return null + } + + // 6. The cookie-name is the name string, and the cookie-value is the + // value string. + return { + name, value, ...parseUnparsedAttributes(unparsedAttributes) + } +} + +/** + * Parses the remaining attributes of a set-cookie header + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} unparsedAttributes + * @param {[Object.]={}} cookieAttributeList + */ +function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { + // 1. If the unparsed-attributes string is empty, skip the rest of + // these steps. + if (unparsedAttributes.length === 0) { + return cookieAttributeList + } + + // 2. Discard the first character of the unparsed-attributes (which + // will be a %x3B (";") character). + assert(unparsedAttributes[0] === ';') + unparsedAttributes = unparsedAttributes.slice(1) + + let cookieAv = '' + + // 3. If the remaining unparsed-attributes contains a %x3B (";") + // character: + if (unparsedAttributes.includes(';')) { + // 1. Consume the characters of the unparsed-attributes up to, but + // not including, the first %x3B (";") character. + cookieAv = collectASequenceOfCodePointsFast( + ';', + unparsedAttributes, + { position: 0 } + ) + unparsedAttributes = unparsedAttributes.slice(cookieAv.length) + } else { + // Otherwise: + + // 1. Consume the remainder of the unparsed-attributes. + cookieAv = unparsedAttributes + unparsedAttributes = '' + } + + // Let the cookie-av string be the characters consumed in this step. + + let attributeName = '' + let attributeValue = '' + + // 4. If the cookie-av string contains a %x3D ("=") character: + if (cookieAv.includes('=')) { + // 1. 
The (possibly empty) attribute-name string consists of the + // characters up to, but not including, the first %x3D ("=") + // character, and the (possibly empty) attribute-value string + // consists of the characters after the first %x3D ("=") + // character. + const position = { position: 0 } + + attributeName = collectASequenceOfCodePointsFast( + '=', + cookieAv, + position + ) + attributeValue = cookieAv.slice(position.position + 1) + } else { + // Otherwise: + + // 1. The attribute-name string consists of the entire cookie-av + // string, and the attribute-value string is empty. + attributeName = cookieAv + } + + // 5. Remove any leading or trailing WSP characters from the attribute- + // name string and the attribute-value string. + attributeName = attributeName.trim() + attributeValue = attributeValue.trim() + + // 6. If the attribute-value is longer than 1024 octets, ignore the + // cookie-av string and return to Step 1 of this algorithm. + if (attributeValue.length > maxAttributeValueSize) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 7. Process the attribute-name and attribute-value according to the + // requirements in the following subsections. (Notice that + // attributes with unrecognized attribute-names are ignored.) + const attributeNameLowercase = attributeName.toLowerCase() + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 + // If the attribute-name case-insensitively matches the string + // "Expires", the user agent MUST process the cookie-av as follows. + if (attributeNameLowercase === 'expires') { + // 1. Let the expiry-time be the result of parsing the attribute-value + // as cookie-date (see Section 5.1.1). + const expiryTime = new Date(attributeValue) + + // 2. If the attribute-value failed to parse as a cookie date, ignore + // the cookie-av. + + cookieAttributeList.expires = expiryTime + } else if (attributeNameLowercase === 'max-age') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 + // If the attribute-name case-insensitively matches the string "Max- + // Age", the user agent MUST process the cookie-av as follows. + + // 1. If the first character of the attribute-value is not a DIGIT or a + // "-" character, ignore the cookie-av. + const charCode = attributeValue.charCodeAt(0) + + if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 2. If the remainder of attribute-value contains a non-DIGIT + // character, ignore the cookie-av. + if (!/^\d+$/.test(attributeValue)) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 3. Let delta-seconds be the attribute-value converted to an integer. + const deltaSeconds = Number(attributeValue) + + // 4. Let cookie-age-limit be the maximum age of the cookie (which + // SHOULD be 400 days or less, see Section 4.1.2.2). + + // 5. Set delta-seconds to the smaller of its present value and cookie- + // age-limit. + // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) + + // 6. If delta-seconds is less than or equal to zero (0), let expiry- + // time be the earliest representable date and time. Otherwise, let + // the expiry-time be the current date and time plus delta-seconds + // seconds. + // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds + + // 7. 
Append an attribute to the cookie-attribute-list with an + // attribute-name of Max-Age and an attribute-value of expiry-time. + cookieAttributeList.maxAge = deltaSeconds + } else if (attributeNameLowercase === 'domain') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 + // If the attribute-name case-insensitively matches the string "Domain", + // the user agent MUST process the cookie-av as follows. + + // 1. Let cookie-domain be the attribute-value. + let cookieDomain = attributeValue + + // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be + // cookie-domain without its leading %x2E ("."). + if (cookieDomain[0] === '.') { + cookieDomain = cookieDomain.slice(1) + } + + // 3. Convert the cookie-domain to lower case. + cookieDomain = cookieDomain.toLowerCase() + + // 4. Append an attribute to the cookie-attribute-list with an + // attribute-name of Domain and an attribute-value of cookie-domain. + cookieAttributeList.domain = cookieDomain + } else if (attributeNameLowercase === 'path') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 + // If the attribute-name case-insensitively matches the string "Path", + // the user agent MUST process the cookie-av as follows. + + // 1. If the attribute-value is empty or if the first character of the + // attribute-value is not %x2F ("/"): + let cookiePath = '' + if (attributeValue.length === 0 || attributeValue[0] !== '/') { + // 1. Let cookie-path be the default-path. + cookiePath = '/' + } else { + // Otherwise: + + // 1. Let cookie-path be the attribute-value. + cookiePath = attributeValue + } + + // 2. Append an attribute to the cookie-attribute-list with an + // attribute-name of Path and an attribute-value of cookie-path. + cookieAttributeList.path = cookiePath + } else if (attributeNameLowercase === 'secure') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 + // If the attribute-name case-insensitively matches the string "Secure", + // the user agent MUST append an attribute to the cookie-attribute-list + // with an attribute-name of Secure and an empty attribute-value. + + cookieAttributeList.secure = true + } else if (attributeNameLowercase === 'httponly') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 + // If the attribute-name case-insensitively matches the string + // "HttpOnly", the user agent MUST append an attribute to the cookie- + // attribute-list with an attribute-name of HttpOnly and an empty + // attribute-value. + + cookieAttributeList.httpOnly = true + } else if (attributeNameLowercase === 'samesite') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 + // If the attribute-name case-insensitively matches the string + // "SameSite", the user agent MUST process the cookie-av as follows: + + // 1. Let enforcement be "Default". + let enforcement = 'Default' + + const attributeValueLowercase = attributeValue.toLowerCase() + // 2. If cookie-av's attribute-value is a case-insensitive match for + // "None", set enforcement to "None". + if (attributeValueLowercase.includes('none')) { + enforcement = 'None' + } + + // 3. If cookie-av's attribute-value is a case-insensitive match for + // "Strict", set enforcement to "Strict". + if (attributeValueLowercase.includes('strict')) { + enforcement = 'Strict' + } + + // 4. If cookie-av's attribute-value is a case-insensitive match for + // "Lax", set enforcement to "Lax". 
+ if (attributeValueLowercase.includes('lax')) { + enforcement = 'Lax' + } + + // 5. Append an attribute to the cookie-attribute-list with an + // attribute-name of "SameSite" and an attribute-value of + // enforcement. + cookieAttributeList.sameSite = enforcement + } else { + cookieAttributeList.unparsed ??= [] + + cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`) + } + + // 8. Return to Step 1 of this algorithm. + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) +} + +module.exports = { + parseSetCookie, + parseUnparsedAttributes +} diff --git a/node_modules/undici/lib/cookies/util.js b/node_modules/undici/lib/cookies/util.js new file mode 100644 index 0000000..2290329 --- /dev/null +++ b/node_modules/undici/lib/cookies/util.js @@ -0,0 +1,291 @@ +'use strict' + +const assert = require('assert') +const { kHeadersList } = require('../core/symbols') + +function isCTLExcludingHtab (value) { + if (value.length === 0) { + return false + } + + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + (code >= 0x00 || code <= 0x08) || + (code >= 0x0A || code <= 0x1F) || + code === 0x7F + ) { + return false + } + } +} + +/** + CHAR = + token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" | "=" + | "{" | "}" | SP | HT + * @param {string} name + */ +function validateCookieName (name) { + for (const char of name) { + const code = char.charCodeAt(0) + + if ( + (code <= 0x20 || code > 0x7F) || + char === '(' || + char === ')' || + char === '>' || + char === '<' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' || + char === '=' || + char === '{' || + char === '}' + ) { + throw new Error('Invalid cookie name') + } + } +} + +/** + cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + ; US-ASCII characters excluding CTLs, + ; whitespace DQUOTE, comma, semicolon, + ; and backslash + * @param {string} value + */ +function validateCookieValue (value) { + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + code < 0x21 || // exclude CTLs (0-31) + code === 0x22 || + code === 0x2C || + code === 0x3B || + code === 0x5C || + code > 0x7E // non-ascii + ) { + throw new Error('Invalid header value') + } + } +} + +/** + * path-value = + * @param {string} path + */ +function validateCookiePath (path) { + for (const char of path) { + const code = char.charCodeAt(0) + + if (code < 0x21 || char === ';') { + throw new Error('Invalid cookie path') + } + } +} + +/** + * I have no idea why these values aren't allowed to be honest, + * but Deno tests these. 
- Khafra + * @param {string} domain + */ +function validateCookieDomain (domain) { + if ( + domain.startsWith('-') || + domain.endsWith('.') || + domain.endsWith('-') + ) { + throw new Error('Invalid cookie domain') + } +} + +/** + * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 + * @param {number|Date} date + IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT + ; fixed length/zone/capitalization subset of the format + ; see Section 3.3 of [RFC5322] + + day-name = %x4D.6F.6E ; "Mon", case-sensitive + / %x54.75.65 ; "Tue", case-sensitive + / %x57.65.64 ; "Wed", case-sensitive + / %x54.68.75 ; "Thu", case-sensitive + / %x46.72.69 ; "Fri", case-sensitive + / %x53.61.74 ; "Sat", case-sensitive + / %x53.75.6E ; "Sun", case-sensitive + date1 = day SP month SP year + ; e.g., 02 Jun 1982 + + day = 2DIGIT + month = %x4A.61.6E ; "Jan", case-sensitive + / %x46.65.62 ; "Feb", case-sensitive + / %x4D.61.72 ; "Mar", case-sensitive + / %x41.70.72 ; "Apr", case-sensitive + / %x4D.61.79 ; "May", case-sensitive + / %x4A.75.6E ; "Jun", case-sensitive + / %x4A.75.6C ; "Jul", case-sensitive + / %x41.75.67 ; "Aug", case-sensitive + / %x53.65.70 ; "Sep", case-sensitive + / %x4F.63.74 ; "Oct", case-sensitive + / %x4E.6F.76 ; "Nov", case-sensitive + / %x44.65.63 ; "Dec", case-sensitive + year = 4DIGIT + + GMT = %x47.4D.54 ; "GMT", case-sensitive + + time-of-day = hour ":" minute ":" second + ; 00:00:00 - 23:59:60 (leap second) + + hour = 2DIGIT + minute = 2DIGIT + second = 2DIGIT + */ +function toIMFDate (date) { + if (typeof date === 'number') { + date = new Date(date) + } + + const days = [ + 'Sun', 'Mon', 'Tue', 'Wed', + 'Thu', 'Fri', 'Sat' + ] + + const months = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' + ] + + const dayName = days[date.getUTCDay()] + const day = date.getUTCDate().toString().padStart(2, '0') + const month = months[date.getUTCMonth()] + const year = date.getUTCFullYear() + const hour = date.getUTCHours().toString().padStart(2, '0') + const minute = date.getUTCMinutes().toString().padStart(2, '0') + const second = date.getUTCSeconds().toString().padStart(2, '0') + + return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` +} + +/** + max-age-av = "Max-Age=" non-zero-digit *DIGIT + ; In practice, both expires-av and max-age-av + ; are limited to dates representable by the + ; user agent. 
+ * @param {number} maxAge + */ +function validateCookieMaxAge (maxAge) { + if (maxAge < 0) { + throw new Error('Invalid cookie max-age') + } +} + +/** + * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 + * @param {import('./index').Cookie} cookie + */ +function stringify (cookie) { + if (cookie.name.length === 0) { + return null + } + + validateCookieName(cookie.name) + validateCookieValue(cookie.value) + + const out = [`${cookie.name}=${cookie.value}`] + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 + if (cookie.name.startsWith('__Secure-')) { + cookie.secure = true + } + + if (cookie.name.startsWith('__Host-')) { + cookie.secure = true + cookie.domain = null + cookie.path = '/' + } + + if (cookie.secure) { + out.push('Secure') + } + + if (cookie.httpOnly) { + out.push('HttpOnly') + } + + if (typeof cookie.maxAge === 'number') { + validateCookieMaxAge(cookie.maxAge) + out.push(`Max-Age=${cookie.maxAge}`) + } + + if (cookie.domain) { + validateCookieDomain(cookie.domain) + out.push(`Domain=${cookie.domain}`) + } + + if (cookie.path) { + validateCookiePath(cookie.path) + out.push(`Path=${cookie.path}`) + } + + if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { + out.push(`Expires=${toIMFDate(cookie.expires)}`) + } + + if (cookie.sameSite) { + out.push(`SameSite=${cookie.sameSite}`) + } + + for (const part of cookie.unparsed) { + if (!part.includes('=')) { + throw new Error('Invalid unparsed') + } + + const [key, ...value] = part.split('=') + + out.push(`${key.trim()}=${value.join('=')}`) + } + + return out.join('; ') +} + +let kHeadersListNode + +function getHeadersList (headers) { + if (headers[kHeadersList]) { + return headers[kHeadersList] + } + + if (!kHeadersListNode) { + kHeadersListNode = Object.getOwnPropertySymbols(headers).find( + (symbol) => symbol.description === 'headers list' + ) + + assert(kHeadersListNode, 'Headers cannot be parsed') + } + + const headersList = headers[kHeadersListNode] + assert(headersList) + + return headersList +} + +module.exports = { + isCTLExcludingHtab, + stringify, + getHeadersList +} diff --git a/node_modules/undici/lib/core/connect.js b/node_modules/undici/lib/core/connect.js new file mode 100644 index 0000000..3309117 --- /dev/null +++ b/node_modules/undici/lib/core/connect.js @@ -0,0 +1,189 @@ +'use strict' + +const net = require('net') +const assert = require('assert') +const util = require('./util') +const { InvalidArgumentError, ConnectTimeoutError } = require('./errors') + +let tls // include tls conditionally since it is not always available + +// TODO: session re-use does not wait for the first +// connection to resolve the session and might therefore +// resolve the same servername multiple times even when +// re-use is enabled. 
+ +let SessionCache +// FIXME: remove workaround when the Node bug is fixed +// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 +if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { + SessionCache = class WeakSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + this._sessionRegistry = new global.FinalizationRegistry((key) => { + if (this._sessionCache.size < this._maxCachedSessions) { + return + } + + const ref = this._sessionCache.get(key) + if (ref !== undefined && ref.deref() === undefined) { + this._sessionCache.delete(key) + } + }) + } + + get (sessionKey) { + const ref = this._sessionCache.get(sessionKey) + return ref ? ref.deref() : null + } + + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + this._sessionCache.set(sessionKey, new WeakRef(session)) + this._sessionRegistry.register(session, sessionKey) + } + } +} else { + SessionCache = class SimpleSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + } + + get (sessionKey) { + return this._sessionCache.get(sessionKey) + } + + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + if (this._sessionCache.size >= this._maxCachedSessions) { + // remove the oldest session + const { value: oldestKey } = this._sessionCache.keys().next() + this._sessionCache.delete(oldestKey) + } + + this._sessionCache.set(sessionKey, session) + } + } +} + +function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { + if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { + throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') + } + + const options = { path: socketPath, ...opts } + const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) + timeout = timeout == null ? 10e3 : timeout + allowH2 = allowH2 != null ? allowH2 : false + return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { + let socket + if (protocol === 'https:') { + if (!tls) { + tls = require('tls') + } + servername = servername || options.servername || util.getServerName(host) || null + + const sessionKey = servername || hostname + const session = sessionCache.get(sessionKey) || null + + assert(sessionKey) + + socket = tls.connect({ + highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... + ...options, + servername, + session, + localAddress, + // TODO(HTTP/2): Add support for h2c + ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], + socket: httpSocket, // upgrade socket connection + port: port || 443, + host: hostname + }) + + socket + .on('session', function (session) { + // TODO (fix): Can a session become invalid once established? Don't think so? + sessionCache.set(sessionKey, session) + }) + } else { + assert(!httpSocket, 'httpSocket can only be sent on TLS update') + socket = net.connect({ + highWaterMark: 64 * 1024, // Same as nodejs fs streams. + ...options, + localAddress, + port: port || 80, + host: hostname + }) + } + + // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket + if (options.keepAlive == null || options.keepAlive) { + const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 
60e3 : options.keepAliveInitialDelay + socket.setKeepAlive(true, keepAliveInitialDelay) + } + + const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) + + socket + .setNoDelay(true) + .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () { + cancelTimeout() + + if (callback) { + const cb = callback + callback = null + cb(null, this) + } + }) + .on('error', function (err) { + cancelTimeout() + + if (callback) { + const cb = callback + callback = null + cb(err) + } + }) + + return socket + } +} + +function setupTimeout (onConnectTimeout, timeout) { + if (!timeout) { + return () => {} + } + + let s1 = null + let s2 = null + const timeoutId = setTimeout(() => { + // setImmediate is added to make sure that we priotorise socket error events over timeouts + s1 = setImmediate(() => { + if (process.platform === 'win32') { + // Windows needs an extra setImmediate probably due to implementation differences in the socket logic + s2 = setImmediate(() => onConnectTimeout()) + } else { + onConnectTimeout() + } + }) + }, timeout) + return () => { + clearTimeout(timeoutId) + clearImmediate(s1) + clearImmediate(s2) + } +} + +function onConnectTimeout (socket) { + util.destroy(socket, new ConnectTimeoutError()) +} + +module.exports = buildConnector diff --git a/node_modules/undici/lib/core/errors.js b/node_modules/undici/lib/core/errors.js new file mode 100644 index 0000000..7af704b --- /dev/null +++ b/node_modules/undici/lib/core/errors.js @@ -0,0 +1,230 @@ +'use strict' + +class UndiciError extends Error { + constructor (message) { + super(message) + this.name = 'UndiciError' + this.code = 'UND_ERR' + } +} + +class ConnectTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ConnectTimeoutError) + this.name = 'ConnectTimeoutError' + this.message = message || 'Connect Timeout Error' + this.code = 'UND_ERR_CONNECT_TIMEOUT' + } +} + +class HeadersTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersTimeoutError) + this.name = 'HeadersTimeoutError' + this.message = message || 'Headers Timeout Error' + this.code = 'UND_ERR_HEADERS_TIMEOUT' + } +} + +class HeadersOverflowError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersOverflowError) + this.name = 'HeadersOverflowError' + this.message = message || 'Headers Overflow Error' + this.code = 'UND_ERR_HEADERS_OVERFLOW' + } +} + +class BodyTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, BodyTimeoutError) + this.name = 'BodyTimeoutError' + this.message = message || 'Body Timeout Error' + this.code = 'UND_ERR_BODY_TIMEOUT' + } +} + +class ResponseStatusCodeError extends UndiciError { + constructor (message, statusCode, headers, body) { + super(message) + Error.captureStackTrace(this, ResponseStatusCodeError) + this.name = 'ResponseStatusCodeError' + this.message = message || 'Response Status Code Error' + this.code = 'UND_ERR_RESPONSE_STATUS_CODE' + this.body = body + this.status = statusCode + this.statusCode = statusCode + this.headers = headers + } +} + +class InvalidArgumentError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidArgumentError) + this.name = 'InvalidArgumentError' + this.message = message || 'Invalid Argument Error' + this.code = 'UND_ERR_INVALID_ARG' + } +} + +class InvalidReturnValueError extends UndiciError { + 
constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidReturnValueError) + this.name = 'InvalidReturnValueError' + this.message = message || 'Invalid Return Value Error' + this.code = 'UND_ERR_INVALID_RETURN_VALUE' + } +} + +class RequestAbortedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestAbortedError) + this.name = 'AbortError' + this.message = message || 'Request aborted' + this.code = 'UND_ERR_ABORTED' + } +} + +class InformationalError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InformationalError) + this.name = 'InformationalError' + this.message = message || 'Request information' + this.code = 'UND_ERR_INFO' + } +} + +class RequestContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestContentLengthMismatchError) + this.name = 'RequestContentLengthMismatchError' + this.message = message || 'Request body length does not match content-length header' + this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' + } +} + +class ResponseContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseContentLengthMismatchError) + this.name = 'ResponseContentLengthMismatchError' + this.message = message || 'Response body length does not match content-length header' + this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' + } +} + +class ClientDestroyedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientDestroyedError) + this.name = 'ClientDestroyedError' + this.message = message || 'The client is destroyed' + this.code = 'UND_ERR_DESTROYED' + } +} + +class ClientClosedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientClosedError) + this.name = 'ClientClosedError' + this.message = message || 'The client is closed' + this.code = 'UND_ERR_CLOSED' + } +} + +class SocketError extends UndiciError { + constructor (message, socket) { + super(message) + Error.captureStackTrace(this, SocketError) + this.name = 'SocketError' + this.message = message || 'Socket error' + this.code = 'UND_ERR_SOCKET' + this.socket = socket + } +} + +class NotSupportedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'NotSupportedError' + this.message = message || 'Not supported error' + this.code = 'UND_ERR_NOT_SUPPORTED' + } +} + +class BalancedPoolMissingUpstreamError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'MissingUpstreamError' + this.message = message || 'No upstream has been added to the BalancedPool' + this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' + } +} + +class HTTPParserError extends Error { + constructor (message, code, data) { + super(message) + Error.captureStackTrace(this, HTTPParserError) + this.name = 'HTTPParserError' + this.code = code ? `HPE_${code}` : undefined + this.data = data ? 
data.toString() : undefined + } +} + +class ResponseExceededMaxSizeError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseExceededMaxSizeError) + this.name = 'ResponseExceededMaxSizeError' + this.message = message || 'Response content exceeded max size' + this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' + } +} + +class RequestRetryError extends UndiciError { + constructor (message, code, { headers, data }) { + super(message) + Error.captureStackTrace(this, RequestRetryError) + this.name = 'RequestRetryError' + this.message = message || 'Request retry error' + this.code = 'UND_ERR_REQ_RETRY' + this.statusCode = code + this.data = data + this.headers = headers + } +} + +module.exports = { + HTTPParserError, + UndiciError, + HeadersTimeoutError, + HeadersOverflowError, + BodyTimeoutError, + RequestContentLengthMismatchError, + ConnectTimeoutError, + ResponseStatusCodeError, + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError, + ClientDestroyedError, + ClientClosedError, + InformationalError, + SocketError, + NotSupportedError, + ResponseContentLengthMismatchError, + BalancedPoolMissingUpstreamError, + ResponseExceededMaxSizeError, + RequestRetryError +} diff --git a/node_modules/undici/lib/core/request.js b/node_modules/undici/lib/core/request.js new file mode 100644 index 0000000..3697e6a --- /dev/null +++ b/node_modules/undici/lib/core/request.js @@ -0,0 +1,499 @@ +'use strict' + +const { + InvalidArgumentError, + NotSupportedError +} = require('./errors') +const assert = require('assert') +const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols') +const util = require('./util') + +// tokenRegExp and headerCharRegex have been lifted from +// https://github.com/nodejs/node/blob/main/lib/_http_common.js + +/** + * Verifies that the given val is a valid HTTP token + * per the rules defined in RFC 7230 + * See https://tools.ietf.org/html/rfc7230#section-3.2.6 + */ +const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ + +/** + * Matches if val contains an invalid field-vchar + * field-value = *( field-content / obs-fold ) + * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] + * field-vchar = VCHAR / obs-text + */ +const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ + +// Verifies that a given path is valid does not contain control chars \x00 to \x20 +const invalidPathRegex = /[^\u0021-\u00ff]/ + +const kHandler = Symbol('handler') + +const channels = {} + +let extractBody + +try { + const diagnosticsChannel = require('diagnostics_channel') + channels.create = diagnosticsChannel.channel('undici:request:create') + channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') + channels.headers = diagnosticsChannel.channel('undici:request:headers') + channels.trailers = diagnosticsChannel.channel('undici:request:trailers') + channels.error = diagnosticsChannel.channel('undici:request:error') +} catch { + channels.create = { hasSubscribers: false } + channels.bodySent = { hasSubscribers: false } + channels.headers = { hasSubscribers: false } + channels.trailers = { hasSubscribers: false } + channels.error = { hasSubscribers: false } +} + +class Request { + constructor (origin, { + path, + method, + body, + headers, + query, + idempotent, + blocking, + upgrade, + headersTimeout, + bodyTimeout, + reset, + throwOnError, + expectContinue + }, handler) { + if (typeof path !== 'string') { + throw new InvalidArgumentError('path must be a string') + } else if ( + path[0] !== '/' && 
+ !(path.startsWith('http://') || path.startsWith('https://')) && + method !== 'CONNECT' + ) { + throw new InvalidArgumentError('path must be an absolute URL or start with a slash') + } else if (invalidPathRegex.exec(path) !== null) { + throw new InvalidArgumentError('invalid request path') + } + + if (typeof method !== 'string') { + throw new InvalidArgumentError('method must be a string') + } else if (tokenRegExp.exec(method) === null) { + throw new InvalidArgumentError('invalid request method') + } + + if (upgrade && typeof upgrade !== 'string') { + throw new InvalidArgumentError('upgrade must be a string') + } + + if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('invalid headersTimeout') + } + + if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('invalid bodyTimeout') + } + + if (reset != null && typeof reset !== 'boolean') { + throw new InvalidArgumentError('invalid reset') + } + + if (expectContinue != null && typeof expectContinue !== 'boolean') { + throw new InvalidArgumentError('invalid expectContinue') + } + + this.headersTimeout = headersTimeout + + this.bodyTimeout = bodyTimeout + + this.throwOnError = throwOnError === true + + this.method = method + + this.abort = null + + if (body == null) { + this.body = null + } else if (util.isStream(body)) { + this.body = body + + const rState = this.body._readableState + if (!rState || !rState.autoDestroy) { + this.endHandler = function autoDestroy () { + util.destroy(this) + } + this.body.on('end', this.endHandler) + } + + this.errorHandler = err => { + if (this.abort) { + this.abort(err) + } else { + this.error = err + } + } + this.body.on('error', this.errorHandler) + } else if (util.isBuffer(body)) { + this.body = body.byteLength ? body : null + } else if (ArrayBuffer.isView(body)) { + this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null + } else if (body instanceof ArrayBuffer) { + this.body = body.byteLength ? Buffer.from(body) : null + } else if (typeof body === 'string') { + this.body = body.length ? Buffer.from(body) : null + } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { + this.body = body + } else { + throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') + } + + this.completed = false + + this.aborted = false + + this.upgrade = upgrade || null + + this.path = query ? util.buildURL(path, query) : path + + this.origin = origin + + this.idempotent = idempotent == null + ? method === 'HEAD' || method === 'GET' + : idempotent + + this.blocking = blocking == null ? false : blocking + + this.reset = reset == null ? null : reset + + this.host = null + + this.contentLength = null + + this.contentType = null + + this.headers = '' + + // Only for H2 + this.expectContinue = expectContinue != null ? 
expectContinue : false + + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(this, headers[i], headers[i + 1]) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(this, key, headers[key]) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } + + if (util.isFormDataLike(this.body)) { + if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { + throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') + } + + if (!extractBody) { + extractBody = require('../fetch/body.js').extractBody + } + + const [bodyStream, contentType] = extractBody(body) + if (this.contentType == null) { + this.contentType = contentType + this.headers += `content-type: ${contentType}\r\n` + } + this.body = bodyStream.stream + this.contentLength = bodyStream.length + } else if (util.isBlobLike(body) && this.contentType == null && body.type) { + this.contentType = body.type + this.headers += `content-type: ${body.type}\r\n` + } + + util.validateHandler(handler, method, upgrade) + + this.servername = util.getServerName(this.host) + + this[kHandler] = handler + + if (channels.create.hasSubscribers) { + channels.create.publish({ request: this }) + } + } + + onBodySent (chunk) { + if (this[kHandler].onBodySent) { + try { + return this[kHandler].onBodySent(chunk) + } catch (err) { + this.abort(err) + } + } + } + + onRequestSent () { + if (channels.bodySent.hasSubscribers) { + channels.bodySent.publish({ request: this }) + } + + if (this[kHandler].onRequestSent) { + try { + return this[kHandler].onRequestSent() + } catch (err) { + this.abort(err) + } + } + } + + onConnect (abort) { + assert(!this.aborted) + assert(!this.completed) + + if (this.error) { + abort(this.error) + } else { + this.abort = abort + return this[kHandler].onConnect(abort) + } + } + + onHeaders (statusCode, headers, resume, statusText) { + assert(!this.aborted) + assert(!this.completed) + + if (channels.headers.hasSubscribers) { + channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) + } + + try { + return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + } catch (err) { + this.abort(err) + } + } + + onData (chunk) { + assert(!this.aborted) + assert(!this.completed) + + try { + return this[kHandler].onData(chunk) + } catch (err) { + this.abort(err) + return false + } + } + + onUpgrade (statusCode, headers, socket) { + assert(!this.aborted) + assert(!this.completed) + + return this[kHandler].onUpgrade(statusCode, headers, socket) + } + + onComplete (trailers) { + this.onFinally() + + assert(!this.aborted) + + this.completed = true + if (channels.trailers.hasSubscribers) { + channels.trailers.publish({ request: this, trailers }) + } + + try { + return this[kHandler].onComplete(trailers) + } catch (err) { + // TODO (fix): This might be a bad idea? 
+ this.onError(err) + } + } + + onError (error) { + this.onFinally() + + if (channels.error.hasSubscribers) { + channels.error.publish({ request: this, error }) + } + + if (this.aborted) { + return + } + this.aborted = true + + return this[kHandler].onError(error) + } + + onFinally () { + if (this.errorHandler) { + this.body.off('error', this.errorHandler) + this.errorHandler = null + } + + if (this.endHandler) { + this.body.off('end', this.endHandler) + this.endHandler = null + } + } + + // TODO: adjust to support H2 + addHeader (key, value) { + processHeader(this, key, value) + return this + } + + static [kHTTP1BuildRequest] (origin, opts, handler) { + // TODO: Migrate header parsing here, to make Requests + // HTTP agnostic + return new Request(origin, opts, handler) + } + + static [kHTTP2BuildRequest] (origin, opts, handler) { + const headers = opts.headers + opts = { ...opts, headers: null } + + const request = new Request(origin, opts, handler) + + request.headers = {} + + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(request, headers[i], headers[i + 1], true) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(request, key, headers[key], true) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } + + return request + } + + static [kHTTP2CopyHeaders] (raw) { + const rawHeaders = raw.split('\r\n') + const headers = {} + + for (const header of rawHeaders) { + const [key, value] = header.split(': ') + + if (value == null || value.length === 0) continue + + if (headers[key]) headers[key] += `,${value}` + else headers[key] = value + } + + return headers + } +} + +function processHeaderValue (key, val, skipAppend) { + if (val && typeof val === 'object') { + throw new InvalidArgumentError(`invalid ${key} header`) + } + + val = val != null ? `${val}` : '' + + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + + return skipAppend ? 
val : `${key}: ${val}\r\n` +} + +function processHeader (request, key, val, skipAppend = false) { + if (val && (typeof val === 'object' && !Array.isArray(val))) { + throw new InvalidArgumentError(`invalid ${key} header`) + } else if (val === undefined) { + return + } + + if ( + request.host === null && + key.length === 4 && + key.toLowerCase() === 'host' + ) { + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + // Consumed by Client + request.host = val + } else if ( + request.contentLength === null && + key.length === 14 && + key.toLowerCase() === 'content-length' + ) { + request.contentLength = parseInt(val, 10) + if (!Number.isFinite(request.contentLength)) { + throw new InvalidArgumentError('invalid content-length header') + } + } else if ( + request.contentType === null && + key.length === 12 && + key.toLowerCase() === 'content-type' + ) { + request.contentType = val + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } else if ( + key.length === 17 && + key.toLowerCase() === 'transfer-encoding' + ) { + throw new InvalidArgumentError('invalid transfer-encoding header') + } else if ( + key.length === 10 && + key.toLowerCase() === 'connection' + ) { + const value = typeof val === 'string' ? val.toLowerCase() : null + if (value !== 'close' && value !== 'keep-alive') { + throw new InvalidArgumentError('invalid connection header') + } else if (value === 'close') { + request.reset = true + } + } else if ( + key.length === 10 && + key.toLowerCase() === 'keep-alive' + ) { + throw new InvalidArgumentError('invalid keep-alive header') + } else if ( + key.length === 7 && + key.toLowerCase() === 'upgrade' + ) { + throw new InvalidArgumentError('invalid upgrade header') + } else if ( + key.length === 6 && + key.toLowerCase() === 'expect' + ) { + throw new NotSupportedError('expect header not supported') + } else if (tokenRegExp.exec(key) === null) { + throw new InvalidArgumentError('invalid header key') + } else { + if (Array.isArray(val)) { + for (let i = 0; i < val.length; i++) { + if (skipAppend) { + if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` + else request.headers[key] = processHeaderValue(key, val[i], skipAppend) + } else { + request.headers += processHeaderValue(key, val[i]) + } + } + } else { + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } + } +} + +module.exports = Request diff --git a/node_modules/undici/lib/core/symbols.js b/node_modules/undici/lib/core/symbols.js new file mode 100644 index 0000000..68d8566 --- /dev/null +++ b/node_modules/undici/lib/core/symbols.js @@ -0,0 +1,63 @@ +module.exports = { + kClose: Symbol('close'), + kDestroy: Symbol('destroy'), + kDispatch: Symbol('dispatch'), + kUrl: Symbol('url'), + kWriting: Symbol('writing'), + kResuming: Symbol('resuming'), + kQueue: Symbol('queue'), + kConnect: Symbol('connect'), + kConnecting: Symbol('connecting'), + kHeadersList: Symbol('headers list'), + kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), + kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), + kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), + kKeepAliveTimeoutValue: Symbol('keep alive timeout'), + kKeepAlive: Symbol('keep alive'), + kHeadersTimeout: Symbol('headers timeout'), + kBodyTimeout: Symbol('body timeout'), + kServerName: 
Symbol('server name'), + kLocalAddress: Symbol('local address'), + kHost: Symbol('host'), + kNoRef: Symbol('no ref'), + kBodyUsed: Symbol('used'), + kRunning: Symbol('running'), + kBlocking: Symbol('blocking'), + kPending: Symbol('pending'), + kSize: Symbol('size'), + kBusy: Symbol('busy'), + kQueued: Symbol('queued'), + kFree: Symbol('free'), + kConnected: Symbol('connected'), + kClosed: Symbol('closed'), + kNeedDrain: Symbol('need drain'), + kReset: Symbol('reset'), + kDestroyed: Symbol.for('nodejs.stream.destroyed'), + kMaxHeadersSize: Symbol('max headers size'), + kRunningIdx: Symbol('running index'), + kPendingIdx: Symbol('pending index'), + kError: Symbol('error'), + kClients: Symbol('clients'), + kClient: Symbol('client'), + kParser: Symbol('parser'), + kOnDestroyed: Symbol('destroy callbacks'), + kPipelining: Symbol('pipelining'), + kSocket: Symbol('socket'), + kHostHeader: Symbol('host header'), + kConnector: Symbol('connector'), + kStrictContentLength: Symbol('strict content length'), + kMaxRedirections: Symbol('maxRedirections'), + kMaxRequests: Symbol('maxRequestsPerClient'), + kProxy: Symbol('proxy agent options'), + kCounter: Symbol('socket request counter'), + kInterceptors: Symbol('dispatch interceptors'), + kMaxResponseSize: Symbol('max response size'), + kHTTP2Session: Symbol('http2Session'), + kHTTP2SessionState: Symbol('http2Session state'), + kHTTP2BuildRequest: Symbol('http2 build request'), + kHTTP1BuildRequest: Symbol('http1 build request'), + kHTTP2CopyHeaders: Symbol('http2 copy headers'), + kHTTPConnVersion: Symbol('http connection version'), + kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), + kConstruct: Symbol('constructable') +} diff --git a/node_modules/undici/lib/core/util.js b/node_modules/undici/lib/core/util.js new file mode 100644 index 0000000..8d5450b --- /dev/null +++ b/node_modules/undici/lib/core/util.js @@ -0,0 +1,511 @@ +'use strict' + +const assert = require('assert') +const { kDestroyed, kBodyUsed } = require('./symbols') +const { IncomingMessage } = require('http') +const stream = require('stream') +const net = require('net') +const { InvalidArgumentError } = require('./errors') +const { Blob } = require('buffer') +const nodeUtil = require('util') +const { stringify } = require('querystring') + +const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) + +function nop () {} + +function isStream (obj) { + return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' +} + +// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) +function isBlobLike (object) { + return (Blob && object instanceof Blob) || ( + object && + typeof object === 'object' && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + /^(Blob|File)$/.test(object[Symbol.toStringTag]) + ) +} + +function buildURL (url, queryParams) { + if (url.includes('?') || url.includes('#')) { + throw new Error('Query params cannot be passed when url already contains "?" or "#".') + } + + const stringified = stringify(queryParams) + + if (stringified) { + url += '?' 
+ stringified + } + + return url +} + +function parseURL (url) { + if (typeof url === 'string') { + url = new URL(url) + + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } + + return url + } + + if (!url || typeof url !== 'object') { + throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') + } + + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } + + if (!(url instanceof URL)) { + if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { + throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') + } + + if (url.path != null && typeof url.path !== 'string') { + throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') + } + + if (url.pathname != null && typeof url.pathname !== 'string') { + throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') + } + + if (url.hostname != null && typeof url.hostname !== 'string') { + throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') + } + + if (url.origin != null && typeof url.origin !== 'string') { + throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') + } + + const port = url.port != null + ? url.port + : (url.protocol === 'https:' ? 443 : 80) + let origin = url.origin != null + ? url.origin + : `${url.protocol}//${url.hostname}:${port}` + let path = url.path != null + ? url.path + : `${url.pathname || ''}${url.search || ''}` + + if (origin.endsWith('/')) { + origin = origin.substring(0, origin.length - 1) + } + + if (path && !path.startsWith('/')) { + path = `/${path}` + } + // new URL(path, origin) is unsafe when `path` contains an absolute URL + // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: + // If first parameter is a relative URL, second param is required, and will be used as the base URL. + // If first parameter is an absolute URL, a given second param will be ignored. 
+ url = new URL(origin + path) + } + + return url +} + +function parseOrigin (url) { + url = parseURL(url) + + if (url.pathname !== '/' || url.search || url.hash) { + throw new InvalidArgumentError('invalid url') + } + + return url +} + +function getHostname (host) { + if (host[0] === '[') { + const idx = host.indexOf(']') + + assert(idx !== -1) + return host.substring(1, idx) + } + + const idx = host.indexOf(':') + if (idx === -1) return host + + return host.substring(0, idx) +} + +// IP addresses are not valid server names per RFC6066 +// > Currently, the only server names supported are DNS hostnames +function getServerName (host) { + if (!host) { + return null + } + + assert.strictEqual(typeof host, 'string') + + const servername = getHostname(host) + if (net.isIP(servername)) { + return '' + } + + return servername +} + +function deepClone (obj) { + return JSON.parse(JSON.stringify(obj)) +} + +function isAsyncIterable (obj) { + return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') +} + +function isIterable (obj) { + return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) +} + +function bodyLength (body) { + if (body == null) { + return 0 + } else if (isStream(body)) { + const state = body._readableState + return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) + ? state.length + : null + } else if (isBlobLike(body)) { + return body.size != null ? body.size : null + } else if (isBuffer(body)) { + return body.byteLength + } + + return null +} + +function isDestroyed (stream) { + return !stream || !!(stream.destroyed || stream[kDestroyed]) +} + +function isReadableAborted (stream) { + const state = stream && stream._readableState + return isDestroyed(stream) && state && !state.endEmitted +} + +function destroy (stream, err) { + if (stream == null || !isStream(stream) || isDestroyed(stream)) { + return + } + + if (typeof stream.destroy === 'function') { + if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { + // See: https://github.com/nodejs/node/pull/38505/files + stream.socket = null + } + + stream.destroy(err) + } else if (err) { + process.nextTick((stream, err) => { + stream.emit('error', err) + }, stream, err) + } + + if (stream.destroyed !== true) { + stream[kDestroyed] = true + } +} + +const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ +function parseKeepAliveTimeout (val) { + const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) + return m ? 
parseInt(m[1], 10) * 1000 : null +} + +function parseHeaders (headers, obj = {}) { + // For H2 support + if (!Array.isArray(headers)) return headers + + for (let i = 0; i < headers.length; i += 2) { + const key = headers[i].toString().toLowerCase() + let val = obj[key] + + if (!val) { + if (Array.isArray(headers[i + 1])) { + obj[key] = headers[i + 1].map(x => x.toString('utf8')) + } else { + obj[key] = headers[i + 1].toString('utf8') + } + } else { + if (!Array.isArray(val)) { + val = [val] + obj[key] = val + } + val.push(headers[i + 1].toString('utf8')) + } + } + + // See https://github.com/nodejs/node/pull/46528 + if ('content-length' in obj && 'content-disposition' in obj) { + obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') + } + + return obj +} + +function parseRawHeaders (headers) { + const ret = [] + let hasContentLength = false + let contentDispositionIdx = -1 + + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n + 0].toString() + const val = headers[n + 1].toString('utf8') + + if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { + ret.push(key, val) + hasContentLength = true + } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { + contentDispositionIdx = ret.push(key, val) - 1 + } else { + ret.push(key, val) + } + } + + // See https://github.com/nodejs/node/pull/46528 + if (hasContentLength && contentDispositionIdx !== -1) { + ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') + } + + return ret +} + +function isBuffer (buffer) { + // See, https://github.com/mcollina/undici/pull/319 + return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) +} + +function validateHandler (handler, method, upgrade) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') + } + + if (typeof handler.onConnect !== 'function') { + throw new InvalidArgumentError('invalid onConnect method') + } + + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } + + if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { + throw new InvalidArgumentError('invalid onBodySent method') + } + + if (upgrade || method === 'CONNECT') { + if (typeof handler.onUpgrade !== 'function') { + throw new InvalidArgumentError('invalid onUpgrade method') + } + } else { + if (typeof handler.onHeaders !== 'function') { + throw new InvalidArgumentError('invalid onHeaders method') + } + + if (typeof handler.onData !== 'function') { + throw new InvalidArgumentError('invalid onData method') + } + + if (typeof handler.onComplete !== 'function') { + throw new InvalidArgumentError('invalid onComplete method') + } + } +} + +// A body is disturbed if it has been read from and it cannot +// be re-used without losing state or data. +function isDisturbed (body) { + return !!(body && ( + stream.isDisturbed + ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? + : body[kBodyUsed] || + body.readableDidRead || + (body._readableState && body._readableState.dataEmitted) || + isReadableAborted(body) + )) +} + +function isErrored (body) { + return !!(body && ( + stream.isErrored + ? stream.isErrored(body) + : /state: 'errored'/.test(nodeUtil.inspect(body) + ))) +} + +function isReadable (body) { + return !!(body && ( + stream.isReadable + ? 
stream.isReadable(body) + : /state: 'readable'/.test(nodeUtil.inspect(body) + ))) +} + +function getSocketInfo (socket) { + return { + localAddress: socket.localAddress, + localPort: socket.localPort, + remoteAddress: socket.remoteAddress, + remotePort: socket.remotePort, + remoteFamily: socket.remoteFamily, + timeout: socket.timeout, + bytesWritten: socket.bytesWritten, + bytesRead: socket.bytesRead + } +} + +async function * convertIterableToBuffer (iterable) { + for await (const chunk of iterable) { + yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + } +} + +let ReadableStream +function ReadableStreamFrom (iterable) { + if (!ReadableStream) { + ReadableStream = require('stream/web').ReadableStream + } + + if (ReadableStream.from) { + return ReadableStream.from(convertIterableToBuffer(iterable)) + } + + let iterator + return new ReadableStream( + { + async start () { + iterator = iterable[Symbol.asyncIterator]() + }, + async pull (controller) { + const { done, value } = await iterator.next() + if (done) { + queueMicrotask(() => { + controller.close() + }) + } else { + const buf = Buffer.isBuffer(value) ? value : Buffer.from(value) + controller.enqueue(new Uint8Array(buf)) + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + } + }, + 0 + ) +} + +// The chunk should be a FormData instance and contains +// all the required methods. +function isFormDataLike (object) { + return ( + object && + typeof object === 'object' && + typeof object.append === 'function' && + typeof object.delete === 'function' && + typeof object.get === 'function' && + typeof object.getAll === 'function' && + typeof object.has === 'function' && + typeof object.set === 'function' && + object[Symbol.toStringTag] === 'FormData' + ) +} + +function throwIfAborted (signal) { + if (!signal) { return } + if (typeof signal.throwIfAborted === 'function') { + signal.throwIfAborted() + } else { + if (signal.aborted) { + // DOMException not available < v17.0.0 + const err = new Error('The operation was aborted') + err.name = 'AbortError' + throw err + } + } +} + +function addAbortListener (signal, listener) { + if ('addEventListener' in signal) { + signal.addEventListener('abort', listener, { once: true }) + return () => signal.removeEventListener('abort', listener) + } + signal.addListener('abort', listener) + return () => signal.removeListener('abort', listener) +} + +const hasToWellFormed = !!String.prototype.toWellFormed + +/** + * @param {string} val + */ +function toUSVString (val) { + if (hasToWellFormed) { + return `${val}`.toWellFormed() + } else if (nodeUtil.toUSVString) { + return nodeUtil.toUSVString(val) + } + + return `${val}` +} + +// Parsed accordingly to RFC 9110 +// https://www.rfc-editor.org/rfc/rfc9110#field.content-range +function parseRangeHeader (range) { + if (range == null || range === '') return { start: 0, end: null, size: null } + + const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null + return m + ? { + start: parseInt(m[1]), + end: m[2] ? parseInt(m[2]) : null, + size: m[3] ? 
parseInt(m[3]) : null + } + : null +} + +const kEnumerableProperty = Object.create(null) +kEnumerableProperty.enumerable = true + +module.exports = { + kEnumerableProperty, + nop, + isDisturbed, + isErrored, + isReadable, + toUSVString, + isReadableAborted, + isBlobLike, + parseOrigin, + parseURL, + getServerName, + isStream, + isIterable, + isAsyncIterable, + isDestroyed, + parseRawHeaders, + parseHeaders, + parseKeepAliveTimeout, + destroy, + bodyLength, + deepClone, + ReadableStreamFrom, + isBuffer, + validateHandler, + getSocketInfo, + isFormDataLike, + buildURL, + throwIfAborted, + addAbortListener, + parseRangeHeader, + nodeMajor, + nodeMinor, + nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +} diff --git a/node_modules/undici/lib/dispatcher-base.js b/node_modules/undici/lib/dispatcher-base.js new file mode 100644 index 0000000..5c0220b --- /dev/null +++ b/node_modules/undici/lib/dispatcher-base.js @@ -0,0 +1,192 @@ +'use strict' + +const Dispatcher = require('./dispatcher') +const { + ClientDestroyedError, + ClientClosedError, + InvalidArgumentError +} = require('./core/errors') +const { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols') + +const kDestroyed = Symbol('destroyed') +const kClosed = Symbol('closed') +const kOnDestroyed = Symbol('onDestroyed') +const kOnClosed = Symbol('onClosed') +const kInterceptedDispatch = Symbol('Intercepted Dispatch') + +class DispatcherBase extends Dispatcher { + constructor () { + super() + + this[kDestroyed] = false + this[kOnDestroyed] = null + this[kClosed] = false + this[kOnClosed] = [] + } + + get destroyed () { + return this[kDestroyed] + } + + get closed () { + return this[kClosed] + } + + get interceptors () { + return this[kInterceptors] + } + + set interceptors (newInterceptors) { + if (newInterceptors) { + for (let i = newInterceptors.length - 1; i >= 0; i--) { + const interceptor = this[kInterceptors][i] + if (typeof interceptor !== 'function') { + throw new InvalidArgumentError('interceptor must be an function') + } + } + } + + this[kInterceptors] = newInterceptors + } + + close (callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.close((err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (this[kDestroyed]) { + queueMicrotask(() => callback(new ClientDestroyedError(), null)) + return + } + + if (this[kClosed]) { + if (this[kOnClosed]) { + this[kOnClosed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } + + this[kClosed] = true + this[kOnClosed].push(callback) + + const onClosed = () => { + const callbacks = this[kOnClosed] + this[kOnClosed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } + + // Should not error. + this[kClose]() + .then(() => this.destroy()) + .then(() => { + queueMicrotask(onClosed) + }) + } + + destroy (err, callback) { + if (typeof err === 'function') { + callback = err + err = null + } + + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.destroy(err, (err, data) => { + return err ? 
/* istanbul ignore next: should never error */ reject(err) : resolve(data) + }) + }) + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (this[kDestroyed]) { + if (this[kOnDestroyed]) { + this[kOnDestroyed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } + + if (!err) { + err = new ClientDestroyedError() + } + + this[kDestroyed] = true + this[kOnDestroyed] = this[kOnDestroyed] || [] + this[kOnDestroyed].push(callback) + + const onDestroyed = () => { + const callbacks = this[kOnDestroyed] + this[kOnDestroyed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } + + // Should not error. + this[kDestroy](err).then(() => { + queueMicrotask(onDestroyed) + }) + } + + [kInterceptedDispatch] (opts, handler) { + if (!this[kInterceptors] || this[kInterceptors].length === 0) { + this[kInterceptedDispatch] = this[kDispatch] + return this[kDispatch](opts, handler) + } + + let dispatch = this[kDispatch].bind(this) + for (let i = this[kInterceptors].length - 1; i >= 0; i--) { + dispatch = this[kInterceptors][i](dispatch) + } + this[kInterceptedDispatch] = dispatch + return dispatch(opts, handler) + } + + dispatch (opts, handler) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') + } + + try { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object.') + } + + if (this[kDestroyed] || this[kOnDestroyed]) { + throw new ClientDestroyedError() + } + + if (this[kClosed]) { + throw new ClientClosedError() + } + + return this[kInterceptedDispatch](opts, handler) + } catch (err) { + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } + + handler.onError(err) + + return false + } + } +} + +module.exports = DispatcherBase diff --git a/node_modules/undici/lib/dispatcher.js b/node_modules/undici/lib/dispatcher.js new file mode 100644 index 0000000..9b809d8 --- /dev/null +++ b/node_modules/undici/lib/dispatcher.js @@ -0,0 +1,19 @@ +'use strict' + +const EventEmitter = require('events') + +class Dispatcher extends EventEmitter { + dispatch () { + throw new Error('not implemented') + } + + close () { + throw new Error('not implemented') + } + + destroy () { + throw new Error('not implemented') + } +} + +module.exports = Dispatcher diff --git a/node_modules/undici/lib/fetch/LICENSE b/node_modules/undici/lib/fetch/LICENSE new file mode 100644 index 0000000..2943500 --- /dev/null +++ b/node_modules/undici/lib/fetch/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Ethan Arrowood + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/undici/lib/fetch/body.js b/node_modules/undici/lib/fetch/body.js new file mode 100644 index 0000000..fd8481b --- /dev/null +++ b/node_modules/undici/lib/fetch/body.js @@ -0,0 +1,605 @@ +'use strict' + +const Busboy = require('@fastify/busboy') +const util = require('../core/util') +const { + ReadableStreamFrom, + isBlobLike, + isReadableStreamLike, + readableStreamClose, + createDeferredPromise, + fullyReadBody +} = require('./util') +const { FormData } = require('./formdata') +const { kState } = require('./symbols') +const { webidl } = require('./webidl') +const { DOMException, structuredClone } = require('./constants') +const { Blob, File: NativeFile } = require('buffer') +const { kBodyUsed } = require('../core/symbols') +const assert = require('assert') +const { isErrored } = require('../core/util') +const { isUint8Array, isArrayBuffer } = require('util/types') +const { File: UndiciFile } = require('./file') +const { parseMIMEType, serializeAMimeType } = require('./dataURL') + +let ReadableStream = globalThis.ReadableStream + +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile +const textEncoder = new TextEncoder() +const textDecoder = new TextDecoder() + +// https://fetch.spec.whatwg.org/#concept-bodyinit-extract +function extractBody (object, keepalive = false) { + if (!ReadableStream) { + ReadableStream = require('stream/web').ReadableStream + } + + // 1. Let stream be null. + let stream = null + + // 2. If object is a ReadableStream object, then set stream to object. + if (object instanceof ReadableStream) { + stream = object + } else if (isBlobLike(object)) { + // 3. Otherwise, if object is a Blob object, set stream to the + // result of running object’s get stream. + stream = object.stream() + } else { + // 4. Otherwise, set stream to a new ReadableStream object, and set + // up stream. + stream = new ReadableStream({ + async pull (controller) { + controller.enqueue( + typeof source === 'string' ? textEncoder.encode(source) : source + ) + queueMicrotask(() => readableStreamClose(controller)) + }, + start () {}, + type: undefined + }) + } + + // 5. Assert: stream is a ReadableStream object. + assert(isReadableStreamLike(stream)) + + // 6. Let action be null. + let action = null + + // 7. Let source be null. + let source = null + + // 8. Let length be null. + let length = null + + // 9. Let type be null. + let type = null + + // 10. Switch on object: + if (typeof object === 'string') { + // Set source to the UTF-8 encoding of object. + // Note: setting source to a Uint8Array here breaks some mocking assumptions. + source = object + + // Set type to `text/plain;charset=UTF-8`. + type = 'text/plain;charset=UTF-8' + } else if (object instanceof URLSearchParams) { + // URLSearchParams + + // spec says to run application/x-www-form-urlencoded on body.list + // this is implemented in Node.js as apart of an URLSearchParams instance toString method + // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 + // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 + + // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. 
+ source = object.toString() + + // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. + type = 'application/x-www-form-urlencoded;charset=UTF-8' + } else if (isArrayBuffer(object)) { + // BufferSource/ArrayBuffer + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.slice()) + } else if (ArrayBuffer.isView(object)) { + // BufferSource/ArrayBufferView + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) + } else if (util.isFormDataLike(object)) { + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const prefix = `--${boundary}\r\nContent-Disposition: form-data` + + /*! formdata-polyfill. MIT License. Jimmy Wärting */ + const escape = (str) => + str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') + const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') + + // Set action to this step: run the multipart/form-data + // encoding algorithm, with object’s entry list and UTF-8. + // - This ensures that the body is immutable and can't be changed afterwords + // - That the content-length is calculated in advance. + // - And that all parts are pre-encoded and ready to be sent. + + const blobParts = [] + const rn = new Uint8Array([13, 10]) // '\r\n' + length = 0 + let hasUnknownSizeValue = false + + for (const [name, value] of object) { + if (typeof value === 'string') { + const chunk = textEncoder.encode(prefix + + `; name="${escape(normalizeLinefeeds(name))}"` + + `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) + blobParts.push(chunk) + length += chunk.byteLength + } else { + const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + + (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + + `Content-Type: ${ + value.type || 'application/octet-stream' + }\r\n\r\n`) + blobParts.push(chunk, value, rn) + if (typeof value.size === 'number') { + length += chunk.byteLength + value.size + rn.byteLength + } else { + hasUnknownSizeValue = true + } + } + } + + const chunk = textEncoder.encode(`--${boundary}--`) + blobParts.push(chunk) + length += chunk.byteLength + if (hasUnknownSizeValue) { + length = null + } + + // Set source to object. + source = object + + action = async function * () { + for (const part of blobParts) { + if (part.stream) { + yield * part.stream() + } else { + yield part + } + } + } + + // Set type to `multipart/form-data; boundary=`, + // followed by the multipart/form-data boundary string generated + // by the multipart/form-data encoding algorithm. + type = 'multipart/form-data; boundary=' + boundary + } else if (isBlobLike(object)) { + // Blob + + // Set source to object. + source = object + + // Set length to object’s size. + length = object.size + + // If object’s type attribute is not the empty byte sequence, set + // type to its value. + if (object.type) { + type = object.type + } + } else if (typeof object[Symbol.asyncIterator] === 'function') { + // If keepalive is true, then throw a TypeError. + if (keepalive) { + throw new TypeError('keepalive') + } + + // If object is disturbed or locked, then throw a TypeError. + if (util.isDisturbed(object) || object.locked) { + throw new TypeError( + 'Response body object should not be disturbed or locked' + ) + } + + stream = + object instanceof ReadableStream ? object : ReadableStreamFrom(object) + } + + // 11. 
If source is a byte sequence, then set action to a + // step that returns source and length to source’s length. + if (typeof source === 'string' || util.isBuffer(source)) { + length = Buffer.byteLength(source) + } + + // 12. If action is non-null, then run these steps in in parallel: + if (action != null) { + // Run action. + let iterator + stream = new ReadableStream({ + async start () { + iterator = action(object)[Symbol.asyncIterator]() + }, + async pull (controller) { + const { value, done } = await iterator.next() + if (done) { + // When running action is done, close stream. + queueMicrotask(() => { + controller.close() + }) + } else { + // Whenever one or more bytes are available and stream is not errored, + // enqueue a Uint8Array wrapping an ArrayBuffer containing the available + // bytes into stream. + if (!isErrored(stream)) { + controller.enqueue(new Uint8Array(value)) + } + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + }, + type: undefined + }) + } + + // 13. Let body be a body whose stream is stream, source is source, + // and length is length. + const body = { stream, source, length } + + // 14. Return (body, type). + return [body, type] +} + +// https://fetch.spec.whatwg.org/#bodyinit-safely-extract +function safelyExtractBody (object, keepalive = false) { + if (!ReadableStream) { + // istanbul ignore next + ReadableStream = require('stream/web').ReadableStream + } + + // To safely extract a body and a `Content-Type` value from + // a byte sequence or BodyInit object object, run these steps: + + // 1. If object is a ReadableStream object, then: + if (object instanceof ReadableStream) { + // Assert: object is neither disturbed nor locked. + // istanbul ignore next + assert(!util.isDisturbed(object), 'The body has already been consumed.') + // istanbul ignore next + assert(!object.locked, 'The stream is locked.') + } + + // 2. Return the results of extracting object. + return extractBody(object, keepalive) +} + +function cloneBody (body) { + // To clone a body body, run these steps: + + // https://fetch.spec.whatwg.org/#concept-body-clone + + // 1. Let « out1, out2 » be the result of teeing body’s stream. + const [out1, out2] = body.stream.tee() + const out2Clone = structuredClone(out2, { transfer: [out2] }) + // This, for whatever reasons, unrefs out2Clone which allows + // the process to exit by itself. + const [, finalClone] = out2Clone.tee() + + // 2. Set body’s stream to out1. + body.stream = out1 + + // 3. Return a body whose stream is out2 and other members are copied from body. + return { + stream: finalClone, + length: body.length, + source: body.source + } +} + +async function * consumeBody (body) { + if (body) { + if (isUint8Array(body)) { + yield body + } else { + const stream = body.stream + + if (util.isDisturbed(stream)) { + throw new TypeError('The body has already been consumed.') + } + + if (stream.locked) { + throw new TypeError('The stream is locked.') + } + + // Compat. + stream[kBodyUsed] = true + + yield * stream + } + } +} + +function throwIfAborted (state) { + if (state.aborted) { + throw new DOMException('The operation was aborted.', 'AbortError') + } +} + +function bodyMixinMethods (instance) { + const methods = { + blob () { + // The blob() method steps are to return the result of + // running consume body with this and the following step + // given a byte sequence bytes: return a Blob whose + // contents are bytes and whose type attribute is this’s + // MIME type. 
+ return specConsumeBody(this, (bytes) => { + let mimeType = bodyMimeType(this) + + if (mimeType === 'failure') { + mimeType = '' + } else if (mimeType) { + mimeType = serializeAMimeType(mimeType) + } + + // Return a Blob whose contents are bytes and type attribute + // is mimeType. + return new Blob([bytes], { type: mimeType }) + }, instance) + }, + + arrayBuffer () { + // The arrayBuffer() method steps are to return the result + // of running consume body with this and the following step + // given a byte sequence bytes: return a new ArrayBuffer + // whose contents are bytes. + return specConsumeBody(this, (bytes) => { + return new Uint8Array(bytes).buffer + }, instance) + }, + + text () { + // The text() method steps are to return the result of running + // consume body with this and UTF-8 decode. + return specConsumeBody(this, utf8DecodeBytes, instance) + }, + + json () { + // The json() method steps are to return the result of running + // consume body with this and parse JSON from bytes. + return specConsumeBody(this, parseJSONFromBytes, instance) + }, + + async formData () { + webidl.brandCheck(this, instance) + + throwIfAborted(this[kState]) + + const contentType = this.headers.get('Content-Type') + + // If mimeType’s essence is "multipart/form-data", then: + if (/multipart\/form-data/.test(contentType)) { + const headers = {} + for (const [key, value] of this.headers) headers[key.toLowerCase()] = value + + const responseFormData = new FormData() + + let busboy + + try { + busboy = new Busboy({ + headers, + preservePath: true + }) + } catch (err) { + throw new DOMException(`${err}`, 'AbortError') + } + + busboy.on('field', (name, value) => { + responseFormData.append(name, value) + }) + busboy.on('file', (name, value, filename, encoding, mimeType) => { + const chunks = [] + + if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { + let base64chunk = '' + + value.on('data', (chunk) => { + base64chunk += chunk.toString().replace(/[\r\n]/gm, '') + + const end = base64chunk.length - base64chunk.length % 4 + chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) + + base64chunk = base64chunk.slice(end) + }) + value.on('end', () => { + chunks.push(Buffer.from(base64chunk, 'base64')) + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } else { + value.on('data', (chunk) => { + chunks.push(chunk) + }) + value.on('end', () => { + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } + }) + + const busboyResolve = new Promise((resolve, reject) => { + busboy.on('finish', resolve) + busboy.on('error', (err) => reject(new TypeError(err))) + }) + + if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) + busboy.end() + await busboyResolve + + return responseFormData + } else if (/application\/x-www-form-urlencoded/.test(contentType)) { + // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: + + // 1. Let entries be the result of parsing bytes. + let entries + try { + let text = '' + // application/x-www-form-urlencoded parser will keep the BOM. 
+ // https://url.spec.whatwg.org/#concept-urlencoded-parser + // Note that streaming decoder is stateful and cannot be reused + const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) + + for await (const chunk of consumeBody(this[kState].body)) { + if (!isUint8Array(chunk)) { + throw new TypeError('Expected Uint8Array chunk') + } + text += streamingDecoder.decode(chunk, { stream: true }) + } + text += streamingDecoder.decode() + entries = new URLSearchParams(text) + } catch (err) { + // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. + // 2. If entries is failure, then throw a TypeError. + throw Object.assign(new TypeError(), { cause: err }) + } + + // 3. Return a new FormData object whose entries are entries. + const formData = new FormData() + for (const [name, value] of entries) { + formData.append(name, value) + } + return formData + } else { + // Wait a tick before checking if the request has been aborted. + // Otherwise, a TypeError can be thrown when an AbortError should. + await Promise.resolve() + + throwIfAborted(this[kState]) + + // Otherwise, throw a TypeError. + throw webidl.errors.exception({ + header: `${instance.name}.formData`, + message: 'Could not parse content as FormData.' + }) + } + } + } + + return methods +} + +function mixinBody (prototype) { + Object.assign(prototype.prototype, bodyMixinMethods(prototype)) +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-body-consume-body + * @param {Response|Request} object + * @param {(value: unknown) => unknown} convertBytesToJSValue + * @param {Response|Request} instance + */ +async function specConsumeBody (object, convertBytesToJSValue, instance) { + webidl.brandCheck(object, instance) + + throwIfAborted(object[kState]) + + // 1. If object is unusable, then return a promise rejected + // with a TypeError. + if (bodyUnusable(object[kState].body)) { + throw new TypeError('Body is unusable') + } + + // 2. Let promise be a new promise. + const promise = createDeferredPromise() + + // 3. Let errorSteps given error be to reject promise with error. + const errorSteps = (error) => promise.reject(error) + + // 4. Let successSteps given a byte sequence data be to resolve + // promise with the result of running convertBytesToJSValue + // with data. If that threw an exception, then run errorSteps + // with that exception. + const successSteps = (data) => { + try { + promise.resolve(convertBytesToJSValue(data)) + } catch (e) { + errorSteps(e) + } + } + + // 5. If object’s body is null, then run successSteps with an + // empty byte sequence. + if (object[kState].body == null) { + successSteps(new Uint8Array()) + return promise.promise + } + + // 6. Otherwise, fully read object’s body given successSteps, + // errorSteps, and object’s relevant global object. + await fullyReadBody(object[kState].body, successSteps, errorSteps) + + // 7. Return promise. + return promise.promise +} + +// https://fetch.spec.whatwg.org/#body-unusable +function bodyUnusable (body) { + // An object including the Body interface mixin is + // said to be unusable if its body is non-null and + // its body’s stream is disturbed or locked. + return body != null && (body.stream.locked || util.isDisturbed(body.stream)) +} + +/** + * @see https://encoding.spec.whatwg.org/#utf-8-decode + * @param {Buffer} buffer + */ +function utf8DecodeBytes (buffer) { + if (buffer.length === 0) { + return '' + } + + // 1. Let buffer be the result of peeking three bytes from + // ioQueue, converted to a byte sequence. + + // 2. 
If buffer is 0xEF 0xBB 0xBF, then read three + // bytes from ioQueue. (Do nothing with those bytes.) + if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { + buffer = buffer.subarray(3) + } + + // 3. Process a queue with an instance of UTF-8’s + // decoder, ioQueue, output, and "replacement". + const output = textDecoder.decode(buffer) + + // 4. Return output. + return output +} + +/** + * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value + * @param {Uint8Array} bytes + */ +function parseJSONFromBytes (bytes) { + return JSON.parse(utf8DecodeBytes(bytes)) +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-body-mime-type + * @param {import('./response').Response|import('./request').Request} object + */ +function bodyMimeType (object) { + const { headersList } = object[kState] + const contentType = headersList.get('content-type') + + if (contentType === null) { + return 'failure' + } + + return parseMIMEType(contentType) +} + +module.exports = { + extractBody, + safelyExtractBody, + cloneBody, + mixinBody +} diff --git a/node_modules/undici/lib/fetch/constants.js b/node_modules/undici/lib/fetch/constants.js new file mode 100644 index 0000000..218fcbe --- /dev/null +++ b/node_modules/undici/lib/fetch/constants.js @@ -0,0 +1,151 @@ +'use strict' + +const { MessageChannel, receiveMessageOnPort } = require('worker_threads') + +const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] +const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) + +const nullBodyStatus = [101, 204, 205, 304] + +const redirectStatus = [301, 302, 303, 307, 308] +const redirectStatusSet = new Set(redirectStatus) + +// https://fetch.spec.whatwg.org/#block-bad-port +const badPorts = [ + '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', + '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', + '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', + '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', + '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', + '10080' +] + +const badPortsSet = new Set(badPorts) + +// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies +const referrerPolicy = [ + '', + 'no-referrer', + 'no-referrer-when-downgrade', + 'same-origin', + 'origin', + 'strict-origin', + 'origin-when-cross-origin', + 'strict-origin-when-cross-origin', + 'unsafe-url' +] +const referrerPolicySet = new Set(referrerPolicy) + +const requestRedirect = ['follow', 'manual', 'error'] + +const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +const safeMethodsSet = new Set(safeMethods) + +const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] + +const requestCredentials = ['omit', 'same-origin', 'include'] + +const requestCache = [ + 'default', + 'no-store', + 'reload', + 'no-cache', + 'force-cache', + 'only-if-cached' +] + +// https://fetch.spec.whatwg.org/#request-body-header-name +const requestBodyHeader = [ + 'content-encoding', + 'content-language', + 'content-location', + 'content-type', + // See https://github.com/nodejs/undici/issues/2021 + // 'Content-Length' is a forbidden header name, which is typically + // removed in the Headers implementation. However, undici doesn't + // filter out headers, so we add it here. 
+ 'content-length' +] + +// https://fetch.spec.whatwg.org/#enumdef-requestduplex +const requestDuplex = [ + 'half' +] + +// http://fetch.spec.whatwg.org/#forbidden-method +const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] +const forbiddenMethodsSet = new Set(forbiddenMethods) + +const subresource = [ + 'audio', + 'audioworklet', + 'font', + 'image', + 'manifest', + 'paintworklet', + 'script', + 'style', + 'track', + 'video', + 'xslt', + '' +] +const subresourceSet = new Set(subresource) + +/** @type {globalThis['DOMException']} */ +const DOMException = globalThis.DOMException ?? (() => { + // DOMException was only made a global in Node v17.0.0, + // but fetch supports >= v16.8. + try { + atob('~') + } catch (err) { + return Object.getPrototypeOf(err).constructor + } +})() + +let channel + +/** @type {globalThis['structuredClone']} */ +const structuredClone = + globalThis.structuredClone ?? + // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js + // structuredClone was added in v17.0.0, but fetch supports v16.8 + function structuredClone (value, options = undefined) { + if (arguments.length === 0) { + throw new TypeError('missing argument') + } + + if (!channel) { + channel = new MessageChannel() + } + channel.port1.unref() + channel.port2.unref() + channel.port1.postMessage(value, options?.transfer) + return receiveMessageOnPort(channel.port2).message + } + +module.exports = { + DOMException, + structuredClone, + subresource, + forbiddenMethods, + requestBodyHeader, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + redirectStatus, + corsSafeListedMethods, + nullBodyStatus, + safeMethods, + badPorts, + requestDuplex, + subresourceSet, + badPortsSet, + redirectStatusSet, + corsSafeListedMethodsSet, + safeMethodsSet, + forbiddenMethodsSet, + referrerPolicySet +} diff --git a/node_modules/undici/lib/fetch/dataURL.js b/node_modules/undici/lib/fetch/dataURL.js new file mode 100644 index 0000000..7b6a606 --- /dev/null +++ b/node_modules/undici/lib/fetch/dataURL.js @@ -0,0 +1,627 @@ +const assert = require('assert') +const { atob } = require('buffer') +const { isomorphicDecode } = require('./util') + +const encoder = new TextEncoder() + +/** + * @see https://mimesniff.spec.whatwg.org/#http-token-code-point + */ +const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ +const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line +/** + * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point + */ +const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line + +// https://fetch.spec.whatwg.org/#data-url-processor +/** @param {URL} dataURL */ +function dataURLProcessor (dataURL) { + // 1. Assert: dataURL’s scheme is "data". + assert(dataURL.protocol === 'data:') + + // 2. Let input be the result of running the URL + // serializer on dataURL with exclude fragment + // set to true. + let input = URLSerializer(dataURL, true) + + // 3. Remove the leading "data:" string from input. + input = input.slice(5) + + // 4. Let position point at the start of input. + const position = { position: 0 } + + // 5. Let mimeType be the result of collecting a + // sequence of code points that are not equal + // to U+002C (,), given position. + let mimeType = collectASequenceOfCodePointsFast( + ',', + input, + position + ) + + // 6. Strip leading and trailing ASCII whitespace + // from mimeType. 
+ // Undici implementation note: we need to store the + // length because if the mimetype has spaces removed, + // the wrong amount will be sliced from the input in + // step #9 + const mimeTypeLength = mimeType.length + mimeType = removeASCIIWhitespace(mimeType, true, true) + + // 7. If position is past the end of input, then + // return failure + if (position.position >= input.length) { + return 'failure' + } + + // 8. Advance position by 1. + position.position++ + + // 9. Let encodedBody be the remainder of input. + const encodedBody = input.slice(mimeTypeLength + 1) + + // 10. Let body be the percent-decoding of encodedBody. + let body = stringPercentDecode(encodedBody) + + // 11. If mimeType ends with U+003B (;), followed by + // zero or more U+0020 SPACE, followed by an ASCII + // case-insensitive match for "base64", then: + if (/;(\u0020){0,}base64$/i.test(mimeType)) { + // 1. Let stringBody be the isomorphic decode of body. + const stringBody = isomorphicDecode(body) + + // 2. Set body to the forgiving-base64 decode of + // stringBody. + body = forgivingBase64(stringBody) + + // 3. If body is failure, then return failure. + if (body === 'failure') { + return 'failure' + } + + // 4. Remove the last 6 code points from mimeType. + mimeType = mimeType.slice(0, -6) + + // 5. Remove trailing U+0020 SPACE code points from mimeType, + // if any. + mimeType = mimeType.replace(/(\u0020)+$/, '') + + // 6. Remove the last U+003B (;) code point from mimeType. + mimeType = mimeType.slice(0, -1) + } + + // 12. If mimeType starts with U+003B (;), then prepend + // "text/plain" to mimeType. + if (mimeType.startsWith(';')) { + mimeType = 'text/plain' + mimeType + } + + // 13. Let mimeTypeRecord be the result of parsing + // mimeType. + let mimeTypeRecord = parseMIMEType(mimeType) + + // 14. If mimeTypeRecord is failure, then set + // mimeTypeRecord to text/plain;charset=US-ASCII. + if (mimeTypeRecord === 'failure') { + mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') + } + + // 15. Return a new data: URL struct whose MIME + // type is mimeTypeRecord and body is body. + // https://fetch.spec.whatwg.org/#data-url-struct + return { mimeType: mimeTypeRecord, body } +} + +// https://url.spec.whatwg.org/#concept-url-serializer +/** + * @param {URL} url + * @param {boolean} excludeFragment + */ +function URLSerializer (url, excludeFragment = false) { + if (!excludeFragment) { + return url.href + } + + const href = url.href + const hashLength = url.hash.length + + return hashLength === 0 ? href : href.substring(0, href.length - hashLength) +} + +// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points +/** + * @param {(char: string) => boolean} condition + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePoints (condition, input, position) { + // 1. Let result be the empty string. + let result = '' + + // 2. While position doesn’t point past the end of input and the + // code point at position within input meets the condition condition: + while (position.position < input.length && condition(input[position.position])) { + // 1. Append that code point to the end of result. + result += input[position.position] + + // 2. Advance position by 1. + position.position++ + } + + // 3. Return result. + return result +} + +/** + * A faster collectASequenceOfCodePoints that only works when comparing a single character. 
+ * @param {string} char + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePointsFast (char, input, position) { + const idx = input.indexOf(char, position.position) + const start = position.position + + if (idx === -1) { + position.position = input.length + return input.slice(start) + } + + position.position = idx + return input.slice(start, position.position) +} + +// https://url.spec.whatwg.org/#string-percent-decode +/** @param {string} input */ +function stringPercentDecode (input) { + // 1. Let bytes be the UTF-8 encoding of input. + const bytes = encoder.encode(input) + + // 2. Return the percent-decoding of bytes. + return percentDecode(bytes) +} + +// https://url.spec.whatwg.org/#percent-decode +/** @param {Uint8Array} input */ +function percentDecode (input) { + // 1. Let output be an empty byte sequence. + /** @type {number[]} */ + const output = [] + + // 2. For each byte byte in input: + for (let i = 0; i < input.length; i++) { + const byte = input[i] + + // 1. If byte is not 0x25 (%), then append byte to output. + if (byte !== 0x25) { + output.push(byte) + + // 2. Otherwise, if byte is 0x25 (%) and the next two bytes + // after byte in input are not in the ranges + // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), + // and 0x61 (a) to 0x66 (f), all inclusive, append byte + // to output. + } else if ( + byte === 0x25 && + !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) + ) { + output.push(0x25) + + // 3. Otherwise: + } else { + // 1. Let bytePoint be the two bytes after byte in input, + // decoded, and then interpreted as hexadecimal number. + const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) + const bytePoint = Number.parseInt(nextTwoBytes, 16) + + // 2. Append a byte whose value is bytePoint to output. + output.push(bytePoint) + + // 3. Skip the next two bytes in input. + i += 2 + } + } + + // 3. Return output. + return Uint8Array.from(output) +} + +// https://mimesniff.spec.whatwg.org/#parse-a-mime-type +/** @param {string} input */ +function parseMIMEType (input) { + // 1. Remove any leading and trailing HTTP whitespace + // from input. + input = removeHTTPWhitespace(input, true, true) + + // 2. Let position be a position variable for input, + // initially pointing at the start of input. + const position = { position: 0 } + + // 3. Let type be the result of collecting a sequence + // of code points that are not U+002F (/) from + // input, given position. + const type = collectASequenceOfCodePointsFast( + '/', + input, + position + ) + + // 4. If type is the empty string or does not solely + // contain HTTP token code points, then return failure. + // https://mimesniff.spec.whatwg.org/#http-token-code-point + if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { + return 'failure' + } + + // 5. If position is past the end of input, then return + // failure + if (position.position > input.length) { + return 'failure' + } + + // 6. Advance position by 1. (This skips past U+002F (/).) + position.position++ + + // 7. Let subtype be the result of collecting a sequence of + // code points that are not U+003B (;) from input, given + // position. + let subtype = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 8. Remove any trailing HTTP whitespace from subtype. + subtype = removeHTTPWhitespace(subtype, false, true) + + // 9. If subtype is the empty string or does not solely + // contain HTTP token code points, then return failure. 
+ if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { + return 'failure' + } + + const typeLowercase = type.toLowerCase() + const subtypeLowercase = subtype.toLowerCase() + + // 10. Let mimeType be a new MIME type record whose type + // is type, in ASCII lowercase, and subtype is subtype, + // in ASCII lowercase. + // https://mimesniff.spec.whatwg.org/#mime-type + const mimeType = { + type: typeLowercase, + subtype: subtypeLowercase, + /** @type {Map} */ + parameters: new Map(), + // https://mimesniff.spec.whatwg.org/#mime-type-essence + essence: `${typeLowercase}/${subtypeLowercase}` + } + + // 11. While position is not past the end of input: + while (position.position < input.length) { + // 1. Advance position by 1. (This skips past U+003B (;).) + position.position++ + + // 2. Collect a sequence of code points that are HTTP + // whitespace from input given position. + collectASequenceOfCodePoints( + // https://fetch.spec.whatwg.org/#http-whitespace + char => HTTP_WHITESPACE_REGEX.test(char), + input, + position + ) + + // 3. Let parameterName be the result of collecting a + // sequence of code points that are not U+003B (;) + // or U+003D (=) from input, given position. + let parameterName = collectASequenceOfCodePoints( + (char) => char !== ';' && char !== '=', + input, + position + ) + + // 4. Set parameterName to parameterName, in ASCII + // lowercase. + parameterName = parameterName.toLowerCase() + + // 5. If position is not past the end of input, then: + if (position.position < input.length) { + // 1. If the code point at position within input is + // U+003B (;), then continue. + if (input[position.position] === ';') { + continue + } + + // 2. Advance position by 1. (This skips past U+003D (=).) + position.position++ + } + + // 6. If position is past the end of input, then break. + if (position.position > input.length) { + break + } + + // 7. Let parameterValue be null. + let parameterValue = null + + // 8. If the code point at position within input is + // U+0022 ("), then: + if (input[position.position] === '"') { + // 1. Set parameterValue to the result of collecting + // an HTTP quoted string from input, given position + // and the extract-value flag. + parameterValue = collectAnHTTPQuotedString(input, position, true) + + // 2. Collect a sequence of code points that are not + // U+003B (;) from input, given position. + collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 9. Otherwise: + } else { + // 1. Set parameterValue to the result of collecting + // a sequence of code points that are not U+003B (;) + // from input, given position. + parameterValue = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 2. Remove any trailing HTTP whitespace from parameterValue. + parameterValue = removeHTTPWhitespace(parameterValue, false, true) + + // 3. If parameterValue is the empty string, then continue. + if (parameterValue.length === 0) { + continue + } + } + + // 10. If all of the following are true + // - parameterName is not the empty string + // - parameterName solely contains HTTP token code points + // - parameterValue solely contains HTTP quoted-string token code points + // - mimeType’s parameters[parameterName] does not exist + // then set mimeType’s parameters[parameterName] to parameterValue. 
+ if ( + parameterName.length !== 0 && + HTTP_TOKEN_CODEPOINTS.test(parameterName) && + (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && + !mimeType.parameters.has(parameterName) + ) { + mimeType.parameters.set(parameterName, parameterValue) + } + } + + // 12. Return mimeType. + return mimeType +} + +// https://infra.spec.whatwg.org/#forgiving-base64-decode +/** @param {string} data */ +function forgivingBase64 (data) { + // 1. Remove all ASCII whitespace from data. + data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line + + // 2. If data’s code point length divides by 4 leaving + // no remainder, then: + if (data.length % 4 === 0) { + // 1. If data ends with one or two U+003D (=) code points, + // then remove them from data. + data = data.replace(/=?=$/, '') + } + + // 3. If data’s code point length divides by 4 leaving + // a remainder of 1, then return failure. + if (data.length % 4 === 1) { + return 'failure' + } + + // 4. If data contains a code point that is not one of + // U+002B (+) + // U+002F (/) + // ASCII alphanumeric + // then return failure. + if (/[^+/0-9A-Za-z]/.test(data)) { + return 'failure' + } + + const binary = atob(data) + const bytes = new Uint8Array(binary.length) + + for (let byte = 0; byte < binary.length; byte++) { + bytes[byte] = binary.charCodeAt(byte) + } + + return bytes +} + +// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string +// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string +/** + * @param {string} input + * @param {{ position: number }} position + * @param {boolean?} extractValue + */ +function collectAnHTTPQuotedString (input, position, extractValue) { + // 1. Let positionStart be position. + const positionStart = position.position + + // 2. Let value be the empty string. + let value = '' + + // 3. Assert: the code point at position within input + // is U+0022 ("). + assert(input[position.position] === '"') + + // 4. Advance position by 1. + position.position++ + + // 5. While true: + while (true) { + // 1. Append the result of collecting a sequence of code points + // that are not U+0022 (") or U+005C (\) from input, given + // position, to value. + value += collectASequenceOfCodePoints( + (char) => char !== '"' && char !== '\\', + input, + position + ) + + // 2. If position is past the end of input, then break. + if (position.position >= input.length) { + break + } + + // 3. Let quoteOrBackslash be the code point at position within + // input. + const quoteOrBackslash = input[position.position] + + // 4. Advance position by 1. + position.position++ + + // 5. If quoteOrBackslash is U+005C (\), then: + if (quoteOrBackslash === '\\') { + // 1. If position is past the end of input, then append + // U+005C (\) to value and break. + if (position.position >= input.length) { + value += '\\' + break + } + + // 2. Append the code point at position within input to value. + value += input[position.position] + + // 3. Advance position by 1. + position.position++ + + // 6. Otherwise: + } else { + // 1. Assert: quoteOrBackslash is U+0022 ("). + assert(quoteOrBackslash === '"') + + // 2. Break. + break + } + } + + // 6. If the extract-value flag is set, then return value. + if (extractValue) { + return value + } + + // 7. Return the code points from positionStart to position, + // inclusive, within input. 
+ return input.slice(positionStart, position.position) +} + +/** + * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type + */ +function serializeAMimeType (mimeType) { + assert(mimeType !== 'failure') + const { parameters, essence } = mimeType + + // 1. Let serialization be the concatenation of mimeType’s + // type, U+002F (/), and mimeType’s subtype. + let serialization = essence + + // 2. For each name → value of mimeType’s parameters: + for (let [name, value] of parameters.entries()) { + // 1. Append U+003B (;) to serialization. + serialization += ';' + + // 2. Append name to serialization. + serialization += name + + // 3. Append U+003D (=) to serialization. + serialization += '=' + + // 4. If value does not solely contain HTTP token code + // points or value is the empty string, then: + if (!HTTP_TOKEN_CODEPOINTS.test(value)) { + // 1. Precede each occurence of U+0022 (") or + // U+005C (\) in value with U+005C (\). + value = value.replace(/(\\|")/g, '\\$1') + + // 2. Prepend U+0022 (") to value. + value = '"' + value + + // 3. Append U+0022 (") to value. + value += '"' + } + + // 5. Append value to serialization. + serialization += value + } + + // 3. Return serialization. + return serialization +} + +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} char + */ +function isHTTPWhiteSpace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === ' ' +} + +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} str + */ +function removeHTTPWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 + + if (leading) { + for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); + } + + if (trailing) { + for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); + } + + return str.slice(lead, trail + 1) +} + +/** + * @see https://infra.spec.whatwg.org/#ascii-whitespace + * @param {string} char + */ +function isASCIIWhitespace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' +} + +/** + * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace + */ +function removeASCIIWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 + + if (leading) { + for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); + } + + if (trailing) { + for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); + } + + return str.slice(lead, trail + 1) +} + +module.exports = { + dataURLProcessor, + URLSerializer, + collectASequenceOfCodePoints, + collectASequenceOfCodePointsFast, + stringPercentDecode, + parseMIMEType, + collectAnHTTPQuotedString, + serializeAMimeType +} diff --git a/node_modules/undici/lib/fetch/file.js b/node_modules/undici/lib/fetch/file.js new file mode 100644 index 0000000..3133d25 --- /dev/null +++ b/node_modules/undici/lib/fetch/file.js @@ -0,0 +1,344 @@ +'use strict' + +const { Blob, File: NativeFile } = require('buffer') +const { types } = require('util') +const { kState } = require('./symbols') +const { isBlobLike } = require('./util') +const { webidl } = require('./webidl') +const { parseMIMEType, serializeAMimeType } = require('./dataURL') +const { kEnumerableProperty } = require('../core/util') +const encoder = new TextEncoder() + +class File extends Blob { + constructor (fileBits, fileName, options = {}) { + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is 
used. When the File() + // constructor is invoked, user agents must run the following steps: + webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) + + fileBits = webidl.converters['sequence'](fileBits) + fileName = webidl.converters.USVString(fileName) + options = webidl.converters.FilePropertyBag(options) + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + // Note: Blob handles this for us + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // 2. Convert every character in t to ASCII lowercase. + let t = options.type + let d + + // eslint-disable-next-line no-labels + substep: { + if (t) { + t = parseMIMEType(t) + + if (t === 'failure') { + t = '' + // eslint-disable-next-line no-labels + break substep + } + + t = serializeAMimeType(t).toLowerCase() + } + + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + d = options.lastModified + } + + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. + + super(processBlobParts(fileBits, options), { type: t }) + this[kState] = { + name: n, + lastModified: d, + type: t + } + } + + get name () { + webidl.brandCheck(this, File) + + return this[kState].name + } + + get lastModified () { + webidl.brandCheck(this, File) + + return this[kState].lastModified + } + + get type () { + webidl.brandCheck(this, File) + + return this[kState].type + } +} + +class FileLike { + constructor (blobLike, fileName, options = {}) { + // TODO: argument idl type check + + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // TODO + const t = options.type + + // 2. Convert every character in t to ASCII lowercase. + // TODO + + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + const d = options.lastModified ?? Date.now() + + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. 
+ // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. + + this[kState] = { + blobLike, + name: n, + type: t, + lastModified: d + } + } + + stream (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.stream(...args) + } + + arrayBuffer (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.arrayBuffer(...args) + } + + slice (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.slice(...args) + } + + text (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.text(...args) + } + + get size () { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.size + } + + get type () { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.type + } + + get name () { + webidl.brandCheck(this, FileLike) + + return this[kState].name + } + + get lastModified () { + webidl.brandCheck(this, FileLike) + + return this[kState].lastModified + } + + get [Symbol.toStringTag] () { + return 'File' + } +} + +Object.defineProperties(File.prototype, { + [Symbol.toStringTag]: { + value: 'File', + configurable: true + }, + name: kEnumerableProperty, + lastModified: kEnumerableProperty +}) + +webidl.converters.Blob = webidl.interfaceConverter(Blob) + +webidl.converters.BlobPart = function (V, opts) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if ( + ArrayBuffer.isView(V) || + types.isAnyArrayBuffer(V) + ) { + return webidl.converters.BufferSource(V, opts) + } + } + + return webidl.converters.USVString(V, opts) +} + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.BlobPart +) + +// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag +webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ + { + key: 'lastModified', + converter: webidl.converters['long long'], + get defaultValue () { + return Date.now() + } + }, + { + key: 'type', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'endings', + converter: (value) => { + value = webidl.converters.DOMString(value) + value = value.toLowerCase() + + if (value !== 'native') { + value = 'transparent' + } + + return value + }, + defaultValue: 'transparent' + } +]) + +/** + * @see https://www.w3.org/TR/FileAPI/#process-blob-parts + * @param {(NodeJS.TypedArray|Blob|string)[]} parts + * @param {{ type: string, endings: string }} options + */ +function processBlobParts (parts, options) { + // 1. Let bytes be an empty sequence of bytes. + /** @type {NodeJS.TypedArray[]} */ + const bytes = [] + + // 2. For each element in parts: + for (const element of parts) { + // 1. If element is a USVString, run the following substeps: + if (typeof element === 'string') { + // 1. Let s be element. + let s = element + + // 2. If the endings member of options is "native", set s + // to the result of converting line endings to native + // of element. + if (options.endings === 'native') { + s = convertLineEndingsNative(s) + } + + // 3. Append the result of UTF-8 encoding s to bytes. + bytes.push(encoder.encode(s)) + } else if ( + types.isAnyArrayBuffer(element) || + types.isTypedArray(element) + ) { + // 2. If element is a BufferSource, get a copy of the + // bytes held by the buffer source, and append those + // bytes to bytes. 
+ if (!element.buffer) { // ArrayBuffer + bytes.push(new Uint8Array(element)) + } else { + bytes.push( + new Uint8Array(element.buffer, element.byteOffset, element.byteLength) + ) + } + } else if (isBlobLike(element)) { + // 3. If element is a Blob, append the bytes it represents + // to bytes. + bytes.push(element) + } + } + + // 3. Return bytes. + return bytes +} + +/** + * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native + * @param {string} s + */ +function convertLineEndingsNative (s) { + // 1. Let native line ending be be the code point U+000A LF. + let nativeLineEnding = '\n' + + // 2. If the underlying platform’s conventions are to + // represent newlines as a carriage return and line feed + // sequence, set native line ending to the code point + // U+000D CR followed by the code point U+000A LF. + if (process.platform === 'win32') { + nativeLineEnding = '\r\n' + } + + return s.replace(/\r?\n/g, nativeLineEnding) +} + +// If this function is moved to ./util.js, some tools (such as +// rollup) will warn about circular dependencies. See: +// https://github.com/nodejs/undici/issues/1629 +function isFileLike (object) { + return ( + (NativeFile && object instanceof NativeFile) || + object instanceof File || ( + object && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + object[Symbol.toStringTag] === 'File' + ) + ) +} + +module.exports = { File, FileLike, isFileLike } diff --git a/node_modules/undici/lib/fetch/formdata.js b/node_modules/undici/lib/fetch/formdata.js new file mode 100644 index 0000000..5975e26 --- /dev/null +++ b/node_modules/undici/lib/fetch/formdata.js @@ -0,0 +1,265 @@ +'use strict' + +const { isBlobLike, toUSVString, makeIterator } = require('./util') +const { kState } = require('./symbols') +const { File: UndiciFile, FileLike, isFileLike } = require('./file') +const { webidl } = require('./webidl') +const { Blob, File: NativeFile } = require('buffer') + +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile + +// https://xhr.spec.whatwg.org/#formdata +class FormData { + constructor (form) { + if (form !== undefined) { + throw webidl.errors.conversionFailed({ + prefix: 'FormData constructor', + argument: 'Argument 1', + types: ['undefined'] + }) + } + + this[kState] = [] + } + + append (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? webidl.converters.USVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with + // name, value, and filename if given. + const entry = makeEntry(name, value, filename) + + // 3. Append entry to this’s entry list. + this[kState].push(entry) + } + + delete (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) + + name = webidl.converters.USVString(name) + + // The delete(name) method steps are to remove all entries whose name + // is name from this’s entry list. 
+ this[kState] = this[kState].filter(entry => entry.name !== name) + } + + get (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) + + name = webidl.converters.USVString(name) + + // 1. If there is no entry whose name is name in this’s entry list, + // then return null. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx === -1) { + return null + } + + // 2. Return the value of the first entry whose name is name from + // this’s entry list. + return this[kState][idx].value + } + + getAll (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) + + name = webidl.converters.USVString(name) + + // 1. If there is no entry whose name is name in this’s entry list, + // then return the empty list. + // 2. Return the values of all entries whose name is name, in order, + // from this’s entry list. + return this[kState] + .filter((entry) => entry.name === name) + .map((entry) => entry.value) + } + + has (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) + + name = webidl.converters.USVString(name) + + // The has(name) method steps are to return true if there is an entry + // whose name is name in this’s entry list; otherwise false. + return this[kState].findIndex((entry) => entry.name === name) !== -1 + } + + set (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // The set(name, value) and set(name, blobValue, filename) method steps + // are: + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? toUSVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with name, value, and + // filename if given. + const entry = makeEntry(name, value, filename) + + // 3. If there are entries in this’s entry list whose name is name, then + // replace the first such entry with entry and remove the others. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx !== -1) { + this[kState] = [ + ...this[kState].slice(0, idx), + entry, + ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) + ] + } else { + // 4. Otherwise, append entry to this’s entry list. 
+ this[kState].push(entry) + } + } + + entries () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key+value' + ) + } + + keys () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key' + ) + } + + values () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'value' + ) + } + + /** + * @param {(value: string, key: string, self: FormData) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) + + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." + ) + } + + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) + } + } +} + +FormData.prototype[Symbol.iterator] = FormData.prototype.entries + +Object.defineProperties(FormData.prototype, { + [Symbol.toStringTag]: { + value: 'FormData', + configurable: true + } +}) + +/** + * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry + * @param {string} name + * @param {string|Blob} value + * @param {?string} filename + * @returns + */ +function makeEntry (name, value, filename) { + // 1. Set name to the result of converting name into a scalar value string. + // "To convert a string into a scalar value string, replace any surrogates + // with U+FFFD." + // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end + name = Buffer.from(name).toString('utf8') + + // 2. If value is a string, then set value to the result of converting + // value into a scalar value string. + if (typeof value === 'string') { + value = Buffer.from(value).toString('utf8') + } else { + // 3. Otherwise: + + // 1. If value is not a File object, then set value to a new File object, + // representing the same bytes, whose name attribute value is "blob" + if (!isFileLike(value)) { + value = value instanceof Blob + ? new File([value], 'blob', { type: value.type }) + : new FileLike(value, 'blob', { type: value.type }) + } + + // 2. If filename is given, then set value to a new File object, + // representing the same bytes, whose name attribute is filename. + if (filename !== undefined) { + /** @type {FilePropertyBag} */ + const options = { + type: value.type, + lastModified: value.lastModified + } + + value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile + ? new File([value], filename, options) + : new FileLike(value, filename, options) + } + } + + // 4. Return an entry whose name is name and whose value is value. + return { name, value } +} + +module.exports = { FormData } diff --git a/node_modules/undici/lib/fetch/global.js b/node_modules/undici/lib/fetch/global.js new file mode 100644 index 0000000..1df6f12 --- /dev/null +++ b/node_modules/undici/lib/fetch/global.js @@ -0,0 +1,40 @@ +'use strict' + +// In case of breaking changes, increase the version +// number to avoid conflicts. 
+const globalOrigin = Symbol.for('undici.globalOrigin.1') + +function getGlobalOrigin () { + return globalThis[globalOrigin] +} + +function setGlobalOrigin (newOrigin) { + if (newOrigin === undefined) { + Object.defineProperty(globalThis, globalOrigin, { + value: undefined, + writable: true, + enumerable: false, + configurable: false + }) + + return + } + + const parsedURL = new URL(newOrigin) + + if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { + throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) + } + + Object.defineProperty(globalThis, globalOrigin, { + value: parsedURL, + writable: true, + enumerable: false, + configurable: false + }) +} + +module.exports = { + getGlobalOrigin, + setGlobalOrigin +} diff --git a/node_modules/undici/lib/fetch/headers.js b/node_modules/undici/lib/fetch/headers.js new file mode 100644 index 0000000..2f1c0be --- /dev/null +++ b/node_modules/undici/lib/fetch/headers.js @@ -0,0 +1,589 @@ +// https://github.com/Ethan-Arrowood/undici-fetch + +'use strict' + +const { kHeadersList, kConstruct } = require('../core/symbols') +const { kGuard } = require('./symbols') +const { kEnumerableProperty } = require('../core/util') +const { + makeIterator, + isValidHeaderName, + isValidHeaderValue +} = require('./util') +const { webidl } = require('./webidl') +const assert = require('assert') + +const kHeadersMap = Symbol('headers map') +const kHeadersSortedMap = Symbol('headers map sorted') + +/** + * @param {number} code + */ +function isHTTPWhiteSpaceCharCode (code) { + return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize + * @param {string} potentialValue + */ +function headerValueNormalize (potentialValue) { + // To normalize a byte sequence potentialValue, remove + // any leading and trailing HTTP whitespace bytes from + // potentialValue. + let i = 0; let j = potentialValue.length + + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i + + return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) +} + +function fill (headers, object) { + // To fill a Headers object headers with a given object object, run these steps: + + // 1. If object is a sequence, then for each header in object: + // Note: webidl conversion to array has already been done. + if (Array.isArray(object)) { + for (let i = 0; i < object.length; ++i) { + const header = object[i] + // 1. If header does not contain exactly two items, then throw a TypeError. + if (header.length !== 2) { + throw webidl.errors.exception({ + header: 'Headers constructor', + message: `expected name/value pair to be length 2, found ${header.length}.` + }) + } + + // 2. Append (header’s first item, header’s second item) to headers. + appendHeader(headers, header[0], header[1]) + } + } else if (typeof object === 'object' && object !== null) { + // Note: null should throw + + // 2. 
Otherwise, object is a record, then for each key → value in object, + // append (key, value) to headers + const keys = Object.keys(object) + for (let i = 0; i < keys.length; ++i) { + appendHeader(headers, keys[i], object[keys[i]]) + } + } else { + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>'] + }) + } +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-headers-append + */ +function appendHeader (headers, name, value) { + // 1. Normalize value. + value = headerValueNormalize(value) + + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value, + type: 'header value' + }) + } + + // 3. If headers’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if headers’s guard is "request" and name is a + // forbidden header name, return. + // Note: undici does not implement forbidden header names + if (headers[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (headers[kGuard] === 'request-no-cors') { + // 5. Otherwise, if headers’s guard is "request-no-cors": + // TODO + } + + // 6. Otherwise, if headers’s guard is "response" and name is a + // forbidden response-header name, return. + + // 7. Append (name, value) to headers’s header list. + return headers[kHeadersList].append(name, value) + + // 8. If headers’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from headers +} + +class HeadersList { + /** @type {[string, string][]|null} */ + cookies = null + + constructor (init) { + if (init instanceof HeadersList) { + this[kHeadersMap] = new Map(init[kHeadersMap]) + this[kHeadersSortedMap] = init[kHeadersSortedMap] + this.cookies = init.cookies === null ? null : [...init.cookies] + } else { + this[kHeadersMap] = new Map(init) + this[kHeadersSortedMap] = null + } + } + + // https://fetch.spec.whatwg.org/#header-list-contains + contains (name) { + // A header list list contains a header name name if list + // contains a header whose name is a byte-case-insensitive + // match for name. + name = name.toLowerCase() + + return this[kHeadersMap].has(name) + } + + clear () { + this[kHeadersMap].clear() + this[kHeadersSortedMap] = null + this.cookies = null + } + + // https://fetch.spec.whatwg.org/#concept-header-list-append + append (name, value) { + this[kHeadersSortedMap] = null + + // 1. If list contains name, then set name to the first such + // header’s name. + const lowercaseName = name.toLowerCase() + const exists = this[kHeadersMap].get(lowercaseName) + + // 2. Append (name, value) to list. + if (exists) { + const delimiter = lowercaseName === 'cookie' ? '; ' : ', ' + this[kHeadersMap].set(lowercaseName, { + name: exists.name, + value: `${exists.value}${delimiter}${value}` + }) + } else { + this[kHeadersMap].set(lowercaseName, { name, value }) + } + + if (lowercaseName === 'set-cookie') { + this.cookies ??= [] + this.cookies.push(value) + } + } + + // https://fetch.spec.whatwg.org/#concept-header-list-set + set (name, value) { + this[kHeadersSortedMap] = null + const lowercaseName = name.toLowerCase() + + if (lowercaseName === 'set-cookie') { + this.cookies = [value] + } + + // 1.
If list contains name, then set the value of + // the first such header to value and remove the + // others. + // 2. Otherwise, append header (name, value) to list. + this[kHeadersMap].set(lowercaseName, { name, value }) + } + + // https://fetch.spec.whatwg.org/#concept-header-list-delete + delete (name) { + this[kHeadersSortedMap] = null + + name = name.toLowerCase() + + if (name === 'set-cookie') { + this.cookies = null + } + + this[kHeadersMap].delete(name) + } + + // https://fetch.spec.whatwg.org/#concept-header-list-get + get (name) { + const value = this[kHeadersMap].get(name.toLowerCase()) + + // 1. If list does not contain name, then return null. + // 2. Return the values of all headers in list whose name + // is a byte-case-insensitive match for name, + // separated from each other by 0x2C 0x20, in order. + return value === undefined ? null : value.value + } + + * [Symbol.iterator] () { + // use the lowercased name + for (const [name, { value }] of this[kHeadersMap]) { + yield [name, value] + } + } + + get entries () { + const headers = {} + + if (this[kHeadersMap].size) { + for (const { name, value } of this[kHeadersMap].values()) { + headers[name] = value + } + } + + return headers + } +} + +// https://fetch.spec.whatwg.org/#headers-class +class Headers { + constructor (init = undefined) { + if (init === kConstruct) { + return + } + this[kHeadersList] = new HeadersList() + + // The new Headers(init) constructor steps are: + + // 1. Set this’s guard to "none". + this[kGuard] = 'none' + + // 2. If init is given, then fill this with init. + if (init !== undefined) { + init = webidl.converters.HeadersInit(init) + fill(this, init) + } + } + + // https://fetch.spec.whatwg.org/#dom-headers-append + append (name, value) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) + + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) + + return appendHeader(this, name, value) + } + + // https://fetch.spec.whatwg.org/#dom-headers-delete + delete (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.delete', + value: name, + type: 'header name' + }) + } + + // 2. If this’s guard is "immutable", then throw a TypeError. + // 3. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 4. Otherwise, if this’s guard is "request-no-cors", name + // is not a no-CORS-safelisted request-header name, and + // name is not a privileged no-CORS request-header name, + // return. + // 5. Otherwise, if this’s guard is "response" and name is + // a forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 6. If this’s header list does not contain name, then + // return. + if (!this[kHeadersList].contains(name)) { + return + } + + // 7. Delete name from this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this. 
+ this[kHeadersList].delete(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-get + get (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.get', + value: name, + type: 'header name' + }) + } + + // 2. Return the result of getting name from this’s header + // list. + return this[kHeadersList].get(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-has + has (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.has', + value: name, + type: 'header name' + }) + } + + // 2. Return true if this’s header list contains name; + // otherwise false. + return this[kHeadersList].contains(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-set + set (name, value) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) + + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) + + // 1. Normalize value. + value = headerValueNormalize(value) + + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value, + type: 'header value' + }) + } + + // 3. If this’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 5. Otherwise, if this’s guard is "request-no-cors" and + // name/value is not a no-CORS-safelisted request-header, + // return. + // 6. Otherwise, if this’s guard is "response" and name is a + // forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 7. Set (name, value) in this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this + this[kHeadersList].set(name, value) + } + + // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie + getSetCookie () { + webidl.brandCheck(this, Headers) + + // 1. If this’s header list does not contain `Set-Cookie`, then return « ». + // 2. Return the values of all headers in this’s header list whose name is + // a byte-case-insensitive match for `Set-Cookie`, in order. + + const list = this[kHeadersList].cookies + + if (list) { + return [...list] + } + + return [] + } + + // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine + get [kHeadersSortedMap] () { + if (this[kHeadersList][kHeadersSortedMap]) { + return this[kHeadersList][kHeadersSortedMap] + } + + // 1. Let headers be an empty list of headers with the key being the name + // and value the value. + const headers = [] + + // 2. 
Let names be the result of convert header names to a sorted-lowercase + // set with all the names of the headers in list. + const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) + const cookies = this[kHeadersList].cookies + + // 3. For each name of names: + for (let i = 0; i < names.length; ++i) { + const [name, value] = names[i] + // 1. If name is `set-cookie`, then: + if (name === 'set-cookie') { + // 1. Let values be a list of all values of headers in list whose name + // is a byte-case-insensitive match for name, in order. + + // 2. For each value of values: + // 1. Append (name, value) to headers. + for (let j = 0; j < cookies.length; ++j) { + headers.push([name, cookies[j]]) + } + } else { + // 2. Otherwise: + + // 1. Let value be the result of getting name from list. + + // 2. Assert: value is non-null. + assert(value !== null) + + // 3. Append (name, value) to headers. + headers.push([name, value]) + } + } + + this[kHeadersList][kHeadersSortedMap] = headers + + // 4. Return headers. + return headers + } + + keys () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key' + ) + } + + values () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'value') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'value' + ) + } + + entries () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key+value') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key+value' + ) + } + + /** + * @param {(value: string, key: string, self: Headers) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) + + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." 
+ )
+  }
+
+  for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) + } + } + + [Symbol.for('nodejs.util.inspect.custom')] () { + webidl.brandCheck(this, Headers) + + return this[kHeadersList] + } +} + +Headers.prototype[Symbol.iterator] = Headers.prototype.entries + +Object.defineProperties(Headers.prototype, { + append: kEnumerableProperty, + delete: kEnumerableProperty, + get: kEnumerableProperty, + has: kEnumerableProperty, + set: kEnumerableProperty, + getSetCookie: kEnumerableProperty, + keys: kEnumerableProperty, + values: kEnumerableProperty, + entries: kEnumerableProperty, + forEach: kEnumerableProperty, + [Symbol.iterator]: { enumerable: false }, + [Symbol.toStringTag]: { + value: 'Headers', + configurable: true + } +}) + +webidl.converters.HeadersInit = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (V[Symbol.iterator]) { + return webidl.converters['sequence<sequence<ByteString>>'](V) + } + + return webidl.converters['record<ByteString, ByteString>'](V) + } + + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>'] + }) +} + +module.exports = { + fill, + Headers, + HeadersList +} diff --git a/node_modules/undici/lib/fetch/index.js b/node_modules/undici/lib/fetch/index.js new file mode 100644 index 0000000..dea2069 --- /dev/null +++ b/node_modules/undici/lib/fetch/index.js @@ -0,0 +1,2148 @@ +// https://github.com/Ethan-Arrowood/undici-fetch + +'use strict' + +const { + Response, + makeNetworkError, + makeAppropriateNetworkError, + filterResponse, + makeResponse +} = require('./response') +const { Headers } = require('./headers') +const { Request, makeRequest } = require('./request') +const zlib = require('zlib') +const { + bytesMatch, + makePolicyContainer, + clonePolicyContainer, + requestBadPort, + TAOCheck, + appendRequestOriginHeader, + responseLocationURL, + requestCurrentURL, + setRequestReferrerPolicyOnRedirect, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + createOpaqueTimingInfo, + appendFetchMetadata, + corsCheck, + crossOriginResourcePolicyCheck, + determineRequestsReferrer, + coarsenedSharedCurrentTime, + createDeferredPromise, + isBlobLike, + sameOrigin, + isCancelled, + isAborted, + isErrorLike, + fullyReadBody, + readableStreamClose, + isomorphicEncode, + urlIsLocal, + urlIsHttpHttpsScheme, + urlHasHttpsScheme +} = require('./util') +const { kState, kHeaders, kGuard, kRealm } = require('./symbols') +const assert = require('assert') +const { safelyExtractBody } = require('./body') +const { + redirectStatusSet, + nullBodyStatus, + safeMethodsSet, + requestBodyHeader, + subresourceSet, + DOMException +} = require('./constants') +const { kHeadersList } = require('../core/symbols') +const EE = require('events') +const { Readable, pipeline } = require('stream') +const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = require('../core/util') +const { dataURLProcessor, serializeAMimeType } = require('./dataURL') +const { TransformStream } = require('stream/web') +const { getGlobalDispatcher } = require('../global') +const { webidl } = require('./webidl') +const { STATUS_CODES } = require('http') +const GET_OR_HEAD = ['GET', 'HEAD'] + +/** @type {import('buffer').resolveObjectURL} */ +let resolveObjectURL +let ReadableStream = globalThis.ReadableStream + +class Fetch extends EE { + constructor (dispatcher) { + super() + + this.dispatcher = dispatcher + this.connection = null + this.dump = false + this.state = 'ongoing' + // 2 terminated listeners get added per request, + // but
only 1 gets removed. If there are 20 redirects, + // 21 listeners will be added. + // See https://github.com/nodejs/undici/issues/1711 + // TODO (fix): Find and fix root cause for leaked listener. + this.setMaxListeners(21) + } + + terminate (reason) { + if (this.state !== 'ongoing') { + return + } + + this.state = 'terminated' + this.connection?.destroy(reason) + this.emit('terminated', reason) + } + + // https://fetch.spec.whatwg.org/#fetch-controller-abort + abort (error) { + if (this.state !== 'ongoing') { + return + } + + // 1. Set controller’s state to "aborted". + this.state = 'aborted' + + // 2. Let fallbackError be an "AbortError" DOMException. + // 3. Set error to fallbackError if it is not given. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } + + // 4. Let serializedError be StructuredSerialize(error). + // If that threw an exception, catch it, and let + // serializedError be StructuredSerialize(fallbackError). + + // 5. Set controller’s serialized abort reason to serializedError. + this.serializedAbortReason = error + + this.connection?.destroy(error) + this.emit('terminated', error) + } +} + +// https://fetch.spec.whatwg.org/#fetch-method +function fetch (input, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) + + // 1. Let p be a new promise. + const p = createDeferredPromise() + + // 2. Let requestObject be the result of invoking the initial value of + // Request as constructor with input and init as arguments. If this throws + // an exception, reject p with it and return p. + let requestObject + + try { + requestObject = new Request(input, init) + } catch (e) { + p.reject(e) + return p.promise + } + + // 3. Let request be requestObject’s request. + const request = requestObject[kState] + + // 4. If requestObject’s signal’s aborted flag is set, then: + if (requestObject.signal.aborted) { + // 1. Abort the fetch() call with p, request, null, and + // requestObject’s signal’s abort reason. + abortFetch(p, request, null, requestObject.signal.reason) + + // 2. Return p. + return p.promise + } + + // 5. Let globalObject be request’s client’s global object. + const globalObject = request.client.globalObject + + // 6. If globalObject is a ServiceWorkerGlobalScope object, then set + // request’s service-workers mode to "none". + if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { + request.serviceWorkers = 'none' + } + + // 7. Let responseObject be null. + let responseObject = null + + // 8. Let relevantRealm be this’s relevant Realm. + const relevantRealm = null + + // 9. Let locallyAborted be false. + let locallyAborted = false + + // 10. Let controller be null. + let controller = null + + // 11. Add the following abort steps to requestObject’s signal: + addAbortListener( + requestObject.signal, + () => { + // 1. Set locallyAborted to true. + locallyAborted = true + + // 2. Assert: controller is non-null. + assert(controller != null) + + // 3. Abort controller with requestObject’s signal’s abort reason. + controller.abort(requestObject.signal.reason) + + // 4. Abort the fetch() call with p, request, responseObject, + // and requestObject’s signal’s abort reason. + abortFetch(p, request, responseObject, requestObject.signal.reason) + } + ) + + // 12. Let handleFetchDone given response response be to finalize and + // report timing with response, globalObject, and "fetch". + const handleFetchDone = (response) => + finalizeAndReportTiming(response, 'fetch') + + // 13. 
Set controller to the result of calling fetch given request, + // with processResponseEndOfBody set to handleFetchDone, and processResponse + // given response being these substeps: + + const processResponse = (response) => { + // 1. If locallyAborted is true, terminate these substeps. + if (locallyAborted) { + return Promise.resolve() + } + + // 2. If response’s aborted flag is set, then: + if (response.aborted) { + // 1. Let deserializedError be the result of deserialize a serialized + // abort reason given controller’s serialized abort reason and + // relevantRealm. + + // 2. Abort the fetch() call with p, request, responseObject, and + // deserializedError. + + abortFetch(p, request, responseObject, controller.serializedAbortReason) + return Promise.resolve() + } + + // 3. If response is a network error, then reject p with a TypeError + // and terminate these substeps. + if (response.type === 'error') { + p.reject( + Object.assign(new TypeError('fetch failed'), { cause: response.error }) + ) + return Promise.resolve() + } + + // 4. Set responseObject to the result of creating a Response object, + // given response, "immutable", and relevantRealm. + responseObject = new Response() + responseObject[kState] = response + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + + // 5. Resolve p with responseObject. + p.resolve(responseObject) + } + + controller = fetching({ + request, + processResponseEndOfBody: handleFetchDone, + processResponse, + dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici + }) + + // 14. Return p. + return p.promise +} + +// https://fetch.spec.whatwg.org/#finalize-and-report-timing +function finalizeAndReportTiming (response, initiatorType = 'other') { + // 1. If response is an aborted network error, then return. + if (response.type === 'error' && response.aborted) { + return + } + + // 2. If response’s URL list is null or empty, then return. + if (!response.urlList?.length) { + return + } + + // 3. Let originalURL be response’s URL list[0]. + const originalURL = response.urlList[0] + + // 4. Let timingInfo be response’s timing info. + let timingInfo = response.timingInfo + + // 5. Let cacheState be response’s cache state. + let cacheState = response.cacheState + + // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. + if (!urlIsHttpHttpsScheme(originalURL)) { + return + } + + // 7. If timingInfo is null, then return. + if (timingInfo === null) { + return + } + + // 8. If response’s timing allow passed flag is not set, then: + if (!response.timingAllowPassed) { + // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. + timingInfo = createOpaqueTimingInfo({ + startTime: timingInfo.startTime + }) + + // 2. Set cacheState to the empty string. + cacheState = '' + } + + // 9. Set timingInfo’s end time to the coarsened shared current time + // given global’s relevant settings object’s cross-origin isolated + // capability. + // TODO: given global’s relevant settings object’s cross-origin isolated + // capability? + timingInfo.endTime = coarsenedSharedCurrentTime() + + // 10. Set response’s timing info to timingInfo. + response.timingInfo = timingInfo + + // 11. Mark resource timing for timingInfo, originalURL, initiatorType, + // global, and cacheState. 
+ markResourceTiming( + timingInfo, + originalURL, + initiatorType, + globalThis, + cacheState + ) +} + +// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing +function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { + if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { + performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) + } +} + +// https://fetch.spec.whatwg.org/#abort-fetch +function abortFetch (p, request, responseObject, error) { + // Note: AbortSignal.reason was added in node v17.2.0 + // which would give us an undefined error to reject with. + // Remove this once node v16 is no longer supported. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } + + // 1. Reject promise with error. + p.reject(error) + + // 2. If request’s body is not null and is readable, then cancel request’s + // body with error. + if (request.body != null && isReadable(request.body?.stream)) { + request.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) + } + + // 3. If responseObject is null, then return. + if (responseObject == null) { + return + } + + // 4. Let response be responseObject’s response. + const response = responseObject[kState] + + // 5. If response’s body is not null and is readable, then error response’s + // body with error. + if (response.body != null && isReadable(response.body?.stream)) { + response.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) + } +} + +// https://fetch.spec.whatwg.org/#fetching +function fetching ({ + request, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseEndOfBody, + processResponseConsumeBody, + useParallelQueue = false, + dispatcher // undici +}) { + // 1. Let taskDestination be null. + let taskDestination = null + + // 2. Let crossOriginIsolatedCapability be false. + let crossOriginIsolatedCapability = false + + // 3. If request’s client is non-null, then: + if (request.client != null) { + // 1. Set taskDestination to request’s client’s global object. + taskDestination = request.client.globalObject + + // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin + // isolated capability. + crossOriginIsolatedCapability = + request.client.crossOriginIsolatedCapability + } + + // 4. If useParallelQueue is true, then set taskDestination to the result of + // starting a new parallel queue. + // TODO + + // 5. Let timingInfo be a new fetch timing info whose start time and + // post-redirect start time are the coarsened shared current time given + // crossOriginIsolatedCapability. + const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) + const timingInfo = createOpaqueTimingInfo({ + startTime: currenTime + }) + + // 6. Let fetchParams be a new fetch params whose + // request is request, + // timing info is timingInfo, + // process request body chunk length is processRequestBodyChunkLength, + // process request end-of-body is processRequestEndOfBody, + // process response is processResponse, + // process response consume body is processResponseConsumeBody, + // process response end-of-body is processResponseEndOfBody, + // task destination is taskDestination, + // and cross-origin isolated capability is crossOriginIsolatedCapability. 
+ const fetchParams = { + controller: new Fetch(dispatcher), + request, + timingInfo, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseConsumeBody, + processResponseEndOfBody, + taskDestination, + crossOriginIsolatedCapability + } + + // 7. If request’s body is a byte sequence, then set request’s body to + // request’s body as a body. + // NOTE: Since fetching is only called from fetch, body should already be + // extracted. + assert(!request.body || request.body.stream) + + // 8. If request’s window is "client", then set request’s window to request’s + // client, if request’s client’s global object is a Window object; otherwise + // "no-window". + if (request.window === 'client') { + // TODO: What if request.client is null? + request.window = + request.client?.globalObject?.constructor?.name === 'Window' + ? request.client + : 'no-window' + } + + // 9. If request’s origin is "client", then set request’s origin to request’s + // client’s origin. + if (request.origin === 'client') { + // TODO: What if request.client is null? + request.origin = request.client?.origin + } + + // 10. If all of the following conditions are true: + // TODO + + // 11. If request’s policy container is "client", then: + if (request.policyContainer === 'client') { + // 1. If request’s client is non-null, then set request’s policy + // container to a clone of request’s client’s policy container. [HTML] + if (request.client != null) { + request.policyContainer = clonePolicyContainer( + request.client.policyContainer + ) + } else { + // 2. Otherwise, set request’s policy container to a new policy + // container. + request.policyContainer = makePolicyContainer() + } + } + + // 12. If request’s header list does not contain `Accept`, then: + if (!request.headersList.contains('accept')) { + // 1. Let value be `*/*`. + const value = '*/*' + + // 2. A user agent should set value to the first matching statement, if + // any, switching on request’s destination: + // "document" + // "frame" + // "iframe" + // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` + // "image" + // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` + // "style" + // `text/css,*/*;q=0.1` + // TODO + + // 3. Append `Accept`/value to request’s header list. + request.headersList.append('accept', value) + } + + // 13. If request’s header list does not contain `Accept-Language`, then + // user agents should append `Accept-Language`/an appropriate value to + // request’s header list. + if (!request.headersList.contains('accept-language')) { + request.headersList.append('accept-language', '*') + } + + // 14. If request’s priority is null, then use request’s initiator and + // destination appropriately in setting request’s priority to a + // user-agent-defined object. + if (request.priority === null) { + // TODO + } + + // 15. If request is a subresource request, then: + if (subresourceSet.has(request.destination)) { + // TODO + } + + // 16. Run main fetch given fetchParams. + mainFetch(fetchParams) + .catch(err => { + fetchParams.controller.terminate(err) + }) + + // 17. Return fetchParam's controller + return fetchParams.controller +} + +// https://fetch.spec.whatwg.org/#concept-main-fetch +async function mainFetch (fetchParams, recursive = false) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. 
If request’s local-URLs-only flag is set and request’s current URL is + // not local, then set response to a network error. + if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { + response = makeNetworkError('local URLs only') + } + + // 4. Run report Content Security Policy violations for request. + // TODO + + // 5. Upgrade request to a potentially trustworthy URL, if appropriate. + tryUpgradeRequestToAPotentiallyTrustworthyURL(request) + + // 6. If should request be blocked due to a bad port, should fetching request + // be blocked as mixed content, or should request be blocked by Content + // Security Policy returns blocked, then set response to a network error. + if (requestBadPort(request) === 'blocked') { + response = makeNetworkError('bad port') + } + // TODO: should fetching request be blocked as mixed content? + // TODO: should request be blocked by Content Security Policy? + + // 7. If request’s referrer policy is the empty string, then set request’s + // referrer policy to request’s policy container’s referrer policy. + if (request.referrerPolicy === '') { + request.referrerPolicy = request.policyContainer.referrerPolicy + } + + // 8. If request’s referrer is not "no-referrer", then set request’s + // referrer to the result of invoking determine request’s referrer. + if (request.referrer !== 'no-referrer') { + request.referrer = determineRequestsReferrer(request) + } + + // 9. Set request’s current URL’s scheme to "https" if all of the following + // conditions are true: + // - request’s current URL’s scheme is "http" + // - request’s current URL’s host is a domain + // - Matching request’s current URL’s host per Known HSTS Host Domain Name + // Matching results in either a superdomain match with an asserted + // includeSubDomains directive or a congruent match (with or without an + // asserted includeSubDomains directive). [HSTS] + // TODO + + // 10. If recursive is false, then run the remaining steps in parallel. + // TODO + + // 11. If response is null, then set response to the result of running + // the steps corresponding to the first matching statement: + if (response === null) { + response = await (async () => { + const currentURL = requestCurrentURL(request) + + if ( + // - request’s current URL’s origin is same origin with request’s origin, + // and request’s response tainting is "basic" + (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || + // request’s current URL’s scheme is "data" + (currentURL.protocol === 'data:') || + // - request’s mode is "navigate" or "websocket" + (request.mode === 'navigate' || request.mode === 'websocket') + ) { + // 1. Set request’s response tainting to "basic". + request.responseTainting = 'basic' + + // 2. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) + } + + // request’s mode is "same-origin" + if (request.mode === 'same-origin') { + // 1. Return a network error. + return makeNetworkError('request mode cannot be "same-origin"') + } + + // request’s mode is "no-cors" + if (request.mode === 'no-cors') { + // 1. If request’s redirect mode is not "follow", then return a network + // error. + if (request.redirect !== 'follow') { + return makeNetworkError( + 'redirect mode cannot be "follow" for "no-cors" request' + ) + } + + // 2. Set request’s response tainting to "opaque". + request.responseTainting = 'opaque' + + // 3. Return the result of running scheme fetch given fetchParams. 
+ return await schemeFetch(fetchParams) + } + + // request’s current URL’s scheme is not an HTTP(S) scheme + if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { + // Return a network error. + return makeNetworkError('URL scheme must be a HTTP(S) scheme') + } + + // - request’s use-CORS-preflight flag is set + // - request’s unsafe-request flag is set and either request’s method is + // not a CORS-safelisted method or CORS-unsafe request-header names with + // request’s header list is not empty + // 1. Set request’s response tainting to "cors". + // 2. Let corsWithPreflightResponse be the result of running HTTP fetch + // given fetchParams and true. + // 3. If corsWithPreflightResponse is a network error, then clear cache + // entries using request. + // 4. Return corsWithPreflightResponse. + // TODO + + // Otherwise + // 1. Set request’s response tainting to "cors". + request.responseTainting = 'cors' + + // 2. Return the result of running HTTP fetch given fetchParams. + return await httpFetch(fetchParams) + })() + } + + // 12. If recursive is true, then return response. + if (recursive) { + return response + } + + // 13. If response is not a network error and response is not a filtered + // response, then: + if (response.status !== 0 && !response.internalResponse) { + // If request’s response tainting is "cors", then: + if (request.responseTainting === 'cors') { + // 1. Let headerNames be the result of extracting header list values + // given `Access-Control-Expose-Headers` and response’s header list. + // TODO + // 2. If request’s credentials mode is not "include" and headerNames + // contains `*`, then set response’s CORS-exposed header-name list to + // all unique header names in response’s header list. + // TODO + // 3. Otherwise, if headerNames is not null or failure, then set + // response’s CORS-exposed header-name list to headerNames. + // TODO + } + + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (request.responseTainting === 'basic') { + response = filterResponse(response, 'basic') + } else if (request.responseTainting === 'cors') { + response = filterResponse(response, 'cors') + } else if (request.responseTainting === 'opaque') { + response = filterResponse(response, 'opaque') + } else { + assert(false) + } + } + + // 14. Let internalResponse be response, if response is a network error, + // and response’s internal response otherwise. + let internalResponse = + response.status === 0 ? response : response.internalResponse + + // 15. If internalResponse’s URL list is empty, then set it to a clone of + // request’s URL list. + if (internalResponse.urlList.length === 0) { + internalResponse.urlList.push(...request.urlList) + } + + // 16. If request’s timing allow failed flag is unset, then set + // internalResponse’s timing allow passed flag. + if (!request.timingAllowFailed) { + response.timingAllowPassed = true + } + + // 17. If response is not a network error and any of the following returns + // blocked + // - should internalResponse to request be blocked as mixed content + // - should internalResponse to request be blocked by Content Security Policy + // - should internalResponse to request be blocked due to its MIME type + // - should internalResponse to request be blocked due to nosniff + // TODO + + // 18. 
If response’s type is "opaque", internalResponse’s status is 206, + // internalResponse’s range-requested flag is set, and request’s header + // list does not contain `Range`, then set response and internalResponse + // to a network error. + if ( + response.type === 'opaque' && + internalResponse.status === 206 && + internalResponse.rangeRequested && + !request.headers.contains('range') + ) { + response = internalResponse = makeNetworkError() + } + + // 19. If response is not a network error and either request’s method is + // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, + // set internalResponse’s body to null and disregard any enqueuing toward + // it (if any). + if ( + response.status !== 0 && + (request.method === 'HEAD' || + request.method === 'CONNECT' || + nullBodyStatus.includes(internalResponse.status)) + ) { + internalResponse.body = null + fetchParams.controller.dump = true + } + + // 20. If request’s integrity metadata is not the empty string, then: + if (request.integrity) { + // 1. Let processBodyError be this step: run fetch finale given fetchParams + // and a network error. + const processBodyError = (reason) => + fetchFinale(fetchParams, makeNetworkError(reason)) + + // 2. If request’s response tainting is "opaque", or response’s body is null, + // then run processBodyError and abort these steps. + if (request.responseTainting === 'opaque' || response.body == null) { + processBodyError(response.error) + return + } + + // 3. Let processBody given bytes be these steps: + const processBody = (bytes) => { + // 1. If bytes do not match request’s integrity metadata, + // then run processBodyError and abort these steps. [SRI] + if (!bytesMatch(bytes, request.integrity)) { + processBodyError('integrity mismatch') + return + } + + // 2. Set response’s body to bytes as a body. + response.body = safelyExtractBody(bytes)[0] + + // 3. Run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) + } + + // 4. Fully read response’s body given processBody and processBodyError. + await fullyReadBody(response.body, processBody, processBodyError) + } else { + // 21. Otherwise, run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) + } +} + +// https://fetch.spec.whatwg.org/#concept-scheme-fetch +// given a fetch params fetchParams +function schemeFetch (fetchParams) { + // Note: since the connection is destroyed on redirect, which sets fetchParams to a + // cancelled state, we do not want this condition to trigger *unless* there have been + // no redirects. See https://github.com/nodejs/undici/issues/1776 + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { + return Promise.resolve(makeAppropriateNetworkError(fetchParams)) + } + + // 2. Let request be fetchParams’s request. + const { request } = fetchParams + + const { protocol: scheme } = requestCurrentURL(request) + + // 3. Switch on request’s current URL’s scheme and run the associated steps: + switch (scheme) { + case 'about:': { + // If request’s current URL’s path is the string "blank", then return a new response + // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », + // and body is the empty byte sequence as a body. + + // Otherwise, return a network error. 
+ return Promise.resolve(makeNetworkError('about scheme is not supported')) + } + case 'blob:': { + if (!resolveObjectURL) { + resolveObjectURL = require('buffer').resolveObjectURL + } + + // 1. Let blobURLEntry be request’s current URL’s blob URL entry. + const blobURLEntry = requestCurrentURL(request) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 + // Buffer.resolveObjectURL does not ignore URL queries. + if (blobURLEntry.search.length !== 0) { + return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) + } + + const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) + + // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s + // object is not a Blob object, then return a network error. + if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { + return Promise.resolve(makeNetworkError('invalid method')) + } + + // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object. + const bodyWithType = safelyExtractBody(blobURLEntryObject) + + // 4. Let body be bodyWithType’s body. + const body = bodyWithType[0] + + // 5. Let length be body’s length, serialized and isomorphic encoded. + const length = isomorphicEncode(`${body.length}`) + + // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. + const type = bodyWithType[1] ?? '' + + // 7. Return a new response whose status message is `OK`, header list is + // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. + const response = makeResponse({ + statusText: 'OK', + headersList: [ + ['content-length', { name: 'Content-Length', value: length }], + ['content-type', { name: 'Content-Type', value: type }] + ] + }) + + response.body = body + + return Promise.resolve(response) + } + case 'data:': { + // 1. Let dataURLStruct be the result of running the + // data: URL processor on request’s current URL. + const currentURL = requestCurrentURL(request) + const dataURLStruct = dataURLProcessor(currentURL) + + // 2. If dataURLStruct is failure, then return a + // network error. + if (dataURLStruct === 'failure') { + return Promise.resolve(makeNetworkError('failed to fetch the data URL')) + } + + // 3. Let mimeType be dataURLStruct’s MIME type, serialized. + const mimeType = serializeAMimeType(dataURLStruct.mimeType) + + // 4. Return a response whose status message is `OK`, + // header list is « (`Content-Type`, mimeType) », + // and body is dataURLStruct’s body as a body. + return Promise.resolve(makeResponse({ + statusText: 'OK', + headersList: [ + ['content-type', { name: 'Content-Type', value: mimeType }] + ], + body: safelyExtractBody(dataURLStruct.body)[0] + })) + } + case 'file:': { + // For now, unfortunate as it is, file URLs are left as an exercise for the reader. + // When in doubt, return a network error. + return Promise.resolve(makeNetworkError('not implemented... yet...')) + } + case 'http:': + case 'https:': { + // Return the result of running HTTP fetch given fetchParams. + + return httpFetch(fetchParams) + .catch((err) => makeNetworkError(err)) + } + default: { + return Promise.resolve(makeNetworkError('unknown scheme')) + } + } +} + +// https://fetch.spec.whatwg.org/#finalize-response +function finalizeResponse (fetchParams, response) { + // 1. Set fetchParams’s request’s done flag. 
+ fetchParams.request.done = true + + // 2, If fetchParams’s process response done is not null, then queue a fetch + // task to run fetchParams’s process response done given response, with + // fetchParams’s task destination. + if (fetchParams.processResponseDone != null) { + queueMicrotask(() => fetchParams.processResponseDone(response)) + } +} + +// https://fetch.spec.whatwg.org/#fetch-finale +function fetchFinale (fetchParams, response) { + // 1. If response is a network error, then: + if (response.type === 'error') { + // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». + response.urlList = [fetchParams.request.urlList[0]] + + // 2. Set response’s timing info to the result of creating an opaque timing + // info for fetchParams’s timing info. + response.timingInfo = createOpaqueTimingInfo({ + startTime: fetchParams.timingInfo.startTime + }) + } + + // 2. Let processResponseEndOfBody be the following steps: + const processResponseEndOfBody = () => { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true + + // If fetchParams’s process response end-of-body is not null, + // then queue a fetch task to run fetchParams’s process response + // end-of-body given response with fetchParams’s task destination. + if (fetchParams.processResponseEndOfBody != null) { + queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) + } + } + + // 3. If fetchParams’s process response is non-null, then queue a fetch task + // to run fetchParams’s process response given response, with fetchParams’s + // task destination. + if (fetchParams.processResponse != null) { + queueMicrotask(() => fetchParams.processResponse(response)) + } + + // 4. If response’s body is null, then run processResponseEndOfBody. + if (response.body == null) { + processResponseEndOfBody() + } else { + // 5. Otherwise: + + // 1. Let transformStream be a new a TransformStream. + + // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, + // enqueues chunk in transformStream. + const identityTransformAlgorithm = (chunk, controller) => { + controller.enqueue(chunk) + } + + // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm + // and flushAlgorithm set to processResponseEndOfBody. + const transformStream = new TransformStream({ + start () {}, + transform: identityTransformAlgorithm, + flush: processResponseEndOfBody + }, { + size () { + return 1 + } + }, { + size () { + return 1 + } + }) + + // 4. Set response’s body to the result of piping response’s body through transformStream. + response.body = { stream: response.body.stream.pipeThrough(transformStream) } + } + + // 6. If fetchParams’s process response consume body is non-null, then: + if (fetchParams.processResponseConsumeBody != null) { + // 1. Let processBody given nullOrBytes be this step: run fetchParams’s + // process response consume body given response and nullOrBytes. + const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) + + // 2. Let processBodyError be this step: run fetchParams’s process + // response consume body given response and failure. + const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) + + // 3. If response’s body is null, then queue a fetch task to run processBody + // given null, with fetchParams’s task destination. + if (response.body == null) { + queueMicrotask(() => processBody(null)) + } else { + // 4. 
Otherwise, fully read response’s body given processBody, processBodyError, + // and fetchParams’s task destination. + return fullyReadBody(response.body, processBody, processBodyError) + } + return Promise.resolve() + } +} + +// https://fetch.spec.whatwg.org/#http-fetch +async function httpFetch (fetchParams) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. Let actualResponse be null. + let actualResponse = null + + // 4. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 5. If request’s service-workers mode is "all", then: + if (request.serviceWorkers === 'all') { + // TODO + } + + // 6. If response is null, then: + if (response === null) { + // 1. If makeCORSPreflight is true and one of these conditions is true: + // TODO + + // 2. If request’s redirect mode is "follow", then set request’s + // service-workers mode to "none". + if (request.redirect === 'follow') { + request.serviceWorkers = 'none' + } + + // 3. Set response and actualResponse to the result of running + // HTTP-network-or-cache fetch given fetchParams. + actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) + + // 4. If request’s response tainting is "cors" and a CORS check + // for request and response returns failure, then return a network error. + if ( + request.responseTainting === 'cors' && + corsCheck(request, response) === 'failure' + ) { + return makeNetworkError('cors failure') + } + + // 5. If the TAO check for request and response returns failure, then set + // request’s timing allow failed flag. + if (TAOCheck(request, response) === 'failure') { + request.timingAllowFailed = true + } + } + + // 7. If either request’s response tainting or response’s type + // is "opaque", and the cross-origin resource policy check with + // request’s origin, request’s client, request’s destination, + // and actualResponse returns blocked, then return a network error. + if ( + (request.responseTainting === 'opaque' || response.type === 'opaque') && + crossOriginResourcePolicyCheck( + request.origin, + request.client, + request.destination, + actualResponse + ) === 'blocked' + ) { + return makeNetworkError('blocked') + } + + // 8. If actualResponse’s status is a redirect status, then: + if (redirectStatusSet.has(actualResponse.status)) { + // 1. If actualResponse’s status is not 303, request’s body is not null, + // and the connection uses HTTP/2, then user agents may, and are even + // encouraged to, transmit an RST_STREAM frame. + // See, https://github.com/whatwg/fetch/issues/1288 + if (request.redirect !== 'manual') { + fetchParams.controller.connection.destroy() + } + + // 2. Switch on request’s redirect mode: + if (request.redirect === 'error') { + // Set response to a network error. + response = makeNetworkError('unexpected redirect') + } else if (request.redirect === 'manual') { + // Set response to an opaque-redirect filtered response whose internal + // response is actualResponse. + // NOTE(spec): On the web this would return an `opaqueredirect` response, + // but that doesn't make sense server side. + // See https://github.com/nodejs/undici/issues/1193. + response = actualResponse + } else if (request.redirect === 'follow') { + // Set response to the result of running HTTP-redirect fetch given + // fetchParams and response. + response = await httpRedirectFetch(fetchParams, response) + } else { + assert(false) + } + } + + // 9. 
Set response’s timing info to timingInfo. + response.timingInfo = timingInfo + + // 10. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-redirect-fetch +function httpRedirectFetch (fetchParams, response) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let actualResponse be response, if response is not a filtered response, + // and response’s internal response otherwise. + const actualResponse = response.internalResponse + ? response.internalResponse + : response + + // 3. Let locationURL be actualResponse’s location URL given request’s current + // URL’s fragment. + let locationURL + + try { + locationURL = responseLocationURL( + actualResponse, + requestCurrentURL(request).hash + ) + + // 4. If locationURL is null, then return response. + if (locationURL == null) { + return response + } + } catch (err) { + // 5. If locationURL is failure, then return a network error. + return Promise.resolve(makeNetworkError(err)) + } + + // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network + // error. + if (!urlIsHttpHttpsScheme(locationURL)) { + return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) + } + + // 7. If request’s redirect count is 20, then return a network error. + if (request.redirectCount === 20) { + return Promise.resolve(makeNetworkError('redirect count exceeded')) + } + + // 8. Increase request’s redirect count by 1. + request.redirectCount += 1 + + // 9. If request’s mode is "cors", locationURL includes credentials, and + // request’s origin is not same origin with locationURL’s origin, then return + // a network error. + if ( + request.mode === 'cors' && + (locationURL.username || locationURL.password) && + !sameOrigin(request, locationURL) + ) { + return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) + } + + // 10. If request’s response tainting is "cors" and locationURL includes + // credentials, then return a network error. + if ( + request.responseTainting === 'cors' && + (locationURL.username || locationURL.password) + ) { + return Promise.resolve(makeNetworkError( + 'URL cannot contain credentials for request mode "cors"' + )) + } + + // 11. If actualResponse’s status is not 303, request’s body is non-null, + // and request’s body’s source is null, then return a network error. + if ( + actualResponse.status !== 303 && + request.body != null && + request.body.source == null + ) { + return Promise.resolve(makeNetworkError()) + } + + // 12. If one of the following is true + // - actualResponse’s status is 301 or 302 and request’s method is `POST` + // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` + if ( + ([301, 302].includes(actualResponse.status) && request.method === 'POST') || + (actualResponse.status === 303 && + !GET_OR_HEAD.includes(request.method)) + ) { + // then: + // 1. Set request’s method to `GET` and request’s body to null. + request.method = 'GET' + request.body = null + + // 2. For each headerName of request-body-header name, delete headerName from + // request’s header list. + for (const headerName of requestBodyHeader) { + request.headersList.delete(headerName) + } + } + + // 13. If request’s current URL’s origin is not same origin with locationURL’s + // origin, then for each headerName of CORS non-wildcard request-header name, + // delete headerName from request’s header list. 
+ if (!sameOrigin(requestCurrentURL(request), locationURL)) { + // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name + request.headersList.delete('authorization') + + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. + request.headersList.delete('cookie') + request.headersList.delete('host') + } + + // 14. If request’s body is non-null, then set request’s body to the first return + // value of safely extracting request’s body’s source. + if (request.body != null) { + assert(request.body.source != null) + request.body = safelyExtractBody(request.body.source)[0] + } + + // 15. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 16. Set timingInfo’s redirect end time and post-redirect start time to the + // coarsened shared current time given fetchParams’s cross-origin isolated + // capability. + timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = + coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) + + // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s + // redirect start time to timingInfo’s start time. + if (timingInfo.redirectStartTime === 0) { + timingInfo.redirectStartTime = timingInfo.startTime + } + + // 18. Append locationURL to request’s URL list. + request.urlList.push(locationURL) + + // 19. Invoke set request’s referrer policy on redirect on request and + // actualResponse. + setRequestReferrerPolicyOnRedirect(request, actualResponse) + + // 20. Return the result of running main fetch given fetchParams and true. + return mainFetch(fetchParams, true) +} + +// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch +async function httpNetworkOrCacheFetch ( + fetchParams, + isAuthenticationFetch = false, + isNewConnectionFetch = false +) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let httpFetchParams be null. + let httpFetchParams = null + + // 3. Let httpRequest be null. + let httpRequest = null + + // 4. Let response be null. + let response = null + + // 5. Let storedResponse be null. + // TODO: cache + + // 6. Let httpCache be null. + const httpCache = null + + // 7. Let the revalidatingFlag be unset. + const revalidatingFlag = false + + // 8. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. If request’s window is "no-window" and request’s redirect mode is + // "error", then set httpFetchParams to fetchParams and httpRequest to + // request. + if (request.window === 'no-window' && request.redirect === 'error') { + httpFetchParams = fetchParams + httpRequest = request + } else { + // Otherwise: + + // 1. Set httpRequest to a clone of request. + httpRequest = makeRequest(request) + + // 2. Set httpFetchParams to a copy of fetchParams. + httpFetchParams = { ...fetchParams } + + // 3. Set httpFetchParams’s request to httpRequest. + httpFetchParams.request = httpRequest + } + + // 3. Let includeCredentials be true if one of + const includeCredentials = + request.credentials === 'include' || + (request.credentials === 'same-origin' && + request.responseTainting === 'basic') + + // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s + // body is non-null; otherwise null. + const contentLength = httpRequest.body ? httpRequest.body.length : null + + // 5. Let contentLengthHeaderValue be null. 
+ let contentLengthHeaderValue = null + + // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or + // `PUT`, then set contentLengthHeaderValue to `0`. + if ( + httpRequest.body == null && + ['POST', 'PUT'].includes(httpRequest.method) + ) { + contentLengthHeaderValue = '0' + } + + // 7. If contentLength is non-null, then set contentLengthHeaderValue to + // contentLength, serialized and isomorphic encoded. + if (contentLength != null) { + contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) + } + + // 8. If contentLengthHeaderValue is non-null, then append + // `Content-Length`/contentLengthHeaderValue to httpRequest’s header + // list. + if (contentLengthHeaderValue != null) { + httpRequest.headersList.append('content-length', contentLengthHeaderValue) + } + + // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, + // contentLengthHeaderValue) to httpRequest’s header list. + + // 10. If contentLength is non-null and httpRequest’s keepalive is true, + // then: + if (contentLength != null && httpRequest.keepalive) { + // NOTE: keepalive is a noop outside of browser context. + } + + // 11. If httpRequest’s referrer is a URL, then append + // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, + // to httpRequest’s header list. + if (httpRequest.referrer instanceof URL) { + httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) + } + + // 12. Append a request `Origin` header for httpRequest. + appendRequestOriginHeader(httpRequest) + + // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA] + appendFetchMetadata(httpRequest) + + // 14. If httpRequest’s header list does not contain `User-Agent`, then + // user agents should append `User-Agent`/default `User-Agent` value to + // httpRequest’s header list. + if (!httpRequest.headersList.contains('user-agent')) { + httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') + } + + // 15. If httpRequest’s cache mode is "default" and httpRequest’s header + // list contains `If-Modified-Since`, `If-None-Match`, + // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set + // httpRequest’s cache mode to "no-store". + if ( + httpRequest.cache === 'default' && + (httpRequest.headersList.contains('if-modified-since') || + httpRequest.headersList.contains('if-none-match') || + httpRequest.headersList.contains('if-unmodified-since') || + httpRequest.headersList.contains('if-match') || + httpRequest.headersList.contains('if-range')) + ) { + httpRequest.cache = 'no-store' + } + + // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent + // no-cache cache-control header modification flag is unset, and + // httpRequest’s header list does not contain `Cache-Control`, then append + // `Cache-Control`/`max-age=0` to httpRequest’s header list. + if ( + httpRequest.cache === 'no-cache' && + !httpRequest.preventNoCacheCacheControlHeaderModification && + !httpRequest.headersList.contains('cache-control') + ) { + httpRequest.headersList.append('cache-control', 'max-age=0') + } + + // 17. If httpRequest’s cache mode is "no-store" or "reload", then: + if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { + // 1. If httpRequest’s header list does not contain `Pragma`, then append + // `Pragma`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('pragma')) { + httpRequest.headersList.append('pragma', 'no-cache') + } + + // 2. 
If httpRequest’s header list does not contain `Cache-Control`, + // then append `Cache-Control`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('cache-control')) { + httpRequest.headersList.append('cache-control', 'no-cache') + } + } + + // 18. If httpRequest’s header list contains `Range`, then append + // `Accept-Encoding`/`identity` to httpRequest’s header list. + if (httpRequest.headersList.contains('range')) { + httpRequest.headersList.append('accept-encoding', 'identity') + } + + // 19. Modify httpRequest’s header list per HTTP. Do not append a given + // header if httpRequest’s header list contains that header’s name. + // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 + if (!httpRequest.headersList.contains('accept-encoding')) { + if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { + httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') + } else { + httpRequest.headersList.append('accept-encoding', 'gzip, deflate') + } + } + + httpRequest.headersList.delete('host') + + // 20. If includeCredentials is true, then: + if (includeCredentials) { + // 1. If the user agent is not configured to block cookies for httpRequest + // (see section 7 of [COOKIES]), then: + // TODO: credentials + // 2. If httpRequest’s header list does not contain `Authorization`, then: + // TODO: credentials + } + + // 21. If there’s a proxy-authentication entry, use it as appropriate. + // TODO: proxy-authentication + + // 22. Set httpCache to the result of determining the HTTP cache + // partition, given httpRequest. + // TODO: cache + + // 23. If httpCache is null, then set httpRequest’s cache mode to + // "no-store". + if (httpCache == null) { + httpRequest.cache = 'no-store' + } + + // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", + // then: + if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { + // TODO: cache + } + + // 9. If aborted, then return the appropriate network error for fetchParams. + // TODO + + // 10. If response is null, then: + if (response == null) { + // 1. If httpRequest’s cache mode is "only-if-cached", then return a + // network error. + if (httpRequest.mode === 'only-if-cached') { + return makeNetworkError('only if cached') + } + + // 2. Let forwardResponse be the result of running HTTP-network fetch + // given httpFetchParams, includeCredentials, and isNewConnectionFetch. + const forwardResponse = await httpNetworkFetch( + httpFetchParams, + includeCredentials, + isNewConnectionFetch + ) + + // 3. If httpRequest’s method is unsafe and forwardResponse’s status is + // in the range 200 to 399, inclusive, invalidate appropriate stored + // responses in httpCache, as per the "Invalidation" chapter of HTTP + // Caching, and set storedResponse to null. [HTTP-CACHING] + if ( + !safeMethodsSet.has(httpRequest.method) && + forwardResponse.status >= 200 && + forwardResponse.status <= 399 + ) { + // TODO: cache + } + + // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, + // then: + if (revalidatingFlag && forwardResponse.status === 304) { + // TODO: cache + } + + // 5. If response is null, then: + if (response == null) { + // 1. Set response to forwardResponse. + response = forwardResponse + + // 2. Store httpRequest and forwardResponse in httpCache, as per the + // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] + // TODO: cache + } + } + + // 11. Set response’s URL list to a clone of httpRequest’s URL list. 
+ response.urlList = [...httpRequest.urlList] + + // 12. If httpRequest’s header list contains `Range`, then set response’s + // range-requested flag. + if (httpRequest.headersList.contains('range')) { + response.rangeRequested = true + } + + // 13. Set response’s request-includes-credentials to includeCredentials. + response.requestIncludesCredentials = includeCredentials + + // 14. If response’s status is 401, httpRequest’s response tainting is not + // "cors", includeCredentials is true, and request’s window is an environment + // settings object, then: + // TODO + + // 15. If response’s status is 407, then: + if (response.status === 407) { + // 1. If request’s window is "no-window", then return a network error. + if (request.window === 'no-window') { + return makeNetworkError() + } + + // 2. ??? + + // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) + } + + // 4. Prompt the end user as appropriate in request’s window and store + // the result as a proxy-authentication entry. [HTTP-AUTH] + // TODO: Invoke some kind of callback? + + // 5. Set response to the result of running HTTP-network-or-cache fetch given + // fetchParams. + // TODO + return makeNetworkError('proxy authentication required') + } + + // 16. If all of the following are true + if ( + // response’s status is 421 + response.status === 421 && + // isNewConnectionFetch is false + !isNewConnectionFetch && + // request’s body is null, or request’s body is non-null and request’s body’s source is non-null + (request.body == null || request.body.source != null) + ) { + // then: + + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) + } + + // 2. Set response to the result of running HTTP-network-or-cache + // fetch given fetchParams, isAuthenticationFetch, and true. + + // TODO (spec): The spec doesn't specify this but we need to cancel + // the active response before we can start a new one. + // https://github.com/whatwg/fetch/issues/1293 + fetchParams.controller.connection.destroy() + + response = await httpNetworkOrCacheFetch( + fetchParams, + isAuthenticationFetch, + true + ) + } + + // 17. If isAuthenticationFetch is true, then create an authentication entry + if (isAuthenticationFetch) { + // TODO + } + + // 18. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-network-fetch +async function httpNetworkFetch ( + fetchParams, + includeCredentials = false, + forceNewConnection = false +) { + assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) + + fetchParams.controller.connection = { + abort: null, + destroyed: false, + destroy (err) { + if (!this.destroyed) { + this.destroyed = true + this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) + } + } + } + + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 4. Let httpCache be the result of determining the HTTP cache partition, + // given request. + // TODO: cache + const httpCache = null + + // 5. If httpCache is null, then set request’s cache mode to "no-store". + if (httpCache == null) { + request.cache = 'no-store' + } + + // 6. 
Let networkPartitionKey be the result of determining the network + // partition key given request. + // TODO + + // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise + // "no". + const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars + + // 8. Switch on request’s mode: + if (request.mode === 'websocket') { + // Let connection be the result of obtaining a WebSocket connection, + // given request’s current URL. + // TODO + } else { + // Let connection be the result of obtaining a connection, given + // networkPartitionKey, request’s current URL’s origin, + // includeCredentials, and forceNewConnection. + // TODO + } + + // 9. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. If connection is failure, then return a network error. + + // 2. Set timingInfo’s final connection timing info to the result of + // calling clamp and coarsen connection timing info with connection’s + // timing info, timingInfo’s post-redirect start time, and fetchParams’s + // cross-origin isolated capability. + + // 3. If connection is not an HTTP/2 connection, request’s body is non-null, + // and request’s body’s source is null, then append (`Transfer-Encoding`, + // `chunked`) to request’s header list. + + // 4. Set timingInfo’s final network-request start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated + // capability. + + // 5. Set response to the result of making an HTTP request over connection + // using request with the following caveats: + + // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS] + // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] + + // - If request’s body is non-null, and request’s body’s source is null, + // then the user agent may have a buffer of up to 64 kibibytes and store + // a part of request’s body in that buffer. If the user agent reads from + // request’s body beyond that buffer’s size and the user agent needs to + // resend request, then instead return a network error. + + // - Set timingInfo’s final network-response start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated capability, + // immediately after the user agent’s HTTP parser receives the first byte + // of the response (e.g., frame header bytes for HTTP/2 or response status + // line for HTTP/1.x). + + // - Wait until all the headers are transmitted. + + // - Any responses whose status is in the range 100 to 199, inclusive, + // and is not 101, are to be ignored, except for the purposes of setting + // timingInfo’s final network-response start time above. + + // - If request’s header list contains `Transfer-Encoding`/`chunked` and + // response is transferred via HTTP/1.0 or older, then return a network + // error. + + // - If the HTTP request results in a TLS client certificate dialog, then: + + // 1. If request’s window is an environment settings object, make the + // dialog available in request’s window. + + // 2. Otherwise, return a network error. + + // To transmit request’s body body, run these steps: + let requestBody = null + // 1. If body is null and fetchParams’s process request end-of-body is + // non-null, then queue a fetch task given fetchParams’s process request + // end-of-body and fetchParams’s task destination. + if (request.body == null && fetchParams.processRequestEndOfBody) { + queueMicrotask(() => fetchParams.processRequestEndOfBody()) + } else if (request.body != null) { + // 2. Otherwise, if body is non-null: + + // 1. 
Let processBodyChunk given bytes be these steps: + const processBodyChunk = async function * (bytes) { + // 1. If the ongoing fetch is terminated, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. Run this step in parallel: transmit bytes. + yield bytes + + // 3. If fetchParams’s process request body is non-null, then run + // fetchParams’s process request body given bytes’s length. + fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) + } + + // 2. Let processEndOfBody be these steps: + const processEndOfBody = () => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. If fetchParams’s process request end-of-body is non-null, + // then run fetchParams’s process request end-of-body. + if (fetchParams.processRequestEndOfBody) { + fetchParams.processRequestEndOfBody() + } + } + + // 3. Let processBodyError given e be these steps: + const processBodyError = (e) => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. + if (e.name === 'AbortError') { + fetchParams.controller.abort() + } else { + fetchParams.controller.terminate(e) + } + } + + // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody, + // processBodyError, and fetchParams’s task destination. + requestBody = (async function * () { + try { + for await (const bytes of request.body.stream) { + yield * processBodyChunk(bytes) + } + processEndOfBody() + } catch (err) { + processBodyError(err) + } + })() + } + + try { + // socket is only provided for websockets + const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) + + if (socket) { + response = makeResponse({ status, statusText, headersList, socket }) + } else { + const iterator = body[Symbol.asyncIterator]() + fetchParams.controller.next = () => iterator.next() + + response = makeResponse({ status, statusText, headersList }) + } + } catch (err) { + // 10. If aborted, then: + if (err.name === 'AbortError') { + // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. + fetchParams.controller.connection.destroy() + + // 2. Return the appropriate network error for fetchParams. + return makeAppropriateNetworkError(fetchParams, err) + } + + return makeNetworkError(err) + } + + // 11. Let pullAlgorithm be an action that resumes the ongoing fetch + // if it is suspended. + const pullAlgorithm = () => { + fetchParams.controller.resume() + } + + // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s + // controller with reason, given reason. + const cancelAlgorithm = (reason) => { + fetchParams.controller.abort(reason) + } + + // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by + // the user agent. + // TODO + + // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object + // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. + // TODO + + // 15. Let stream be a new ReadableStream. + // 16. Set up stream with pullAlgorithm set to pullAlgorithm, + // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to + // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. 
+ if (!ReadableStream) { + ReadableStream = require('stream/web').ReadableStream + } + + const stream = new ReadableStream( + { + async start (controller) { + fetchParams.controller.controller = controller + }, + async pull (controller) { + await pullAlgorithm(controller) + }, + async cancel (reason) { + await cancelAlgorithm(reason) + } + }, + { + highWaterMark: 0, + size () { + return 1 + } + } + ) + + // 17. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. Set response’s body to a new body whose stream is stream. + response.body = { stream } + + // 2. If response is not a network error and request’s cache mode is + // not "no-store", then update response in httpCache for request. + // TODO + + // 3. If includeCredentials is true and the user agent is not configured + // to block cookies for request (see section 7 of [COOKIES]), then run the + // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on + // the value of each header whose name is a byte-case-insensitive match for + // `Set-Cookie` in response’s header list, if any, and request’s current URL. + // TODO + + // 18. If aborted, then: + // TODO + + // 19. Run these steps in parallel: + + // 1. Run these steps, but abort when fetchParams is canceled: + fetchParams.controller.on('terminated', onAborted) + fetchParams.controller.resume = async () => { + // 1. While true + while (true) { + // 1-3. See onData... + + // 4. Set bytes to the result of handling content codings given + // codings and bytes. + let bytes + let isFailure + try { + const { done, value } = await fetchParams.controller.next() + + if (isAborted(fetchParams)) { + break + } + + bytes = done ? undefined : value + } catch (err) { + if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { + // zlib doesn't like empty streams. + bytes = undefined + } else { + bytes = err + + // err may be propagated from the result of calling readablestream.cancel, + // which might not be an error. https://github.com/nodejs/undici/issues/2009 + isFailure = true + } + } + + if (bytes === undefined) { + // 2. Otherwise, if the bytes transmission for response’s message + // body is done normally and stream is readable, then close + // stream, finalize response for fetchParams and response, and + // abort these in-parallel steps. + readableStreamClose(fetchParams.controller.controller) + + finalizeResponse(fetchParams, response) + + return + } + + // 5. Increase timingInfo’s decoded body size by bytes’s length. + timingInfo.decodedBodySize += bytes?.byteLength ?? 0 + + // 6. If bytes is failure, then terminate fetchParams’s controller. + if (isFailure) { + fetchParams.controller.terminate(bytes) + return + } + + // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes + // into stream. + fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) + + // 8. If stream is errored, then terminate the ongoing fetch. + if (isErrored(stream)) { + fetchParams.controller.terminate() + return + } + + // 9. If stream doesn’t need more data ask the user agent to suspend + // the ongoing fetch. + if (!fetchParams.controller.controller.desiredSize) { + return + } + } + } + + // 2. If aborted, then: + function onAborted (reason) { + // 2. If fetchParams is aborted, then: + if (isAborted(fetchParams)) { + // 1. Set response’s aborted flag. + response.aborted = true + + // 2. 
If stream is readable, then error stream with the result of + // deserialize a serialized abort reason given fetchParams’s + // controller’s serialized abort reason and an + // implementation-defined realm. + if (isReadable(stream)) { + fetchParams.controller.controller.error( + fetchParams.controller.serializedAbortReason + ) + } + } else { + // 3. Otherwise, if stream is readable, error stream with a TypeError. + if (isReadable(stream)) { + fetchParams.controller.controller.error(new TypeError('terminated', { + cause: isErrorLike(reason) ? reason : undefined + })) + } + } + + // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. + // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. + fetchParams.controller.connection.destroy() + } + + // 20. Return response. + return response + + async function dispatch ({ body }) { + const url = requestCurrentURL(request) + /** @type {import('../..').Agent} */ + const agent = fetchParams.controller.dispatcher + + return new Promise((resolve, reject) => agent.dispatch( + { + path: url.pathname + url.search, + origin: url.origin, + method: request.method, + body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, + headers: request.headersList.entries, + maxRedirections: 0, + upgrade: request.mode === 'websocket' ? 'websocket' : undefined + }, + { + body: null, + abort: null, + + onConnect (abort) { + // TODO (fix): Do we need connection here? + const { connection } = fetchParams.controller + + if (connection.destroyed) { + abort(new DOMException('The operation was aborted.', 'AbortError')) + } else { + fetchParams.controller.on('terminated', abort) + this.abort = connection.abort = abort + } + }, + + onHeaders (status, headersList, resume, statusText) { + if (status < 200) { + return + } + + let codings = [] + let location = '' + + const headers = new Headers() + + // For H2, the headers are a plain JS object + // We distinguish between them and iterate accordingly + if (Array.isArray(headersList)) { + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." + codings = val.toLowerCase().split(',').map((x) => x.trim()) + } else if (key.toLowerCase() === 'location') { + location = val + } + + headers[kHeadersList].append(key, val) + } + } else { + const keys = Object.keys(headersList) + for (const key of keys) { + const val = headersList[key] + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." 
+ codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() + } else if (key.toLowerCase() === 'location') { + location = val + } + + headers[kHeadersList].append(key, val) + } + } + + this.body = new Readable({ read: resume }) + + const decoders = [] + + const willFollow = request.redirect === 'follow' && + location && + redirectStatusSet.has(status) + + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding + if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { + for (const coding of codings) { + // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 + if (coding === 'x-gzip' || coding === 'gzip') { + decoders.push(zlib.createGunzip({ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH + })) + } else if (coding === 'deflate') { + decoders.push(zlib.createInflate()) + } else if (coding === 'br') { + decoders.push(zlib.createBrotliDecompress()) + } else { + decoders.length = 0 + break + } + } + } + + resolve({ + status, + statusText, + headersList: headers[kHeadersList], + body: decoders.length + ? pipeline(this.body, ...decoders, () => { }) + : this.body.on('error', () => {}) + }) + + return true + }, + + onData (chunk) { + if (fetchParams.controller.dump) { + return + } + + // 1. If one or more bytes have been transmitted from response’s + // message body, then: + + // 1. Let bytes be the transmitted bytes. + const bytes = chunk + + // 2. Let codings be the result of extracting header list values + // given `Content-Encoding` and response’s header list. + // See pullAlgorithm. + + // 3. Increase timingInfo’s encoded body size by bytes’s length. + timingInfo.encodedBodySize += bytes.byteLength + + // 4. See pullAlgorithm... 
+ + return this.body.push(bytes) + }, + + onComplete () { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } + + fetchParams.controller.ended = true + + this.body.push(null) + }, + + onError (error) { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } + + this.body?.destroy(error) + + fetchParams.controller.terminate(error) + + reject(error) + }, + + onUpgrade (status, headersList, socket) { + if (status !== 101) { + return + } + + const headers = new Headers() + + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + + headers[kHeadersList].append(key, val) + } + + resolve({ + status, + statusText: STATUS_CODES[status], + headersList: headers[kHeadersList], + socket + }) + + return true + } + } + )) + } +} + +module.exports = { + fetch, + Fetch, + fetching, + finalizeAndReportTiming +} diff --git a/node_modules/undici/lib/fetch/request.js b/node_modules/undici/lib/fetch/request.js new file mode 100644 index 0000000..6fe4dff --- /dev/null +++ b/node_modules/undici/lib/fetch/request.js @@ -0,0 +1,946 @@ +/* globals AbortController */ + +'use strict' + +const { extractBody, mixinBody, cloneBody } = require('./body') +const { Headers, fill: fillHeaders, HeadersList } = require('./headers') +const { FinalizationRegistry } = require('../compat/dispatcher-weakref')() +const util = require('../core/util') +const { + isValidHTTPToken, + sameOrigin, + normalizeMethod, + makePolicyContainer, + normalizeMethodRecord +} = require('./util') +const { + forbiddenMethodsSet, + corsSafeListedMethodsSet, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + requestDuplex +} = require('./constants') +const { kEnumerableProperty } = util +const { kHeaders, kSignal, kState, kGuard, kRealm } = require('./symbols') +const { webidl } = require('./webidl') +const { getGlobalOrigin } = require('./global') +const { URLSerializer } = require('./dataURL') +const { kHeadersList, kConstruct } = require('../core/symbols') +const assert = require('assert') +const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('events') + +let TransformStream = globalThis.TransformStream + +const kAbortController = Symbol('abortController') + +const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { + signal.removeEventListener('abort', abort) +}) + +// https://fetch.spec.whatwg.org/#request-class +class Request { + // https://fetch.spec.whatwg.org/#dom-request + constructor (input, init = {}) { + if (input === kConstruct) { + return + } + + webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) + + input = webidl.converters.RequestInfo(input) + init = webidl.converters.RequestInit(init) + + // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object + this[kRealm] = { + settingsObject: { + baseUrl: getGlobalOrigin(), + get origin () { + return this.baseUrl?.origin + }, + policyContainer: makePolicyContainer() + } + } + + // 1. Let request be null. + let request = null + + // 2. Let fallbackMode be null. + let fallbackMode = null + + // 3. Let baseURL be this’s relevant settings object’s API base URL. + const baseUrl = this[kRealm].settingsObject.baseUrl + + // 4. Let signal be null. + let signal = null + + // 5. If input is a string, then: + if (typeof input === 'string') { + // 1. Let parsedURL be the result of parsing input with baseURL. 
+ // 2. If parsedURL is failure, then throw a TypeError. + let parsedURL + try { + parsedURL = new URL(input, baseUrl) + } catch (err) { + throw new TypeError('Failed to parse URL from ' + input, { cause: err }) + } + + // 3. If parsedURL includes credentials, then throw a TypeError. + if (parsedURL.username || parsedURL.password) { + throw new TypeError( + 'Request cannot be constructed from a URL that includes credentials: ' + + input + ) + } + + // 4. Set request to a new request whose URL is parsedURL. + request = makeRequest({ urlList: [parsedURL] }) + + // 5. Set fallbackMode to "cors". + fallbackMode = 'cors' + } else { + // 6. Otherwise: + + // 7. Assert: input is a Request object. + assert(input instanceof Request) + + // 8. Set request to input’s request. + request = input[kState] + + // 9. Set signal to input’s signal. + signal = input[kSignal] + } + + // 7. Let origin be this’s relevant settings object’s origin. + const origin = this[kRealm].settingsObject.origin + + // 8. Let window be "client". + let window = 'client' + + // 9. If request’s window is an environment settings object and its origin + // is same origin with origin, then set window to request’s window. + if ( + request.window?.constructor?.name === 'EnvironmentSettingsObject' && + sameOrigin(request.window, origin) + ) { + window = request.window + } + + // 10. If init["window"] exists and is non-null, then throw a TypeError. + if (init.window != null) { + throw new TypeError(`'window' option '${window}' must be null`) + } + + // 11. If init["window"] exists, then set window to "no-window". + if ('window' in init) { + window = 'no-window' + } + + // 12. Set request to a new request with the following properties: + request = makeRequest({ + // URL request’s URL. + // undici implementation note: this is set as the first item in request's urlList in makeRequest + // method request’s method. + method: request.method, + // header list A copy of request’s header list. + // undici implementation note: headersList is cloned in makeRequest + headersList: request.headersList, + // unsafe-request flag Set. + unsafeRequest: request.unsafeRequest, + // client This’s relevant settings object. + client: this[kRealm].settingsObject, + // window window. + window, + // priority request’s priority. + priority: request.priority, + // origin request’s origin. The propagation of the origin is only significant for navigation requests + // being handled by a service worker. In this scenario a request can have an origin that is different + // from the current client. + origin: request.origin, + // referrer request’s referrer. + referrer: request.referrer, + // referrer policy request’s referrer policy. + referrerPolicy: request.referrerPolicy, + // mode request’s mode. + mode: request.mode, + // credentials mode request’s credentials mode. + credentials: request.credentials, + // cache mode request’s cache mode. + cache: request.cache, + // redirect mode request’s redirect mode. + redirect: request.redirect, + // integrity metadata request’s integrity metadata. + integrity: request.integrity, + // keepalive request’s keepalive. + keepalive: request.keepalive, + // reload-navigation flag request’s reload-navigation flag. + reloadNavigation: request.reloadNavigation, + // history-navigation flag request’s history-navigation flag. + historyNavigation: request.historyNavigation, + // URL list A clone of request’s URL list. + urlList: [...request.urlList] + }) + + const initHasKey = Object.keys(init).length !== 0 + + // 13. 
If init is not empty, then: + if (initHasKey) { + // 1. If request’s mode is "navigate", then set it to "same-origin". + if (request.mode === 'navigate') { + request.mode = 'same-origin' + } + + // 2. Unset request’s reload-navigation flag. + request.reloadNavigation = false + + // 3. Unset request’s history-navigation flag. + request.historyNavigation = false + + // 4. Set request’s origin to "client". + request.origin = 'client' + + // 5. Set request’s referrer to "client" + request.referrer = 'client' + + // 6. Set request’s referrer policy to the empty string. + request.referrerPolicy = '' + + // 7. Set request’s URL to request’s current URL. + request.url = request.urlList[request.urlList.length - 1] + + // 8. Set request’s URL list to « request’s URL ». + request.urlList = [request.url] + } + + // 14. If init["referrer"] exists, then: + if (init.referrer !== undefined) { + // 1. Let referrer be init["referrer"]. + const referrer = init.referrer + + // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". + if (referrer === '') { + request.referrer = 'no-referrer' + } else { + // 1. Let parsedReferrer be the result of parsing referrer with + // baseURL. + // 2. If parsedReferrer is failure, then throw a TypeError. + let parsedReferrer + try { + parsedReferrer = new URL(referrer, baseUrl) + } catch (err) { + throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) + } + + // 3. If one of the following is true + // - parsedReferrer’s scheme is "about" and path is the string "client" + // - parsedReferrer’s origin is not same origin with origin + // then set request’s referrer to "client". + if ( + (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || + (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) + ) { + request.referrer = 'client' + } else { + // 4. Otherwise, set request’s referrer to parsedReferrer. + request.referrer = parsedReferrer + } + } + } + + // 15. If init["referrerPolicy"] exists, then set request’s referrer policy + // to it. + if (init.referrerPolicy !== undefined) { + request.referrerPolicy = init.referrerPolicy + } + + // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. + let mode + if (init.mode !== undefined) { + mode = init.mode + } else { + mode = fallbackMode + } + + // 17. If mode is "navigate", then throw a TypeError. + if (mode === 'navigate') { + throw webidl.errors.exception({ + header: 'Request constructor', + message: 'invalid request mode navigate.' + }) + } + + // 18. If mode is non-null, set request’s mode to mode. + if (mode != null) { + request.mode = mode + } + + // 19. If init["credentials"] exists, then set request’s credentials mode + // to it. + if (init.credentials !== undefined) { + request.credentials = init.credentials + } + + // 18. If init["cache"] exists, then set request’s cache mode to it. + if (init.cache !== undefined) { + request.cache = init.cache + } + + // 21. If request’s cache mode is "only-if-cached" and request’s mode is + // not "same-origin", then throw a TypeError. + if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { + throw new TypeError( + "'only-if-cached' can be set only with 'same-origin' mode" + ) + } + + // 22. If init["redirect"] exists, then set request’s redirect mode to it. + if (init.redirect !== undefined) { + request.redirect = init.redirect + } + + // 23. If init["integrity"] exists, then set request’s integrity metadata to it. 
+ if (init.integrity != null) { + request.integrity = String(init.integrity) + } + + // 24. If init["keepalive"] exists, then set request’s keepalive to it. + if (init.keepalive !== undefined) { + request.keepalive = Boolean(init.keepalive) + } + + // 25. If init["method"] exists, then: + if (init.method !== undefined) { + // 1. Let method be init["method"]. + let method = init.method + + // 2. If method is not a method or method is a forbidden method, then + // throw a TypeError. + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) + } + + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`) + } + + // 3. Normalize method. + method = normalizeMethodRecord[method] ?? normalizeMethod(method) + + // 4. Set request’s method to method. + request.method = method + } + + // 26. If init["signal"] exists, then set signal to it. + if (init.signal !== undefined) { + signal = init.signal + } + + // 27. Set this’s request to request. + this[kState] = request + + // 28. Set this’s signal to a new AbortSignal object with this’s relevant + // Realm. + // TODO: could this be simplified with AbortSignal.any + // (https://dom.spec.whatwg.org/#dom-abortsignal-any) + const ac = new AbortController() + this[kSignal] = ac.signal + this[kSignal][kRealm] = this[kRealm] + + // 29. If signal is not null, then make this’s signal follow signal. + if (signal != null) { + if ( + !signal || + typeof signal.aborted !== 'boolean' || + typeof signal.addEventListener !== 'function' + ) { + throw new TypeError( + "Failed to construct 'Request': member signal is not of type AbortSignal." + ) + } + + if (signal.aborted) { + ac.abort(signal.reason) + } else { + // Keep a strong ref to ac while request object + // is alive. This is needed to prevent AbortController + // from being prematurely garbage collected. + // See, https://github.com/nodejs/undici/issues/1926. + this[kAbortController] = ac + + const acRef = new WeakRef(ac) + const abort = function () { + const ac = acRef.deref() + if (ac !== undefined) { + ac.abort(this.reason) + } + } + + // Third-party AbortControllers may not work with these. + // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. + try { + // If the max amount of listeners is equal to the default, increase it + // This is only available in node >= v19.9.0 + if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { + setMaxListeners(100, signal) + } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { + setMaxListeners(100, signal) + } + } catch {} + + util.addAbortListener(signal, abort) + requestFinalizer.register(ac, { signal, abort }) + } + } + + // 30. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is request’s header list and guard is + // "request". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kHeadersList] = request.headersList + this[kHeaders][kGuard] = 'request' + this[kHeaders][kRealm] = this[kRealm] + + // 31. If this’s request’s mode is "no-cors", then: + if (mode === 'no-cors') { + // 1. If this’s request’s method is not a CORS-safelisted method, + // then throw a TypeError. + if (!corsSafeListedMethodsSet.has(request.method)) { + throw new TypeError( + `'${request.method} is unsupported in no-cors mode.` + ) + } + + // 2. Set this’s headers’s guard to "request-no-cors". + this[kHeaders][kGuard] = 'request-no-cors' + } + + // 32. 
If init is not empty, then: + if (initHasKey) { + /** @type {HeadersList} */ + const headersList = this[kHeaders][kHeadersList] + // 1. Let headers be a copy of this’s headers and its associated header + // list. + // 2. If init["headers"] exists, then set headers to init["headers"]. + const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) + + // 3. Empty this’s headers’s header list. + headersList.clear() + + // 4. If headers is a Headers object, then for each header in its header + // list, append header’s name/header’s value to this’s headers. + if (headers instanceof HeadersList) { + for (const [key, val] of headers) { + headersList.append(key, val) + } + // Note: Copy the `set-cookie` meta-data. + headersList.cookies = headers.cookies + } else { + // 5. Otherwise, fill this’s headers with headers. + fillHeaders(this[kHeaders], headers) + } + } + + // 33. Let inputBody be input’s request’s body if input is a Request + // object; otherwise null. + const inputBody = input instanceof Request ? input[kState].body : null + + // 34. If either init["body"] exists and is non-null or inputBody is + // non-null, and request’s method is `GET` or `HEAD`, then throw a + // TypeError. + if ( + (init.body != null || inputBody != null) && + (request.method === 'GET' || request.method === 'HEAD') + ) { + throw new TypeError('Request with GET/HEAD method cannot have body.') + } + + // 35. Let initBody be null. + let initBody = null + + // 36. If init["body"] exists and is non-null, then: + if (init.body != null) { + // 1. Let Content-Type be null. + // 2. Set initBody and Content-Type to the result of extracting + // init["body"], with keepalive set to request’s keepalive. + const [extractedBody, contentType] = extractBody( + init.body, + request.keepalive + ) + initBody = extractedBody + + // 3, If Content-Type is non-null and this’s headers’s header list does + // not contain `Content-Type`, then append `Content-Type`/Content-Type to + // this’s headers. + if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { + this[kHeaders].append('content-type', contentType) + } + } + + // 37. Let inputOrInitBody be initBody if it is non-null; otherwise + // inputBody. + const inputOrInitBody = initBody ?? inputBody + + // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is + // null, then: + if (inputOrInitBody != null && inputOrInitBody.source == null) { + // 1. If initBody is non-null and init["duplex"] does not exist, + // then throw a TypeError. + if (initBody != null && init.duplex == null) { + throw new TypeError('RequestInit: duplex option is required when sending a body.') + } + + // 2. If this’s request’s mode is neither "same-origin" nor "cors", + // then throw a TypeError. + if (request.mode !== 'same-origin' && request.mode !== 'cors') { + throw new TypeError( + 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' + ) + } + + // 3. Set this’s request’s use-CORS-preflight flag. + request.useCORSPreflightFlag = true + } + + // 39. Let finalBody be inputOrInitBody. + let finalBody = inputOrInitBody + + // 40. If initBody is null and inputBody is non-null, then: + if (initBody == null && inputBody != null) { + // 1. If input is unusable, then throw a TypeError. + if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { + throw new TypeError( + 'Cannot construct a Request with a Request object that has already been used.' + ) + } + + // 2. 
Set finalBody to the result of creating a proxy for inputBody. + if (!TransformStream) { + TransformStream = require('stream/web').TransformStream + } + + // https://streams.spec.whatwg.org/#readablestream-create-a-proxy + const identityTransform = new TransformStream() + inputBody.stream.pipeThrough(identityTransform) + finalBody = { + source: inputBody.source, + length: inputBody.length, + stream: identityTransform.readable + } + } + + // 41. Set this’s request’s body to finalBody. + this[kState].body = finalBody + } + + // Returns request’s HTTP method, which is "GET" by default. + get method () { + webidl.brandCheck(this, Request) + + // The method getter steps are to return this’s request’s method. + return this[kState].method + } + + // Returns the URL of request as a string. + get url () { + webidl.brandCheck(this, Request) + + // The url getter steps are to return this’s request’s URL, serialized. + return URLSerializer(this[kState].url) + } + + // Returns a Headers object consisting of the headers associated with request. + // Note that headers added in the network layer by the user agent will not + // be accounted for in this object, e.g., the "Host" header. + get headers () { + webidl.brandCheck(this, Request) + + // The headers getter steps are to return this’s headers. + return this[kHeaders] + } + + // Returns the kind of resource requested by request, e.g., "document" + // or "script". + get destination () { + webidl.brandCheck(this, Request) + + // The destination getter are to return this’s request’s destination. + return this[kState].destination + } + + // Returns the referrer of request. Its value can be a same-origin URL if + // explicitly set in init, the empty string to indicate no referrer, and + // "about:client" when defaulting to the global’s default. This is used + // during fetching to determine the value of the `Referer` header of the + // request being made. + get referrer () { + webidl.brandCheck(this, Request) + + // 1. If this’s request’s referrer is "no-referrer", then return the + // empty string. + if (this[kState].referrer === 'no-referrer') { + return '' + } + + // 2. If this’s request’s referrer is "client", then return + // "about:client". + if (this[kState].referrer === 'client') { + return 'about:client' + } + + // Return this’s request’s referrer, serialized. + return this[kState].referrer.toString() + } + + // Returns the referrer policy associated with request. + // This is used during fetching to compute the value of the request’s + // referrer. + get referrerPolicy () { + webidl.brandCheck(this, Request) + + // The referrerPolicy getter steps are to return this’s request’s referrer policy. + return this[kState].referrerPolicy + } + + // Returns the mode associated with request, which is a string indicating + // whether the request will use CORS, or will be restricted to same-origin + // URLs. + get mode () { + webidl.brandCheck(this, Request) + + // The mode getter steps are to return this’s request’s mode. + return this[kState].mode + } + + // Returns the credentials mode associated with request, + // which is a string indicating whether credentials will be sent with the + // request always, never, or only when sent to a same-origin URL. + get credentials () { + // The credentials getter steps are to return this’s request’s credentials mode. + return this[kState].credentials + } + + // Returns the cache mode associated with request, + // which is a string indicating how the request will + // interact with the browser’s cache when fetching. 
+ get cache () { + webidl.brandCheck(this, Request) + + // The cache getter steps are to return this’s request’s cache mode. + return this[kState].cache + } + + // Returns the redirect mode associated with request, + // which is a string indicating how redirects for the + // request will be handled during fetching. A request + // will follow redirects by default. + get redirect () { + webidl.brandCheck(this, Request) + + // The redirect getter steps are to return this’s request’s redirect mode. + return this[kState].redirect + } + + // Returns request’s subresource integrity metadata, which is a + // cryptographic hash of the resource being fetched. Its value + // consists of multiple hashes separated by whitespace. [SRI] + get integrity () { + webidl.brandCheck(this, Request) + + // The integrity getter steps are to return this’s request’s integrity + // metadata. + return this[kState].integrity + } + + // Returns a boolean indicating whether or not request can outlive the + // global in which it was created. + get keepalive () { + webidl.brandCheck(this, Request) + + // The keepalive getter steps are to return this’s request’s keepalive. + return this[kState].keepalive + } + + // Returns a boolean indicating whether or not request is for a reload + // navigation. + get isReloadNavigation () { + webidl.brandCheck(this, Request) + + // The isReloadNavigation getter steps are to return true if this’s + // request’s reload-navigation flag is set; otherwise false. + return this[kState].reloadNavigation + } + + // Returns a boolean indicating whether or not request is for a history + // navigation (a.k.a. back-foward navigation). + get isHistoryNavigation () { + webidl.brandCheck(this, Request) + + // The isHistoryNavigation getter steps are to return true if this’s request’s + // history-navigation flag is set; otherwise false. + return this[kState].historyNavigation + } + + // Returns the signal associated with request, which is an AbortSignal + // object indicating whether or not request has been aborted, and its + // abort event handler. + get signal () { + webidl.brandCheck(this, Request) + + // The signal getter steps are to return this’s signal. + return this[kSignal] + } + + get body () { + webidl.brandCheck(this, Request) + + return this[kState].body ? this[kState].body.stream : null + } + + get bodyUsed () { + webidl.brandCheck(this, Request) + + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + } + + get duplex () { + webidl.brandCheck(this, Request) + + return 'half' + } + + // Returns a clone of request. + clone () { + webidl.brandCheck(this, Request) + + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || this.body?.locked) { + throw new TypeError('unusable') + } + + // 2. Let clonedRequest be the result of cloning this’s request. + const clonedRequest = cloneRequest(this[kState]) + + // 3. Let clonedRequestObject be the result of creating a Request object, + // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. + const clonedRequestObject = new Request(kConstruct) + clonedRequestObject[kState] = clonedRequest + clonedRequestObject[kRealm] = this[kRealm] + clonedRequestObject[kHeaders] = new Headers(kConstruct) + clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList + clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] + + // 4. Make clonedRequestObject’s signal follow this’s signal. 
+ const ac = new AbortController() + if (this.signal.aborted) { + ac.abort(this.signal.reason) + } else { + util.addAbortListener( + this.signal, + () => { + ac.abort(this.signal.reason) + } + ) + } + clonedRequestObject[kSignal] = ac.signal + + // 4. Return clonedRequestObject. + return clonedRequestObject + } +} + +mixinBody(Request) + +function makeRequest (init) { + // https://fetch.spec.whatwg.org/#requests + const request = { + method: 'GET', + localURLsOnly: false, + unsafeRequest: false, + body: null, + client: null, + reservedClient: null, + replacesClientId: '', + window: 'client', + keepalive: false, + serviceWorkers: 'all', + initiator: '', + destination: '', + priority: null, + origin: 'client', + policyContainer: 'client', + referrer: 'client', + referrerPolicy: '', + mode: 'no-cors', + useCORSPreflightFlag: false, + credentials: 'same-origin', + useCredentials: false, + cache: 'default', + redirect: 'follow', + integrity: '', + cryptoGraphicsNonceMetadata: '', + parserMetadata: '', + reloadNavigation: false, + historyNavigation: false, + userActivation: false, + taintedOrigin: false, + redirectCount: 0, + responseTainting: 'basic', + preventNoCacheCacheControlHeaderModification: false, + done: false, + timingAllowFailed: false, + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList() + } + request.url = request.urlList[0] + return request +} + +// https://fetch.spec.whatwg.org/#concept-request-clone +function cloneRequest (request) { + // To clone a request request, run these steps: + + // 1. Let newRequest be a copy of request, except for its body. + const newRequest = makeRequest({ ...request, body: null }) + + // 2. If request’s body is non-null, set newRequest’s body to the + // result of cloning request’s body. + if (request.body != null) { + newRequest.body = cloneBody(request.body) + } + + // 3. Return newRequest. 
+ return newRequest +} + +Object.defineProperties(Request.prototype, { + method: kEnumerableProperty, + url: kEnumerableProperty, + headers: kEnumerableProperty, + redirect: kEnumerableProperty, + clone: kEnumerableProperty, + signal: kEnumerableProperty, + duplex: kEnumerableProperty, + destination: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + isHistoryNavigation: kEnumerableProperty, + isReloadNavigation: kEnumerableProperty, + keepalive: kEnumerableProperty, + integrity: kEnumerableProperty, + cache: kEnumerableProperty, + credentials: kEnumerableProperty, + attribute: kEnumerableProperty, + referrerPolicy: kEnumerableProperty, + referrer: kEnumerableProperty, + mode: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Request', + configurable: true + } +}) + +webidl.converters.Request = webidl.interfaceConverter( + Request +) + +// https://fetch.spec.whatwg.org/#requestinfo +webidl.converters.RequestInfo = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) + } + + if (V instanceof Request) { + return webidl.converters.Request(V) + } + + return webidl.converters.USVString(V) +} + +webidl.converters.AbortSignal = webidl.interfaceConverter( + AbortSignal +) + +// https://fetch.spec.whatwg.org/#requestinit +webidl.converters.RequestInit = webidl.dictionaryConverter([ + { + key: 'method', + converter: webidl.converters.ByteString + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + }, + { + key: 'body', + converter: webidl.nullableConverter( + webidl.converters.BodyInit + ) + }, + { + key: 'referrer', + converter: webidl.converters.USVString + }, + { + key: 'referrerPolicy', + converter: webidl.converters.DOMString, + // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy + allowedValues: referrerPolicy + }, + { + key: 'mode', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#concept-request-mode + allowedValues: requestMode + }, + { + key: 'credentials', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcredentials + allowedValues: requestCredentials + }, + { + key: 'cache', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcache + allowedValues: requestCache + }, + { + key: 'redirect', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestredirect + allowedValues: requestRedirect + }, + { + key: 'integrity', + converter: webidl.converters.DOMString + }, + { + key: 'keepalive', + converter: webidl.converters.boolean + }, + { + key: 'signal', + converter: webidl.nullableConverter( + (signal) => webidl.converters.AbortSignal( + signal, + { strict: false } + ) + ) + }, + { + key: 'window', + converter: webidl.converters.any + }, + { + key: 'duplex', + converter: webidl.converters.DOMString, + allowedValues: requestDuplex + } +]) + +module.exports = { Request, makeRequest } diff --git a/node_modules/undici/lib/fetch/response.js b/node_modules/undici/lib/fetch/response.js new file mode 100644 index 0000000..7338612 --- /dev/null +++ b/node_modules/undici/lib/fetch/response.js @@ -0,0 +1,571 @@ +'use strict' + +const { Headers, HeadersList, fill } = require('./headers') +const { extractBody, cloneBody, mixinBody } = require('./body') +const util = require('../core/util') +const { kEnumerableProperty } = util +const { + isValidReasonPhrase, + isCancelled, + isAborted, + isBlobLike, + serializeJavascriptValueToJSONString, + isErrorLike, + isomorphicEncode +} = 
require('./util') +const { + redirectStatusSet, + nullBodyStatus, + DOMException +} = require('./constants') +const { kState, kHeaders, kGuard, kRealm } = require('./symbols') +const { webidl } = require('./webidl') +const { FormData } = require('./formdata') +const { getGlobalOrigin } = require('./global') +const { URLSerializer } = require('./dataURL') +const { kHeadersList, kConstruct } = require('../core/symbols') +const assert = require('assert') +const { types } = require('util') + +const ReadableStream = globalThis.ReadableStream || require('stream/web').ReadableStream +const textEncoder = new TextEncoder('utf-8') + +// https://fetch.spec.whatwg.org/#response-class +class Response { + // Creates network error Response. + static error () { + // TODO + const relevantRealm = { settingsObject: {} } + + // The static error() method steps are to return the result of creating a + // Response object, given a new network error, "immutable", and this’s + // relevant Realm. + const responseObject = new Response() + responseObject[kState] = makeNetworkError() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response-json + static json (data, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) + + if (init !== null) { + init = webidl.converters.ResponseInit(init) + } + + // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. + const bytes = textEncoder.encode( + serializeJavascriptValueToJSONString(data) + ) + + // 2. Let body be the result of extracting bytes. + const body = extractBody(bytes) + + // 3. Let responseObject be the result of creating a Response object, given a new response, + // "response", and this’s relevant Realm. + const relevantRealm = { settingsObject: {} } + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'response' + responseObject[kHeaders][kRealm] = relevantRealm + + // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). + initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) + + // 5. Return responseObject. + return responseObject + } + + // Creates a redirect Response that redirects to url with status status. + static redirect (url, status = 302) { + const relevantRealm = { settingsObject: {} } + + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) + + url = webidl.converters.USVString(url) + status = webidl.converters['unsigned short'](status) + + // 1. Let parsedURL be the result of parsing url with current settings + // object’s API base URL. + // 2. If parsedURL is failure, then throw a TypeError. + // TODO: base-URL? + let parsedURL + try { + parsedURL = new URL(url, getGlobalOrigin()) + } catch (err) { + throw Object.assign(new TypeError('Failed to parse URL from ' + url), { + cause: err + }) + } + + // 3. If status is not a redirect status, then throw a RangeError. + if (!redirectStatusSet.has(status)) { + throw new RangeError('Invalid status code ' + status) + } + + // 4. Let responseObject be the result of creating a Response object, + // given a new response, "immutable", and this’s relevant Realm. 
+ const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + + // 5. Set responseObject’s response’s status to status. + responseObject[kState].status = status + + // 6. Let value be parsedURL, serialized and isomorphic encoded. + const value = isomorphicEncode(URLSerializer(parsedURL)) + + // 7. Append `Location`/value to responseObject’s response’s header list. + responseObject[kState].headersList.append('location', value) + + // 8. Return responseObject. + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response + constructor (body = null, init = {}) { + if (body !== null) { + body = webidl.converters.BodyInit(body) + } + + init = webidl.converters.ResponseInit(init) + + // TODO + this[kRealm] = { settingsObject: {} } + + // 1. Set this’s response to a new response. + this[kState] = makeResponse({}) + + // 2. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is this’s response’s header list and guard + // is "response". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kGuard] = 'response' + this[kHeaders][kHeadersList] = this[kState].headersList + this[kHeaders][kRealm] = this[kRealm] + + // 3. Let bodyWithType be null. + let bodyWithType = null + + // 4. If body is non-null, then set bodyWithType to the result of extracting body. + if (body != null) { + const [extractedBody, type] = extractBody(body) + bodyWithType = { body: extractedBody, type } + } + + // 5. Perform initialize a response given this, init, and bodyWithType. + initializeResponse(this, init, bodyWithType) + } + + // Returns response’s type, e.g., "cors". + get type () { + webidl.brandCheck(this, Response) + + // The type getter steps are to return this’s response’s type. + return this[kState].type + } + + // Returns response’s URL, if it has one; otherwise the empty string. + get url () { + webidl.brandCheck(this, Response) + + const urlList = this[kState].urlList + + // The url getter steps are to return the empty string if this’s + // response’s URL is null; otherwise this’s response’s URL, + // serialized with exclude fragment set to true. + const url = urlList[urlList.length - 1] ?? null + + if (url === null) { + return '' + } + + return URLSerializer(url, true) + } + + // Returns whether response was obtained through a redirect. + get redirected () { + webidl.brandCheck(this, Response) + + // The redirected getter steps are to return true if this’s response’s URL + // list has more than one item; otherwise false. + return this[kState].urlList.length > 1 + } + + // Returns response’s status. + get status () { + webidl.brandCheck(this, Response) + + // The status getter steps are to return this’s response’s status. + return this[kState].status + } + + // Returns whether response’s status is an ok status. + get ok () { + webidl.brandCheck(this, Response) + + // The ok getter steps are to return true if this’s response’s status is an + // ok status; otherwise false. + return this[kState].status >= 200 && this[kState].status <= 299 + } + + // Returns response’s status message. + get statusText () { + webidl.brandCheck(this, Response) + + // The statusText getter steps are to return this’s response’s status + // message. + return this[kState].statusText + } + + // Returns response’s headers as Headers. + get headers () { + webidl.brandCheck(this, Response) + + // The headers getter steps are to return this’s headers. 
+ return this[kHeaders] + } + + get body () { + webidl.brandCheck(this, Response) + + return this[kState].body ? this[kState].body.stream : null + } + + get bodyUsed () { + webidl.brandCheck(this, Response) + + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + } + + // Returns a clone of response. + clone () { + webidl.brandCheck(this, Response) + + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || (this.body && this.body.locked)) { + throw webidl.errors.exception({ + header: 'Response.clone', + message: 'Body has already been consumed.' + }) + } + + // 2. Let clonedResponse be the result of cloning this’s response. + const clonedResponse = cloneResponse(this[kState]) + + // 3. Return the result of creating a Response object, given + // clonedResponse, this’s headers’s guard, and this’s relevant Realm. + const clonedResponseObject = new Response() + clonedResponseObject[kState] = clonedResponse + clonedResponseObject[kRealm] = this[kRealm] + clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList + clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] + + return clonedResponseObject + } +} + +mixinBody(Response) + +Object.defineProperties(Response.prototype, { + type: kEnumerableProperty, + url: kEnumerableProperty, + status: kEnumerableProperty, + ok: kEnumerableProperty, + redirected: kEnumerableProperty, + statusText: kEnumerableProperty, + headers: kEnumerableProperty, + clone: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Response', + configurable: true + } +}) + +Object.defineProperties(Response, { + json: kEnumerableProperty, + redirect: kEnumerableProperty, + error: kEnumerableProperty +}) + +// https://fetch.spec.whatwg.org/#concept-response-clone +function cloneResponse (response) { + // To clone a response response, run these steps: + + // 1. If response is a filtered response, then return a new identical + // filtered response whose internal response is a clone of response’s + // internal response. + if (response.internalResponse) { + return filterResponse( + cloneResponse(response.internalResponse), + response.type + ) + } + + // 2. Let newResponse be a copy of response, except for its body. + const newResponse = makeResponse({ ...response, body: null }) + + // 3. If response’s body is non-null, then set newResponse’s body to the + // result of cloning response’s body. + if (response.body != null) { + newResponse.body = cloneBody(response.body) + } + + // 4. Return newResponse. + return newResponse +} + +function makeResponse (init) { + return { + aborted: false, + rangeRequested: false, + timingAllowPassed: false, + requestIncludesCredentials: false, + type: 'default', + status: 200, + timingInfo: null, + cacheState: '', + statusText: '', + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList(), + urlList: init.urlList ? [...init.urlList] : [] + } +} + +function makeNetworkError (reason) { + const isError = isErrorLike(reason) + return makeResponse({ + type: 'error', + status: 0, + error: isError + ? reason + : new Error(reason ? String(reason) : reason), + aborted: reason && reason.name === 'AbortError' + }) +} + +function makeFilteredResponse (response, state) { + state = { + internalResponse: response, + ...state + } + + return new Proxy(response, { + get (target, p) { + return p in state ? 
state[p] : target[p] + }, + set (target, p, value) { + assert(!(p in state)) + target[p] = value + return true + } + }) +} + +// https://fetch.spec.whatwg.org/#concept-filtered-response +function filterResponse (response, type) { + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (type === 'basic') { + // A basic filtered response is a filtered response whose type is "basic" + // and header list excludes any headers in internal response’s header list + // whose name is a forbidden response-header name. + + // Note: undici does not implement forbidden response-header names + return makeFilteredResponse(response, { + type: 'basic', + headersList: response.headersList + }) + } else if (type === 'cors') { + // A CORS filtered response is a filtered response whose type is "cors" + // and header list excludes any headers in internal response’s header + // list whose name is not a CORS-safelisted response-header name, given + // internal response’s CORS-exposed header-name list. + + // Note: undici does not implement CORS-safelisted response-header names + return makeFilteredResponse(response, { + type: 'cors', + headersList: response.headersList + }) + } else if (type === 'opaque') { + // An opaque filtered response is a filtered response whose type is + // "opaque", URL list is the empty list, status is 0, status message + // is the empty byte sequence, header list is empty, and body is null. + + return makeFilteredResponse(response, { + type: 'opaque', + urlList: Object.freeze([]), + status: 0, + statusText: '', + body: null + }) + } else if (type === 'opaqueredirect') { + // An opaque-redirect filtered response is a filtered response whose type + // is "opaqueredirect", status is 0, status message is the empty byte + // sequence, header list is empty, and body is null. + + return makeFilteredResponse(response, { + type: 'opaqueredirect', + status: 0, + statusText: '', + headersList: [], + body: null + }) + } else { + assert(false) + } +} + +// https://fetch.spec.whatwg.org/#appropriate-network-error +function makeAppropriateNetworkError (fetchParams, err = null) { + // 1. Assert: fetchParams is canceled. + assert(isCancelled(fetchParams)) + + // 2. Return an aborted network error if fetchParams is aborted; + // otherwise return a network error. + return isAborted(fetchParams) + ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) + : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) +} + +// https://whatpr.org/fetch/1392.html#initialize-a-response +function initializeResponse (response, init, body) { + // 1. If init["status"] is not in the range 200 to 599, inclusive, then + // throw a RangeError. + if (init.status !== null && (init.status < 200 || init.status > 599)) { + throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') + } + + // 2. If init["statusText"] does not match the reason-phrase token production, + // then throw a TypeError. + if ('statusText' in init && init.statusText != null) { + // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: + // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) + if (!isValidReasonPhrase(String(init.statusText))) { + throw new TypeError('Invalid statusText') + } + } + + // 3. Set response’s response’s status to init["status"]. 
+ if ('status' in init && init.status != null) { + response[kState].status = init.status + } + + // 4. Set response’s response’s status message to init["statusText"]. + if ('statusText' in init && init.statusText != null) { + response[kState].statusText = init.statusText + } + + // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. + if ('headers' in init && init.headers != null) { + fill(response[kHeaders], init.headers) + } + + // 6. If body was given, then: + if (body) { + // 1. If response's status is a null body status, then throw a TypeError. + if (nullBodyStatus.includes(response.status)) { + throw webidl.errors.exception({ + header: 'Response constructor', + message: 'Invalid response status code ' + response.status + }) + } + + // 2. Set response's body to body's body. + response[kState].body = body.body + + // 3. If body's type is non-null and response's header list does not contain + // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. + if (body.type != null && !response[kState].headersList.contains('Content-Type')) { + response[kState].headersList.append('content-type', body.type) + } + } +} + +webidl.converters.ReadableStream = webidl.interfaceConverter( + ReadableStream +) + +webidl.converters.FormData = webidl.interfaceConverter( + FormData +) + +webidl.converters.URLSearchParams = webidl.interfaceConverter( + URLSearchParams +) + +// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit +webidl.converters.XMLHttpRequestBodyInit = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) + } + + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { + return webidl.converters.BufferSource(V) + } + + if (util.isFormDataLike(V)) { + return webidl.converters.FormData(V, { strict: false }) + } + + if (V instanceof URLSearchParams) { + return webidl.converters.URLSearchParams(V) + } + + return webidl.converters.DOMString(V) +} + +// https://fetch.spec.whatwg.org/#bodyinit +webidl.converters.BodyInit = function (V) { + if (V instanceof ReadableStream) { + return webidl.converters.ReadableStream(V) + } + + // Note: the spec doesn't include async iterables, + // this is an undici extension. 
+ if (V?.[Symbol.asyncIterator]) { + return V + } + + return webidl.converters.XMLHttpRequestBodyInit(V) +} + +webidl.converters.ResponseInit = webidl.dictionaryConverter([ + { + key: 'status', + converter: webidl.converters['unsigned short'], + defaultValue: 200 + }, + { + key: 'statusText', + converter: webidl.converters.ByteString, + defaultValue: '' + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + } +]) + +module.exports = { + makeNetworkError, + makeResponse, + makeAppropriateNetworkError, + filterResponse, + Response, + cloneResponse +} diff --git a/node_modules/undici/lib/fetch/symbols.js b/node_modules/undici/lib/fetch/symbols.js new file mode 100644 index 0000000..0b947d5 --- /dev/null +++ b/node_modules/undici/lib/fetch/symbols.js @@ -0,0 +1,10 @@ +'use strict' + +module.exports = { + kUrl: Symbol('url'), + kHeaders: Symbol('headers'), + kSignal: Symbol('signal'), + kState: Symbol('state'), + kGuard: Symbol('guard'), + kRealm: Symbol('realm') +} diff --git a/node_modules/undici/lib/fetch/util.js b/node_modules/undici/lib/fetch/util.js new file mode 100644 index 0000000..b12142c --- /dev/null +++ b/node_modules/undici/lib/fetch/util.js @@ -0,0 +1,1071 @@ +'use strict' + +const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = require('./constants') +const { getGlobalOrigin } = require('./global') +const { performance } = require('perf_hooks') +const { isBlobLike, toUSVString, ReadableStreamFrom } = require('../core/util') +const assert = require('assert') +const { isUint8Array } = require('util/types') + +// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable +/** @type {import('crypto')|undefined} */ +let crypto + +try { + crypto = require('crypto') +} catch { + +} + +function responseURL (response) { + // https://fetch.spec.whatwg.org/#responses + // A response has an associated URL. It is a pointer to the last URL + // in response’s URL list and null if response’s URL list is empty. + const urlList = response.urlList + const length = urlList.length + return length === 0 ? null : urlList[length - 1].toString() +} + +// https://fetch.spec.whatwg.org/#concept-response-location-url +function responseLocationURL (response, requestFragment) { + // 1. If response’s status is not a redirect status, then return null. + if (!redirectStatusSet.has(response.status)) { + return null + } + + // 2. Let location be the result of extracting header list values given + // `Location` and response’s header list. + let location = response.headersList.get('location') + + // 3. If location is a header value, then set location to the result of + // parsing location with response’s URL. + if (location !== null && isValidHeaderValue(location)) { + location = new URL(location, responseURL(response)) + } + + // 4. If location is a URL whose fragment is null, then set location’s + // fragment to requestFragment. + if (location && !location.hash) { + location.hash = requestFragment + } + + // 5. Return location. + return location +} + +/** @returns {URL} */ +function requestCurrentURL (request) { + return request.urlList[request.urlList.length - 1] +} + +function requestBadPort (request) { + // 1. Let url be request’s current URL. + const url = requestCurrentURL(request) + + // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, + // then return blocked. + if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { + return 'blocked' + } + + // 3. Return allowed. 
+ return 'allowed' +} + +function isErrorLike (object) { + return object instanceof Error || ( + object?.constructor?.name === 'Error' || + object?.constructor?.name === 'DOMException' + ) +} + +// Check whether |statusText| is a ByteString and +// matches the Reason-Phrase token production. +// RFC 2616: https://tools.ietf.org/html/rfc2616 +// RFC 7230: https://tools.ietf.org/html/rfc7230 +// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" +// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 +function isValidReasonPhrase (statusText) { + for (let i = 0; i < statusText.length; ++i) { + const c = statusText.charCodeAt(i) + if ( + !( + ( + c === 0x09 || // HTAB + (c >= 0x20 && c <= 0x7e) || // SP / VCHAR + (c >= 0x80 && c <= 0xff) + ) // obs-text + ) + ) { + return false + } + } + return true +} + +/** + * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 + * @param {number} c + */ +function isTokenCharCode (c) { + switch (c) { + case 0x22: + case 0x28: + case 0x29: + case 0x2c: + case 0x2f: + case 0x3a: + case 0x3b: + case 0x3c: + case 0x3d: + case 0x3e: + case 0x3f: + case 0x40: + case 0x5b: + case 0x5c: + case 0x5d: + case 0x7b: + case 0x7d: + // DQUOTE and "(),/:;<=>?@[\]{}" + return false + default: + // VCHAR %x21-7E + return c >= 0x21 && c <= 0x7e + } +} + +/** + * @param {string} characters + */ +function isValidHTTPToken (characters) { + if (characters.length === 0) { + return false + } + for (let i = 0; i < characters.length; ++i) { + if (!isTokenCharCode(characters.charCodeAt(i))) { + return false + } + } + return true +} + +/** + * @see https://fetch.spec.whatwg.org/#header-name + * @param {string} potentialValue + */ +function isValidHeaderName (potentialValue) { + return isValidHTTPToken(potentialValue) +} + +/** + * @see https://fetch.spec.whatwg.org/#header-value + * @param {string} potentialValue + */ +function isValidHeaderValue (potentialValue) { + // - Has no leading or trailing HTTP tab or space bytes. + // - Contains no 0x00 (NUL) or HTTP newline bytes. + if ( + potentialValue.startsWith('\t') || + potentialValue.startsWith(' ') || + potentialValue.endsWith('\t') || + potentialValue.endsWith(' ') + ) { + return false + } + + if ( + potentialValue.includes('\0') || + potentialValue.includes('\r') || + potentialValue.includes('\n') + ) { + return false + } + + return true +} + +// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect +function setRequestReferrerPolicyOnRedirect (request, actualResponse) { + // Given a request request and a response actualResponse, this algorithm + // updates request’s referrer policy according to the Referrer-Policy + // header (if any) in actualResponse. + + // 1. Let policy be the result of executing § 8.1 Parse a referrer policy + // from a Referrer-Policy header on actualResponse. + + // 8.1 Parse a referrer policy from a Referrer-Policy header + // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. + const { headersList } = actualResponse + // 2. Let policy be the empty string. + // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. + // 4. Return policy. + const policyHeader = (headersList.get('referrer-policy') ?? 
'').split(',') + + // Note: As the referrer-policy can contain multiple policies + // separated by comma, we need to loop through all of them + // and pick the first valid one. + // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy + let policy = '' + if (policyHeader.length > 0) { + // The right-most policy takes precedence. + // The left-most policy is the fallback. + for (let i = policyHeader.length; i !== 0; i--) { + const token = policyHeader[i - 1].trim() + if (referrerPolicyTokens.has(token)) { + policy = token + break + } + } + } + + // 2. If policy is not the empty string, then set request’s referrer policy to policy. + if (policy !== '') { + request.referrerPolicy = policy + } +} + +// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check +function crossOriginResourcePolicyCheck () { + // TODO + return 'allowed' +} + +// https://fetch.spec.whatwg.org/#concept-cors-check +function corsCheck () { + // TODO + return 'success' +} + +// https://fetch.spec.whatwg.org/#concept-tao-check +function TAOCheck () { + // TODO + return 'success' +} + +function appendFetchMetadata (httpRequest) { + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header + + // 1. Assert: r’s url is a potentially trustworthy URL. + // TODO + + // 2. Let header be a Structured Header whose value is a token. + let header = null + + // 3. Set header’s value to r’s mode. + header = httpRequest.mode + + // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. + httpRequest.headersList.set('sec-fetch-mode', header) + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header + // TODO +} + +// https://fetch.spec.whatwg.org/#append-a-request-origin-header +function appendRequestOriginHeader (request) { + // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. + let serializedOrigin = request.origin + + // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. + if (request.responseTainting === 'cors' || request.mode === 'websocket') { + if (serializedOrigin) { + request.headersList.append('origin', serializedOrigin) + } + + // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then: + } else if (request.method !== 'GET' && request.method !== 'HEAD') { + // 1. Switch on request’s referrer policy: + switch (request.referrerPolicy) { + case 'no-referrer': + // Set serializedOrigin to `null`. + serializedOrigin = null + break + case 'no-referrer-when-downgrade': + case 'strict-origin': + case 'strict-origin-when-cross-origin': + // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. + if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { + serializedOrigin = null + } + break + case 'same-origin': + // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. + if (!sameOrigin(request, requestCurrentURL(request))) { + serializedOrigin = null + } + break + default: + // Do nothing. + } + + if (serializedOrigin) { + // 2. Append (`Origin`, serializedOrigin) to request’s header list. 
+ request.headersList.append('origin', serializedOrigin) + } + } +} + +function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { + // TODO + return performance.now() +} + +// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info +function createOpaqueTimingInfo (timingInfo) { + return { + startTime: timingInfo.startTime ?? 0, + redirectStartTime: 0, + redirectEndTime: 0, + postRedirectStartTime: timingInfo.startTime ?? 0, + finalServiceWorkerStartTime: 0, + finalNetworkResponseStartTime: 0, + finalNetworkRequestStartTime: 0, + endTime: 0, + encodedBodySize: 0, + decodedBodySize: 0, + finalConnectionTimingInfo: null + } +} + +// https://html.spec.whatwg.org/multipage/origin.html#policy-container +function makePolicyContainer () { + // Note: the fetch spec doesn't make use of embedder policy or CSP list + return { + referrerPolicy: 'strict-origin-when-cross-origin' + } +} + +// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container +function clonePolicyContainer (policyContainer) { + return { + referrerPolicy: policyContainer.referrerPolicy + } +} + +// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer +function determineRequestsReferrer (request) { + // 1. Let policy be request's referrer policy. + const policy = request.referrerPolicy + + // Note: policy cannot (shouldn't) be null or an empty string. + assert(policy) + + // 2. Let environment be request’s client. + + let referrerSource = null + + // 3. Switch on request’s referrer: + if (request.referrer === 'client') { + // Note: node isn't a browser and doesn't implement document/iframes, + // so we bypass this step and replace it with our own. + + const globalOrigin = getGlobalOrigin() + + if (!globalOrigin || globalOrigin.origin === 'null') { + return 'no-referrer' + } + + // note: we need to clone it as it's mutated + referrerSource = new URL(globalOrigin) + } else if (request.referrer instanceof URL) { + // Let referrerSource be request’s referrer. + referrerSource = request.referrer + } + + // 4. Let request’s referrerURL be the result of stripping referrerSource for + // use as a referrer. + let referrerURL = stripURLForReferrer(referrerSource) + + // 5. Let referrerOrigin be the result of stripping referrerSource for use as + // a referrer, with the origin-only flag set to true. + const referrerOrigin = stripURLForReferrer(referrerSource, true) + + // 6. If the result of serializing referrerURL is a string whose length is + // greater than 4096, set referrerURL to referrerOrigin. + if (referrerURL.toString().length > 4096) { + referrerURL = referrerOrigin + } + + const areSameOrigin = sameOrigin(request, referrerURL) + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && + !isURLPotentiallyTrustworthy(request.url) + + // 8. Execute the switch statements corresponding to the value of policy: + switch (policy) { + case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) + case 'unsafe-url': return referrerURL + case 'same-origin': + return areSameOrigin ? referrerOrigin : 'no-referrer' + case 'origin-when-cross-origin': + return areSameOrigin ? referrerURL : referrerOrigin + case 'strict-origin-when-cross-origin': { + const currentURL = requestCurrentURL(request) + + // 1. If the origin of referrerURL and the origin of request’s current + // URL are the same, then return referrerURL. + if (sameOrigin(referrerURL, currentURL)) { + return referrerURL + } + + // 2. 
If referrerURL is a potentially trustworthy URL and request’s + // current URL is not a potentially trustworthy URL, then return no + // referrer. + if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { + return 'no-referrer' + } + + // 3. Return referrerOrigin. + return referrerOrigin + } + case 'strict-origin': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + case 'no-referrer-when-downgrade': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + + default: // eslint-disable-line + return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin + } +} + +/** + * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url + * @param {URL} url + * @param {boolean|undefined} originOnly + */ +function stripURLForReferrer (url, originOnly) { + // 1. Assert: url is a URL. + assert(url instanceof URL) + + // 2. If url’s scheme is a local scheme, then return no referrer. + if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { + return 'no-referrer' + } + + // 3. Set url’s username to the empty string. + url.username = '' + + // 4. Set url’s password to the empty string. + url.password = '' + + // 5. Set url’s fragment to null. + url.hash = '' + + // 6. If the origin-only flag is true, then: + if (originOnly) { + // 1. Set url’s path to « the empty string ». + url.pathname = '' + + // 2. Set url’s query to null. + url.search = '' + } + + // 7. Return url. + return url +} + +function isURLPotentiallyTrustworthy (url) { + if (!(url instanceof URL)) { + return false + } + + // If child of about, return true + if (url.href === 'about:blank' || url.href === 'about:srcdoc') { + return true + } + + // If scheme is data, return true + if (url.protocol === 'data:') return true + + // If file, return true + if (url.protocol === 'file:') return true + + return isOriginPotentiallyTrustworthy(url.origin) + + function isOriginPotentiallyTrustworthy (origin) { + // If origin is explicitly null, return false + if (origin == null || origin === 'null') return false + + const originAsURL = new URL(origin) + + // If secure, return true + if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { + return true + } + + // If localhost or variants, return true + if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || + (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || + (originAsURL.hostname.endsWith('.localhost'))) { + return true + } + + // If any other, return false + return false + } +} + +/** + * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist + * @param {Uint8Array} bytes + * @param {string} metadataList + */ +function bytesMatch (bytes, metadataList) { + // If node is not built with OpenSSL support, we cannot check + // a request's integrity, so allow it by default (the spec will + // allow requests if an invalid hash is given, as precedence). + /* istanbul ignore if: only if node is built with --without-ssl */ + if (crypto === undefined) { + return true + } + + // 1. Let parsedMetadata be the result of parsing metadataList. 
+ const parsedMetadata = parseMetadata(metadataList)
+
+ // 2. If parsedMetadata is no metadata, return true.
+ if (parsedMetadata === 'no metadata') {
+ return true
+ }
+
+ // 3. If parsedMetadata is the empty set, return true.
+ if (parsedMetadata.length === 0) {
+ return true
+ }
+
+ // 4. Let metadata be the result of getting the strongest
+ // metadata from parsedMetadata.
+ const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
+ // get the strongest algorithm
+ const strongest = list[0].algo
+ // get all entries that use the strongest algorithm; ignore weaker
+ const metadata = list.filter((item) => item.algo === strongest)
+
+ // 5. For each item in metadata:
+ for (const item of metadata) {
+ // 1. Let algorithm be the alg component of item.
+ const algorithm = item.algo
+
+ // 2. Let expectedValue be the val component of item.
+ let expectedValue = item.hash
+
+ // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
+ // "be liberal with padding". This is annoying, and it's not even in the spec.
+
+ if (expectedValue.endsWith('==')) {
+ expectedValue = expectedValue.slice(0, -2)
+ }
+
+ // 3. Let actualValue be the result of applying algorithm to bytes.
+ let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+
+ if (actualValue.endsWith('==')) {
+ actualValue = actualValue.slice(0, -2)
+ }
+
+ // 4. If actualValue is a case-sensitive match for expectedValue,
+ // return true.
+ if (actualValue === expectedValue) {
+ return true
+ }
+
+ let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
+
+ if (actualBase64URL.endsWith('==')) {
+ actualBase64URL = actualBase64URL.slice(0, -2)
+ }
+
+ if (actualBase64URL === expectedValue) {
+ return true
+ }
+ }
+
+ // 6. Return false.
+ return false
+}
+
+// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
+// https://www.w3.org/TR/CSP2/#source-list-syntax
+// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
+const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ * @param {string} metadata
+ */
+function parseMetadata (metadata) {
+ // 1. Let result be the empty set.
+ /** @type {{ algo: string, hash: string }[]} */
+ const result = []
+
+ // 2. Let empty be equal to true.
+ let empty = true
+
+ const supportedHashes = crypto.getHashes()
+
+ // 3. For each token returned by splitting metadata on spaces:
+ for (const token of metadata.split(' ')) {
+ // 1. Set empty to false.
+ empty = false
+
+ // 2. Parse token as a hash-with-options.
+ const parsedToken = parseHashWithOptions.exec(token)
+
+ // 3. If token does not parse, continue to the next token.
+ if (parsedToken === null || parsedToken.groups === undefined) {
+ // Note: Chromium blocks the request at this point, but Firefox
+ // gives a warning that an invalid integrity was given. The
+ // correct behavior is to ignore these, and subsequently not
+ // check the integrity of the resource.
+ continue
+ }
+
+ // 4. Let algorithm be the hash-algo component of token.
+ const algorithm = parsedToken.groups.algo
+
+ // 5. If algorithm is a hash function recognized by the user
+ // agent, add the parsed token to result.
+ if (supportedHashes.includes(algorithm.toLowerCase())) {
+ result.push(parsedToken.groups)
+ }
+ }
+
+ // 4. Return no metadata if empty is true, otherwise return result.
+ if (empty === true) { + return 'no metadata' + } + + return result +} + +// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request +function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { + // TODO +} + +/** + * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin} + * @param {URL} A + * @param {URL} B + */ +function sameOrigin (A, B) { + // 1. If A and B are the same opaque origin, then return true. + if (A.origin === B.origin && A.origin === 'null') { + return true + } + + // 2. If A and B are both tuple origins and their schemes, + // hosts, and port are identical, then return true. + if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { + return true + } + + // 3. Return false. + return false +} + +function createDeferredPromise () { + let res + let rej + const promise = new Promise((resolve, reject) => { + res = resolve + rej = reject + }) + + return { promise, resolve: res, reject: rej } +} + +function isAborted (fetchParams) { + return fetchParams.controller.state === 'aborted' +} + +function isCancelled (fetchParams) { + return fetchParams.controller.state === 'aborted' || + fetchParams.controller.state === 'terminated' +} + +const normalizeMethodRecord = { + delete: 'DELETE', + DELETE: 'DELETE', + get: 'GET', + GET: 'GET', + head: 'HEAD', + HEAD: 'HEAD', + options: 'OPTIONS', + OPTIONS: 'OPTIONS', + post: 'POST', + POST: 'POST', + put: 'PUT', + PUT: 'PUT' +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(normalizeMethodRecord, null) + +/** + * @see https://fetch.spec.whatwg.org/#concept-method-normalize + * @param {string} method + */ +function normalizeMethod (method) { + return normalizeMethodRecord[method.toLowerCase()] ?? method +} + +// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string +function serializeJavascriptValueToJSONString (value) { + // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). + const result = JSON.stringify(value) + + // 2. If result is undefined, then throw a TypeError. + if (result === undefined) { + throw new TypeError('Value is not JSON serializable') + } + + // 3. Assert: result is a string. + assert(typeof result === 'string') + + // 4. Return result. + return result +} + +// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object +const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) + +/** + * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object + * @param {() => unknown[]} iterator + * @param {string} name name of the instance + * @param {'key'|'value'|'key+value'} kind + */ +function makeIterator (iterator, name, kind) { + const object = { + index: 0, + kind, + target: iterator + } + + const i = { + next () { + // 1. Let interface be the interface for which the iterator prototype object exists. + + // 2. Let thisValue be the this value. + + // 3. Let object be ? ToObject(thisValue). + + // 4. If object is a platform object, then perform a security + // check, passing: + + // 5. If object is not a default iterator object for interface, + // then throw a TypeError. + if (Object.getPrototypeOf(this) !== i) { + throw new TypeError( + `'next' called on an object that does not implement interface ${name} Iterator.` + ) + } + + // 6. Let index be object’s index. + // 7. Let kind be object’s kind. + // 8. Let values be object’s target's value pairs to iterate over. 
+ const { index, kind, target } = object + const values = target() + + // 9. Let len be the length of values. + const len = values.length + + // 10. If index is greater than or equal to len, then return + // CreateIterResultObject(undefined, true). + if (index >= len) { + return { value: undefined, done: true } + } + + // 11. Let pair be the entry in values at index index. + const pair = values[index] + + // 12. Set object’s index to index + 1. + object.index = index + 1 + + // 13. Return the iterator result for pair and kind. + return iteratorResult(pair, kind) + }, + // The class string of an iterator prototype object for a given interface is the + // result of concatenating the identifier of the interface and the string " Iterator". + [Symbol.toStringTag]: `${name} Iterator` + } + + // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. + Object.setPrototypeOf(i, esIteratorPrototype) + // esIteratorPrototype needs to be the prototype of i + // which is the prototype of an empty object. Yes, it's confusing. + return Object.setPrototypeOf({}, i) +} + +// https://webidl.spec.whatwg.org/#iterator-result +function iteratorResult (pair, kind) { + let result + + // 1. Let result be a value determined by the value of kind: + switch (kind) { + case 'key': { + // 1. Let idlKey be pair’s key. + // 2. Let key be the result of converting idlKey to an + // ECMAScript value. + // 3. result is key. + result = pair[0] + break + } + case 'value': { + // 1. Let idlValue be pair’s value. + // 2. Let value be the result of converting idlValue to + // an ECMAScript value. + // 3. result is value. + result = pair[1] + break + } + case 'key+value': { + // 1. Let idlKey be pair’s key. + // 2. Let idlValue be pair’s value. + // 3. Let key be the result of converting idlKey to an + // ECMAScript value. + // 4. Let value be the result of converting idlValue to + // an ECMAScript value. + // 5. Let array be ! ArrayCreate(2). + // 6. Call ! CreateDataProperty(array, "0", key). + // 7. Call ! CreateDataProperty(array, "1", value). + // 8. result is array. + result = pair + break + } + } + + // 2. Return CreateIterResultObject(result, false). + return { value: result, done: false } +} + +/** + * @see https://fetch.spec.whatwg.org/#body-fully-read + */ +async function fullyReadBody (body, processBody, processBodyError) { + // 1. If taskDestination is null, then set taskDestination to + // the result of starting a new parallel queue. + + // 2. Let successSteps given a byte sequence bytes be to queue a + // fetch task to run processBody given bytes, with taskDestination. + const successSteps = processBody + + // 3. Let errorSteps be to queue a fetch task to run processBodyError, + // with taskDestination. + const errorSteps = processBodyError + + // 4. Let reader be the result of getting a reader for body’s stream. + // If that threw an exception, then run errorSteps with that + // exception and return. + let reader + + try { + reader = body.stream.getReader() + } catch (e) { + errorSteps(e) + return + } + + // 5. Read all bytes from reader, given successSteps and errorSteps. 
+ try { + const result = await readAllBytes(reader) + successSteps(result) + } catch (e) { + errorSteps(e) + } +} + +/** @type {ReadableStream} */ +let ReadableStream = globalThis.ReadableStream + +function isReadableStreamLike (stream) { + if (!ReadableStream) { + ReadableStream = require('stream/web').ReadableStream + } + + return stream instanceof ReadableStream || ( + stream[Symbol.toStringTag] === 'ReadableStream' && + typeof stream.tee === 'function' + ) +} + +const MAXIMUM_ARGUMENT_LENGTH = 65535 + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-decode + * @param {number[]|Uint8Array} input + */ +function isomorphicDecode (input) { + // 1. To isomorphic decode a byte sequence input, return a string whose code point + // length is equal to input’s length and whose code points have the same values + // as the values of input’s bytes, in the same order. + + if (input.length < MAXIMUM_ARGUMENT_LENGTH) { + return String.fromCharCode(...input) + } + + return input.reduce((previous, current) => previous + String.fromCharCode(current), '') +} + +/** + * @param {ReadableStreamController} controller + */ +function readableStreamClose (controller) { + try { + controller.close() + } catch (err) { + // TODO: add comment explaining why this error occurs. + if (!err.message.includes('Controller is already closed')) { + throw err + } + } +} + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-encode + * @param {string} input + */ +function isomorphicEncode (input) { + // 1. Assert: input contains no code points greater than U+00FF. + for (let i = 0; i < input.length; i++) { + assert(input.charCodeAt(i) <= 0xFF) + } + + // 2. Return a byte sequence whose length is equal to input’s code + // point length and whose bytes have the same values as the + // values of input’s code points, in the same order + return input +} + +/** + * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes + * @see https://streams.spec.whatwg.org/#read-loop + * @param {ReadableStreamDefaultReader} reader + */ +async function readAllBytes (reader) { + const bytes = [] + let byteLength = 0 + + while (true) { + const { done, value: chunk } = await reader.read() + + if (done) { + // 1. Call successSteps with bytes. + return Buffer.concat(bytes, byteLength) + } + + // 1. If chunk is not a Uint8Array object, call failureSteps + // with a TypeError and abort these steps. + if (!isUint8Array(chunk)) { + throw new TypeError('Received non-Uint8Array chunk') + } + + // 2. Append the bytes represented by chunk to bytes. + bytes.push(chunk) + byteLength += chunk.length + + // 3. Read-loop given reader, bytes, successSteps, and failureSteps. + } +} + +/** + * @see https://fetch.spec.whatwg.org/#is-local + * @param {URL} url + */ +function urlIsLocal (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' +} + +/** + * @param {string|URL} url + */ +function urlHasHttpsScheme (url) { + if (typeof url === 'string') { + return url.startsWith('https:') + } + + return url.protocol === 'https:' +} + +/** + * @see https://fetch.spec.whatwg.org/#http-scheme + * @param {URL} url + */ +function urlIsHttpHttpsScheme (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'http:' || protocol === 'https:' +} + +/** + * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. 
+ */ +const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) + +module.exports = { + isAborted, + isCancelled, + createDeferredPromise, + ReadableStreamFrom, + toUSVString, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + coarsenedSharedCurrentTime, + determineRequestsReferrer, + makePolicyContainer, + clonePolicyContainer, + appendFetchMetadata, + appendRequestOriginHeader, + TAOCheck, + corsCheck, + crossOriginResourcePolicyCheck, + createOpaqueTimingInfo, + setRequestReferrerPolicyOnRedirect, + isValidHTTPToken, + requestBadPort, + requestCurrentURL, + responseURL, + responseLocationURL, + isBlobLike, + isURLPotentiallyTrustworthy, + isValidReasonPhrase, + sameOrigin, + normalizeMethod, + serializeJavascriptValueToJSONString, + makeIterator, + isValidHeaderName, + isValidHeaderValue, + hasOwn, + isErrorLike, + fullyReadBody, + bytesMatch, + isReadableStreamLike, + readableStreamClose, + isomorphicEncode, + isomorphicDecode, + urlIsLocal, + urlHasHttpsScheme, + urlIsHttpHttpsScheme, + readAllBytes, + normalizeMethodRecord +} diff --git a/node_modules/undici/lib/fetch/webidl.js b/node_modules/undici/lib/fetch/webidl.js new file mode 100644 index 0000000..6fcf2ab --- /dev/null +++ b/node_modules/undici/lib/fetch/webidl.js @@ -0,0 +1,646 @@ +'use strict' + +const { types } = require('util') +const { hasOwn, toUSVString } = require('./util') + +/** @type {import('../../types/webidl').Webidl} */ +const webidl = {} +webidl.converters = {} +webidl.util = {} +webidl.errors = {} + +webidl.errors.exception = function (message) { + return new TypeError(`${message.header}: ${message.message}`) +} + +webidl.errors.conversionFailed = function (context) { + const plural = context.types.length === 1 ? '' : ' one of' + const message = + `${context.argument} could not be converted to` + + `${plural}: ${context.types.join(', ')}.` + + return webidl.errors.exception({ + header: context.prefix, + message + }) +} + +webidl.errors.invalidArgument = function (context) { + return webidl.errors.exception({ + header: context.prefix, + message: `"${context.value}" is an invalid ${context.type}.` + }) +} + +// https://webidl.spec.whatwg.org/#implements +webidl.brandCheck = function (V, I, opts = undefined) { + if (opts?.strict !== false && !(V instanceof I)) { + throw new TypeError('Illegal invocation') + } else { + return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] + } +} + +webidl.argumentLengthCheck = function ({ length }, min, ctx) { + if (length < min) { + throw webidl.errors.exception({ + message: `${min} argument${min !== 1 ? 's' : ''} required, ` + + `but${length ? ' only' : ''} ${length} found.`, + ...ctx + }) + } +} + +webidl.illegalConstructor = function () { + throw webidl.errors.exception({ + header: 'TypeError', + message: 'Illegal constructor' + }) +} + +// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values +webidl.util.Type = function (V) { + switch (typeof V) { + case 'undefined': return 'Undefined' + case 'boolean': return 'Boolean' + case 'string': return 'String' + case 'symbol': return 'Symbol' + case 'number': return 'Number' + case 'bigint': return 'BigInt' + case 'function': + case 'object': { + if (V === null) { + return 'Null' + } + + return 'Object' + } + } +} + +// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint +webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { + let upperBound + let lowerBound + + // 1. If bitLength is 64, then: + if (bitLength === 64) { + // 1. 
Let upperBound be 2^53 − 1. + upperBound = Math.pow(2, 53) - 1 + + // 2. If signedness is "unsigned", then let lowerBound be 0. + if (signedness === 'unsigned') { + lowerBound = 0 + } else { + // 3. Otherwise let lowerBound be −2^53 + 1. + lowerBound = Math.pow(-2, 53) + 1 + } + } else if (signedness === 'unsigned') { + // 2. Otherwise, if signedness is "unsigned", then: + + // 1. Let lowerBound be 0. + lowerBound = 0 + + // 2. Let upperBound be 2^bitLength − 1. + upperBound = Math.pow(2, bitLength) - 1 + } else { + // 3. Otherwise: + + // 1. Let lowerBound be -2^bitLength − 1. + lowerBound = Math.pow(-2, bitLength) - 1 + + // 2. Let upperBound be 2^bitLength − 1 − 1. + upperBound = Math.pow(2, bitLength - 1) - 1 + } + + // 4. Let x be ? ToNumber(V). + let x = Number(V) + + // 5. If x is −0, then set x to +0. + if (x === 0) { + x = 0 + } + + // 6. If the conversion is to an IDL type associated + // with the [EnforceRange] extended attribute, then: + if (opts.enforceRange === true) { + // 1. If x is NaN, +∞, or −∞, then throw a TypeError. + if ( + Number.isNaN(x) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Could not convert ${V} to an integer.` + }) + } + + // 2. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) + + // 3. If x < lowerBound or x > upperBound, then + // throw a TypeError. + if (x < lowerBound || x > upperBound) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` + }) + } + + // 4. Return x. + return x + } + + // 7. If x is not NaN and the conversion is to an IDL + // type associated with the [Clamp] extended + // attribute, then: + if (!Number.isNaN(x) && opts.clamp === true) { + // 1. Set x to min(max(x, lowerBound), upperBound). + x = Math.min(Math.max(x, lowerBound), upperBound) + + // 2. Round x to the nearest integer, choosing the + // even integer if it lies halfway between two, + // and choosing +0 rather than −0. + if (Math.floor(x) % 2 === 0) { + x = Math.floor(x) + } else { + x = Math.ceil(x) + } + + // 3. Return x. + return x + } + + // 8. If x is NaN, +0, +∞, or −∞, then return +0. + if ( + Number.isNaN(x) || + (x === 0 && Object.is(0, x)) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + return 0 + } + + // 9. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) + + // 10. Set x to x modulo 2^bitLength. + x = x % Math.pow(2, bitLength) + + // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, + // then return x − 2^bitLength. + if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { + return x - Math.pow(2, bitLength) + } + + // 12. Otherwise, return x. + return x +} + +// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart +webidl.util.IntegerPart = function (n) { + // 1. Let r be floor(abs(n)). + const r = Math.floor(Math.abs(n)) + + // 2. If n < 0, then return -1 × r. + if (n < 0) { + return -1 * r + } + + // 3. Otherwise, return r. + return r +} + +// https://webidl.spec.whatwg.org/#es-sequence +webidl.sequenceConverter = function (converter) { + return (V) => { + // 1. If Type(V) is not Object, throw a TypeError. + if (webidl.util.Type(V) !== 'Object') { + throw webidl.errors.exception({ + header: 'Sequence', + message: `Value of type ${webidl.util.Type(V)} is not an Object.` + }) + } + + // 2. Let method be ? GetMethod(V, @@iterator). 
+ /** @type {Generator} */ + const method = V?.[Symbol.iterator]?.() + const seq = [] + + // 3. If method is undefined, throw a TypeError. + if ( + method === undefined || + typeof method.next !== 'function' + ) { + throw webidl.errors.exception({ + header: 'Sequence', + message: 'Object is not an iterator.' + }) + } + + // https://webidl.spec.whatwg.org/#create-sequence-from-iterable + while (true) { + const { done, value } = method.next() + + if (done) { + break + } + + seq.push(converter(value)) + } + + return seq + } +} + +// https://webidl.spec.whatwg.org/#es-to-record +webidl.recordConverter = function (keyConverter, valueConverter) { + return (O) => { + // 1. If Type(O) is not Object, throw a TypeError. + if (webidl.util.Type(O) !== 'Object') { + throw webidl.errors.exception({ + header: 'Record', + message: `Value of type ${webidl.util.Type(O)} is not an Object.` + }) + } + + // 2. Let result be a new empty instance of record. + const result = {} + + if (!types.isProxy(O)) { + // Object.keys only returns enumerable properties + const keys = Object.keys(O) + + for (const key of keys) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + + // 5. Return result. + return result + } + + // 3. Let keys be ? O.[[OwnPropertyKeys]](). + const keys = Reflect.ownKeys(O) + + // 4. For each key of keys. + for (const key of keys) { + // 1. Let desc be ? O.[[GetOwnProperty]](key). + const desc = Reflect.getOwnPropertyDescriptor(O, key) + + // 2. If desc is not undefined and desc.[[Enumerable]] is true: + if (desc?.enumerable) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + } + + // 5. Return result. + return result + } +} + +webidl.interfaceConverter = function (i) { + return (V, opts = {}) => { + if (opts.strict !== false && !(V instanceof i)) { + throw webidl.errors.exception({ + header: i.name, + message: `Expected ${V} to be an instance of ${i.name}.` + }) + } + + return V + } +} + +webidl.dictionaryConverter = function (converters) { + return (dictionary) => { + const type = webidl.util.Type(dictionary) + const dict = {} + + if (type === 'Null' || type === 'Undefined') { + return dict + } else if (type !== 'Object') { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` + }) + } + + for (const options of converters) { + const { key, defaultValue, required, converter } = options + + if (required === true) { + if (!hasOwn(dictionary, key)) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Missing required key "${key}".` + }) + } + } + + let value = dictionary[key] + const hasDefault = hasOwn(options, 'defaultValue') + + // Only use defaultValue if value is undefined and + // a defaultValue options was provided. + if (hasDefault && value !== null) { + value = value ?? defaultValue + } + + // A key can be optional and have no default value. 
+ // When this happens, do not perform a conversion, + // and do not assign the key a value. + if (required || hasDefault || value !== undefined) { + value = converter(value) + + if ( + options.allowedValues && + !options.allowedValues.includes(value) + ) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` + }) + } + + dict[key] = value + } + } + + return dict + } +} + +webidl.nullableConverter = function (converter) { + return (V) => { + if (V === null) { + return V + } + + return converter(V) + } +} + +// https://webidl.spec.whatwg.org/#es-DOMString +webidl.converters.DOMString = function (V, opts = {}) { + // 1. If V is null and the conversion is to an IDL type + // associated with the [LegacyNullToEmptyString] + // extended attribute, then return the DOMString value + // that represents the empty string. + if (V === null && opts.legacyNullToEmptyString) { + return '' + } + + // 2. Let x be ? ToString(V). + if (typeof V === 'symbol') { + throw new TypeError('Could not convert argument of type symbol to string.') + } + + // 3. Return the IDL DOMString value that represents the + // same sequence of code units as the one the + // ECMAScript String value x represents. + return String(V) +} + +// https://webidl.spec.whatwg.org/#es-ByteString +webidl.converters.ByteString = function (V) { + // 1. Let x be ? ToString(V). + // Note: DOMString converter perform ? ToString(V) + const x = webidl.converters.DOMString(V) + + // 2. If the value of any element of x is greater than + // 255, then throw a TypeError. + for (let index = 0; index < x.length; index++) { + if (x.charCodeAt(index) > 255) { + throw new TypeError( + 'Cannot convert argument to a ByteString because the character at ' + + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` + ) + } + } + + // 3. Return an IDL ByteString value whose length is the + // length of x, and where the value of each element is + // the value of the corresponding element of x. + return x +} + +// https://webidl.spec.whatwg.org/#es-USVString +webidl.converters.USVString = toUSVString + +// https://webidl.spec.whatwg.org/#es-boolean +webidl.converters.boolean = function (V) { + // 1. Let x be the result of computing ToBoolean(V). + const x = Boolean(V) + + // 2. Return the IDL boolean value that is the one that represents + // the same truth value as the ECMAScript Boolean value x. + return x +} + +// https://webidl.spec.whatwg.org/#es-any +webidl.converters.any = function (V) { + return V +} + +// https://webidl.spec.whatwg.org/#es-long-long +webidl.converters['long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "signed"). + const x = webidl.util.ConvertToInt(V, 64, 'signed') + + // 2. Return the IDL long long value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long-long +webidl.converters['unsigned long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). + const x = webidl.util.ConvertToInt(V, 64, 'unsigned') + + // 2. Return the IDL unsigned long long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long +webidl.converters['unsigned long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). + const x = webidl.util.ConvertToInt(V, 32, 'unsigned') + + // 2. 
Return the IDL unsigned long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-short +webidl.converters['unsigned short'] = function (V, opts) { + // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). + const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) + + // 2. Return the IDL unsigned short value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#idl-ArrayBuffer +webidl.converters.ArrayBuffer = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have an + // [[ArrayBufferData]] internal slot, then throw a + // TypeError. + // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances + // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances + if ( + webidl.util.Type(V) !== 'Object' || + !types.isAnyArrayBuffer(V) + ) { + throw webidl.errors.conversionFailed({ + prefix: `${V}`, + argument: `${V}`, + types: ['ArrayBuffer'] + }) + } + + // 2. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V) is true, then throw a + // TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V) is true, then throw a + // TypeError. + // Note: resizable ArrayBuffers are currently a proposal. + + // 4. Return the IDL ArrayBuffer value that is a + // reference to the same object as V. + return V +} + +webidl.converters.TypedArray = function (V, T, opts = {}) { + // 1. Let T be the IDL type V is being converted to. + + // 2. If Type(V) is not Object, or V does not have a + // [[TypedArrayName]] internal slot with a value + // equal to T’s name, then throw a TypeError. + if ( + webidl.util.Type(V) !== 'Object' || + !types.isTypedArray(V) || + V.constructor.name !== T.name + ) { + throw webidl.errors.conversionFailed({ + prefix: `${T.name}`, + argument: `${V}`, + types: [T.name] + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 4. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable array buffers are currently a proposal + + // 5. Return the IDL value of type T that is a reference + // to the same object as V. + return V +} + +webidl.converters.DataView = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have a + // [[DataView]] internal slot, then throw a TypeError. + if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) { + throw webidl.errors.exception({ + header: 'DataView', + message: 'Object is not a DataView.' + }) + } + + // 2. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true, + // then throw a TypeError. 
+ if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable ArrayBuffers are currently a proposal + + // 4. Return the IDL DataView value that is a reference + // to the same object as V. + return V +} + +// https://webidl.spec.whatwg.org/#BufferSource +webidl.converters.BufferSource = function (V, opts = {}) { + if (types.isAnyArrayBuffer(V)) { + return webidl.converters.ArrayBuffer(V, opts) + } + + if (types.isTypedArray(V)) { + return webidl.converters.TypedArray(V, V.constructor) + } + + if (types.isDataView(V)) { + return webidl.converters.DataView(V, opts) + } + + throw new TypeError(`Could not convert ${V} to a BufferSource.`) +} + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.ByteString +) + +webidl.converters['sequence>'] = webidl.sequenceConverter( + webidl.converters['sequence'] +) + +webidl.converters['record'] = webidl.recordConverter( + webidl.converters.ByteString, + webidl.converters.ByteString +) + +module.exports = { + webidl +} diff --git a/node_modules/undici/lib/fileapi/encoding.js b/node_modules/undici/lib/fileapi/encoding.js new file mode 100644 index 0000000..1d1d2b6 --- /dev/null +++ b/node_modules/undici/lib/fileapi/encoding.js @@ -0,0 +1,290 @@ +'use strict' + +/** + * @see https://encoding.spec.whatwg.org/#concept-encoding-get + * @param {string|undefined} label + */ +function getEncoding (label) { + if (!label) { + return 'failure' + } + + // 1. Remove any leading and trailing ASCII whitespace from label. + // 2. If label is an ASCII case-insensitive match for any of the + // labels listed in the table below, then return the + // corresponding encoding; otherwise return failure. 
+ switch (label.trim().toLowerCase()) { + case 'unicode-1-1-utf-8': + case 'unicode11utf8': + case 'unicode20utf8': + case 'utf-8': + case 'utf8': + case 'x-unicode20utf8': + return 'UTF-8' + case '866': + case 'cp866': + case 'csibm866': + case 'ibm866': + return 'IBM866' + case 'csisolatin2': + case 'iso-8859-2': + case 'iso-ir-101': + case 'iso8859-2': + case 'iso88592': + case 'iso_8859-2': + case 'iso_8859-2:1987': + case 'l2': + case 'latin2': + return 'ISO-8859-2' + case 'csisolatin3': + case 'iso-8859-3': + case 'iso-ir-109': + case 'iso8859-3': + case 'iso88593': + case 'iso_8859-3': + case 'iso_8859-3:1988': + case 'l3': + case 'latin3': + return 'ISO-8859-3' + case 'csisolatin4': + case 'iso-8859-4': + case 'iso-ir-110': + case 'iso8859-4': + case 'iso88594': + case 'iso_8859-4': + case 'iso_8859-4:1988': + case 'l4': + case 'latin4': + return 'ISO-8859-4' + case 'csisolatincyrillic': + case 'cyrillic': + case 'iso-8859-5': + case 'iso-ir-144': + case 'iso8859-5': + case 'iso88595': + case 'iso_8859-5': + case 'iso_8859-5:1988': + return 'ISO-8859-5' + case 'arabic': + case 'asmo-708': + case 'csiso88596e': + case 'csiso88596i': + case 'csisolatinarabic': + case 'ecma-114': + case 'iso-8859-6': + case 'iso-8859-6-e': + case 'iso-8859-6-i': + case 'iso-ir-127': + case 'iso8859-6': + case 'iso88596': + case 'iso_8859-6': + case 'iso_8859-6:1987': + return 'ISO-8859-6' + case 'csisolatingreek': + case 'ecma-118': + case 'elot_928': + case 'greek': + case 'greek8': + case 'iso-8859-7': + case 'iso-ir-126': + case 'iso8859-7': + case 'iso88597': + case 'iso_8859-7': + case 'iso_8859-7:1987': + case 'sun_eu_greek': + return 'ISO-8859-7' + case 'csiso88598e': + case 'csisolatinhebrew': + case 'hebrew': + case 'iso-8859-8': + case 'iso-8859-8-e': + case 'iso-ir-138': + case 'iso8859-8': + case 'iso88598': + case 'iso_8859-8': + case 'iso_8859-8:1988': + case 'visual': + return 'ISO-8859-8' + case 'csiso88598i': + case 'iso-8859-8-i': + case 'logical': + return 'ISO-8859-8-I' + case 'csisolatin6': + case 'iso-8859-10': + case 'iso-ir-157': + case 'iso8859-10': + case 'iso885910': + case 'l6': + case 'latin6': + return 'ISO-8859-10' + case 'iso-8859-13': + case 'iso8859-13': + case 'iso885913': + return 'ISO-8859-13' + case 'iso-8859-14': + case 'iso8859-14': + case 'iso885914': + return 'ISO-8859-14' + case 'csisolatin9': + case 'iso-8859-15': + case 'iso8859-15': + case 'iso885915': + case 'iso_8859-15': + case 'l9': + return 'ISO-8859-15' + case 'iso-8859-16': + return 'ISO-8859-16' + case 'cskoi8r': + case 'koi': + case 'koi8': + case 'koi8-r': + case 'koi8_r': + return 'KOI8-R' + case 'koi8-ru': + case 'koi8-u': + return 'KOI8-U' + case 'csmacintosh': + case 'mac': + case 'macintosh': + case 'x-mac-roman': + return 'macintosh' + case 'iso-8859-11': + case 'iso8859-11': + case 'iso885911': + case 'tis-620': + case 'windows-874': + return 'windows-874' + case 'cp1250': + case 'windows-1250': + case 'x-cp1250': + return 'windows-1250' + case 'cp1251': + case 'windows-1251': + case 'x-cp1251': + return 'windows-1251' + case 'ansi_x3.4-1968': + case 'ascii': + case 'cp1252': + case 'cp819': + case 'csisolatin1': + case 'ibm819': + case 'iso-8859-1': + case 'iso-ir-100': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'iso_8859-1:1987': + case 'l1': + case 'latin1': + case 'us-ascii': + case 'windows-1252': + case 'x-cp1252': + return 'windows-1252' + case 'cp1253': + case 'windows-1253': + case 'x-cp1253': + return 'windows-1253' + case 'cp1254': + case 'csisolatin5': + 
case 'iso-8859-9': + case 'iso-ir-148': + case 'iso8859-9': + case 'iso88599': + case 'iso_8859-9': + case 'iso_8859-9:1989': + case 'l5': + case 'latin5': + case 'windows-1254': + case 'x-cp1254': + return 'windows-1254' + case 'cp1255': + case 'windows-1255': + case 'x-cp1255': + return 'windows-1255' + case 'cp1256': + case 'windows-1256': + case 'x-cp1256': + return 'windows-1256' + case 'cp1257': + case 'windows-1257': + case 'x-cp1257': + return 'windows-1257' + case 'cp1258': + case 'windows-1258': + case 'x-cp1258': + return 'windows-1258' + case 'x-mac-cyrillic': + case 'x-mac-ukrainian': + return 'x-mac-cyrillic' + case 'chinese': + case 'csgb2312': + case 'csiso58gb231280': + case 'gb2312': + case 'gb_2312': + case 'gb_2312-80': + case 'gbk': + case 'iso-ir-58': + case 'x-gbk': + return 'GBK' + case 'gb18030': + return 'gb18030' + case 'big5': + case 'big5-hkscs': + case 'cn-big5': + case 'csbig5': + case 'x-x-big5': + return 'Big5' + case 'cseucpkdfmtjapanese': + case 'euc-jp': + case 'x-euc-jp': + return 'EUC-JP' + case 'csiso2022jp': + case 'iso-2022-jp': + return 'ISO-2022-JP' + case 'csshiftjis': + case 'ms932': + case 'ms_kanji': + case 'shift-jis': + case 'shift_jis': + case 'sjis': + case 'windows-31j': + case 'x-sjis': + return 'Shift_JIS' + case 'cseuckr': + case 'csksc56011987': + case 'euc-kr': + case 'iso-ir-149': + case 'korean': + case 'ks_c_5601-1987': + case 'ks_c_5601-1989': + case 'ksc5601': + case 'ksc_5601': + case 'windows-949': + return 'EUC-KR' + case 'csiso2022kr': + case 'hz-gb-2312': + case 'iso-2022-cn': + case 'iso-2022-cn-ext': + case 'iso-2022-kr': + case 'replacement': + return 'replacement' + case 'unicodefffe': + case 'utf-16be': + return 'UTF-16BE' + case 'csunicode': + case 'iso-10646-ucs-2': + case 'ucs-2': + case 'unicode': + case 'unicodefeff': + case 'utf-16': + case 'utf-16le': + return 'UTF-16LE' + case 'x-user-defined': + return 'x-user-defined' + default: return 'failure' + } +} + +module.exports = { + getEncoding +} diff --git a/node_modules/undici/lib/fileapi/filereader.js b/node_modules/undici/lib/fileapi/filereader.js new file mode 100644 index 0000000..cd36a22 --- /dev/null +++ b/node_modules/undici/lib/fileapi/filereader.js @@ -0,0 +1,344 @@ +'use strict' + +const { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} = require('./util') +const { + kState, + kError, + kResult, + kEvents, + kAborted +} = require('./symbols') +const { webidl } = require('../fetch/webidl') +const { kEnumerableProperty } = require('../core/util') + +class FileReader extends EventTarget { + constructor () { + super() + + this[kState] = 'empty' + this[kResult] = null + this[kError] = null + this[kEvents] = { + loadend: null, + error: null, + abort: null, + load: null, + progress: null, + loadstart: null + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer + * @param {import('buffer').Blob} blob + */ + readAsArrayBuffer (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsArrayBuffer(blob) method, when invoked, + // must initiate a read operation for blob with ArrayBuffer. 
+ readOperation(this, blob, 'ArrayBuffer') + } + + /** + * @see https://w3c.github.io/FileAPI/#readAsBinaryString + * @param {import('buffer').Blob} blob + */ + readAsBinaryString (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsBinaryString(blob) method, when invoked, + // must initiate a read operation for blob with BinaryString. + readOperation(this, blob, 'BinaryString') + } + + /** + * @see https://w3c.github.io/FileAPI/#readAsDataText + * @param {import('buffer').Blob} blob + * @param {string?} encoding + */ + readAsText (blob, encoding = undefined) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + if (encoding !== undefined) { + encoding = webidl.converters.DOMString(encoding) + } + + // The readAsText(blob, encoding) method, when invoked, + // must initiate a read operation for blob with Text and encoding. + readOperation(this, blob, 'Text', encoding) + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL + * @param {import('buffer').Blob} blob + */ + readAsDataURL (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsDataURL(blob) method, when invoked, must + // initiate a read operation for blob with DataURL. + readOperation(this, blob, 'DataURL') + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-abort + */ + abort () { + // 1. If this's state is "empty" or if this's state is + // "done" set this's result to null and terminate + // this algorithm. + if (this[kState] === 'empty' || this[kState] === 'done') { + this[kResult] = null + return + } + + // 2. If this's state is "loading" set this's state to + // "done" and set this's result to null. + if (this[kState] === 'loading') { + this[kState] = 'done' + this[kResult] = null + } + + // 3. If there are any tasks from this on the file reading + // task source in an affiliated task queue, then remove + // those tasks from that task queue. + this[kAborted] = true + + // 4. Terminate the algorithm for the read method being processed. + // TODO + + // 5. Fire a progress event called abort at this. + fireAProgressEvent('abort', this) + + // 6. If this's state is not "loading", fire a progress + // event called loadend at this. + if (this[kState] !== 'loading') { + fireAProgressEvent('loadend', this) + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate + */ + get readyState () { + webidl.brandCheck(this, FileReader) + + switch (this[kState]) { + case 'empty': return this.EMPTY + case 'loading': return this.LOADING + case 'done': return this.DONE + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-result + */ + get result () { + webidl.brandCheck(this, FileReader) + + // The result attribute’s getter, when invoked, must return + // this's result. + return this[kResult] + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-error + */ + get error () { + webidl.brandCheck(this, FileReader) + + // The error attribute’s getter, when invoked, must return + // this's error. 
+ return this[kError] + } + + get onloadend () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadend + } + + set onloadend (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadend) { + this.removeEventListener('loadend', this[kEvents].loadend) + } + + if (typeof fn === 'function') { + this[kEvents].loadend = fn + this.addEventListener('loadend', fn) + } else { + this[kEvents].loadend = null + } + } + + get onerror () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].error + } + + set onerror (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].error) { + this.removeEventListener('error', this[kEvents].error) + } + + if (typeof fn === 'function') { + this[kEvents].error = fn + this.addEventListener('error', fn) + } else { + this[kEvents].error = null + } + } + + get onloadstart () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadstart + } + + set onloadstart (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadstart) { + this.removeEventListener('loadstart', this[kEvents].loadstart) + } + + if (typeof fn === 'function') { + this[kEvents].loadstart = fn + this.addEventListener('loadstart', fn) + } else { + this[kEvents].loadstart = null + } + } + + get onprogress () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].progress + } + + set onprogress (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].progress) { + this.removeEventListener('progress', this[kEvents].progress) + } + + if (typeof fn === 'function') { + this[kEvents].progress = fn + this.addEventListener('progress', fn) + } else { + this[kEvents].progress = null + } + } + + get onload () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].load + } + + set onload (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].load) { + this.removeEventListener('load', this[kEvents].load) + } + + if (typeof fn === 'function') { + this[kEvents].load = fn + this.addEventListener('load', fn) + } else { + this[kEvents].load = null + } + } + + get onabort () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].abort + } + + set onabort (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].abort) { + this.removeEventListener('abort', this[kEvents].abort) + } + + if (typeof fn === 'function') { + this[kEvents].abort = fn + this.addEventListener('abort', fn) + } else { + this[kEvents].abort = null + } + } +} + +// https://w3c.github.io/FileAPI/#dom-filereader-empty +FileReader.EMPTY = FileReader.prototype.EMPTY = 0 +// https://w3c.github.io/FileAPI/#dom-filereader-loading +FileReader.LOADING = FileReader.prototype.LOADING = 1 +// https://w3c.github.io/FileAPI/#dom-filereader-done +FileReader.DONE = FileReader.prototype.DONE = 2 + +Object.defineProperties(FileReader.prototype, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors, + readAsArrayBuffer: kEnumerableProperty, + readAsBinaryString: kEnumerableProperty, + readAsText: kEnumerableProperty, + readAsDataURL: kEnumerableProperty, + abort: kEnumerableProperty, + readyState: kEnumerableProperty, + result: kEnumerableProperty, + error: kEnumerableProperty, + onloadstart: kEnumerableProperty, + onprogress: kEnumerableProperty, + onload: kEnumerableProperty, + onabort: kEnumerableProperty, + onerror: kEnumerableProperty, + onloadend: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'FileReader', + writable: false, + enumerable: false, + configurable: true + } 
+}) + +Object.defineProperties(FileReader, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors +}) + +module.exports = { + FileReader +} diff --git a/node_modules/undici/lib/fileapi/progressevent.js b/node_modules/undici/lib/fileapi/progressevent.js new file mode 100644 index 0000000..778cf22 --- /dev/null +++ b/node_modules/undici/lib/fileapi/progressevent.js @@ -0,0 +1,78 @@ +'use strict' + +const { webidl } = require('../fetch/webidl') + +const kState = Symbol('ProgressEvent state') + +/** + * @see https://xhr.spec.whatwg.org/#progressevent + */ +class ProgressEvent extends Event { + constructor (type, eventInitDict = {}) { + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {}) + + super(type, eventInitDict) + + this[kState] = { + lengthComputable: eventInitDict.lengthComputable, + loaded: eventInitDict.loaded, + total: eventInitDict.total + } + } + + get lengthComputable () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].lengthComputable + } + + get loaded () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].loaded + } + + get total () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].total + } +} + +webidl.converters.ProgressEventInit = webidl.dictionaryConverter([ + { + key: 'lengthComputable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'loaded', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'total', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +]) + +module.exports = { + ProgressEvent +} diff --git a/node_modules/undici/lib/fileapi/symbols.js b/node_modules/undici/lib/fileapi/symbols.js new file mode 100644 index 0000000..dd11746 --- /dev/null +++ b/node_modules/undici/lib/fileapi/symbols.js @@ -0,0 +1,10 @@ +'use strict' + +module.exports = { + kState: Symbol('FileReader state'), + kResult: Symbol('FileReader result'), + kError: Symbol('FileReader error'), + kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'), + kEvents: Symbol('FileReader events'), + kAborted: Symbol('FileReader aborted') +} diff --git a/node_modules/undici/lib/fileapi/util.js b/node_modules/undici/lib/fileapi/util.js new file mode 100644 index 0000000..1d10899 --- /dev/null +++ b/node_modules/undici/lib/fileapi/util.js @@ -0,0 +1,392 @@ +'use strict' + +const { + kState, + kError, + kResult, + kAborted, + kLastProgressEventFired +} = require('./symbols') +const { ProgressEvent } = require('./progressevent') +const { getEncoding } = require('./encoding') +const { DOMException } = require('../fetch/constants') +const { serializeAMimeType, parseMIMEType } = require('../fetch/dataURL') +const { types } = require('util') +const { StringDecoder } = require('string_decoder') +const { btoa } = require('buffer') + +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +/** + * @see https://w3c.github.io/FileAPI/#readOperation + * @param {import('./filereader').FileReader} fr + * @param {import('buffer').Blob} blob + * @param {string} type + * @param {string?} 
encodingName + */ +function readOperation (fr, blob, type, encodingName) { + // 1. If fr’s state is "loading", throw an InvalidStateError + // DOMException. + if (fr[kState] === 'loading') { + throw new DOMException('Invalid state', 'InvalidStateError') + } + + // 2. Set fr’s state to "loading". + fr[kState] = 'loading' + + // 3. Set fr’s result to null. + fr[kResult] = null + + // 4. Set fr’s error to null. + fr[kError] = null + + // 5. Let stream be the result of calling get stream on blob. + /** @type {import('stream/web').ReadableStream} */ + const stream = blob.stream() + + // 6. Let reader be the result of getting a reader from stream. + const reader = stream.getReader() + + // 7. Let bytes be an empty byte sequence. + /** @type {Uint8Array[]} */ + const bytes = [] + + // 8. Let chunkPromise be the result of reading a chunk from + // stream with reader. + let chunkPromise = reader.read() + + // 9. Let isFirstChunk be true. + let isFirstChunk = true + + // 10. In parallel, while true: + // Note: "In parallel" just means non-blocking + // Note 2: readOperation itself cannot be async as double + // reading the body would then reject the promise, instead + // of throwing an error. + ;(async () => { + while (!fr[kAborted]) { + // 1. Wait for chunkPromise to be fulfilled or rejected. + try { + const { done, value } = await chunkPromise + + // 2. If chunkPromise is fulfilled, and isFirstChunk is + // true, queue a task to fire a progress event called + // loadstart at fr. + if (isFirstChunk && !fr[kAborted]) { + queueMicrotask(() => { + fireAProgressEvent('loadstart', fr) + }) + } + + // 3. Set isFirstChunk to false. + isFirstChunk = false + + // 4. If chunkPromise is fulfilled with an object whose + // done property is false and whose value property is + // a Uint8Array object, run these steps: + if (!done && types.isUint8Array(value)) { + // 1. Let bs be the byte sequence represented by the + // Uint8Array object. + + // 2. Append bs to bytes. + bytes.push(value) + + // 3. If roughly 50ms have passed since these steps + // were last invoked, queue a task to fire a + // progress event called progress at fr. + if ( + ( + fr[kLastProgressEventFired] === undefined || + Date.now() - fr[kLastProgressEventFired] >= 50 + ) && + !fr[kAborted] + ) { + fr[kLastProgressEventFired] = Date.now() + queueMicrotask(() => { + fireAProgressEvent('progress', fr) + }) + } + + // 4. Set chunkPromise to the result of reading a + // chunk from stream with reader. + chunkPromise = reader.read() + } else if (done) { + // 5. Otherwise, if chunkPromise is fulfilled with an + // object whose done property is true, queue a task + // to run the following steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Let result be the result of package data given + // bytes, type, blob’s type, and encodingName. + try { + const result = packageData(bytes, type, blob.type, encodingName) + + // 4. Else: + + if (fr[kAborted]) { + return + } + + // 1. Set fr’s result to result. + fr[kResult] = result + + // 2. Fire a progress event called load at the fr. + fireAProgressEvent('load', fr) + } catch (error) { + // 3. If package data threw an exception error: + + // 1. Set fr’s error to error. + fr[kError] = error + + // 2. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + } + + // 5. If fr’s state is not "loading", fire a progress + // event called loadend at the fr. 
+ if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break + } + } catch (error) { + if (fr[kAborted]) { + return + } + + // 6. Otherwise, if chunkPromise is rejected with an + // error error, queue a task to run the following + // steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Set fr’s error to error. + fr[kError] = error + + // 3. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + + // 4. If fr’s state is not "loading", fire a progress + // event called loadend at fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break + } + } + })() +} + +/** + * @see https://w3c.github.io/FileAPI/#fire-a-progress-event + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e The name of the event + * @param {import('./filereader').FileReader} reader + */ +function fireAProgressEvent (e, reader) { + // The progress event e does not bubble. e.bubbles must be false + // The progress event e is NOT cancelable. e.cancelable must be false + const event = new ProgressEvent(e, { + bubbles: false, + cancelable: false + }) + + reader.dispatchEvent(event) +} + +/** + * @see https://w3c.github.io/FileAPI/#blob-package-data + * @param {Uint8Array[]} bytes + * @param {string} type + * @param {string?} mimeType + * @param {string?} encodingName + */ +function packageData (bytes, type, mimeType, encodingName) { + // 1. A Blob has an associated package data algorithm, given + // bytes, a type, a optional mimeType, and a optional + // encodingName, which switches on type and runs the + // associated steps: + + switch (type) { + case 'DataURL': { + // 1. Return bytes as a DataURL [RFC2397] subject to + // the considerations below: + // * Use mimeType as part of the Data URL if it is + // available in keeping with the Data URL + // specification [RFC2397]. + // * If mimeType is not available return a Data URL + // without a media-type. [RFC2397]. + + // https://datatracker.ietf.org/doc/html/rfc2397#section-3 + // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + // mediatype := [ type "/" subtype ] *( ";" parameter ) + // data := *urlchar + // parameter := attribute "=" value + let dataURL = 'data:' + + const parsed = parseMIMEType(mimeType || 'application/octet-stream') + + if (parsed !== 'failure') { + dataURL += serializeAMimeType(parsed) + } + + dataURL += ';base64,' + + const decoder = new StringDecoder('latin1') + + for (const chunk of bytes) { + dataURL += btoa(decoder.write(chunk)) + } + + dataURL += btoa(decoder.end()) + + return dataURL + } + case 'Text': { + // 1. Let encoding be failure + let encoding = 'failure' + + // 2. If the encodingName is present, set encoding to the + // result of getting an encoding from encodingName. + if (encodingName) { + encoding = getEncoding(encodingName) + } + + // 3. If encoding is failure, and mimeType is present: + if (encoding === 'failure' && mimeType) { + // 1. Let type be the result of parse a MIME type + // given mimeType. + const type = parseMIMEType(mimeType) + + // 2. If type is not failure, set encoding to the result + // of getting an encoding from type’s parameters["charset"]. + if (type !== 'failure') { + encoding = getEncoding(type.parameters.get('charset')) + } + } + + // 4. If encoding is failure, then set encoding to UTF-8. + if (encoding === 'failure') { + encoding = 'UTF-8' + } + + // 5. Decode bytes using fallback encoding encoding, and + // return the result. 
+ return decode(bytes, encoding) + } + case 'ArrayBuffer': { + // Return a new ArrayBuffer whose contents are bytes. + const sequence = combineByteSequences(bytes) + + return sequence.buffer + } + case 'BinaryString': { + // Return bytes as a binary string, in which every byte + // is represented by a code unit of equal value [0..255]. + let binaryString = '' + + const decoder = new StringDecoder('latin1') + + for (const chunk of bytes) { + binaryString += decoder.write(chunk) + } + + binaryString += decoder.end() + + return binaryString + } + } +} + +/** + * @see https://encoding.spec.whatwg.org/#decode + * @param {Uint8Array[]} ioQueue + * @param {string} encoding + */ +function decode (ioQueue, encoding) { + const bytes = combineByteSequences(ioQueue) + + // 1. Let BOMEncoding be the result of BOM sniffing ioQueue. + const BOMEncoding = BOMSniffing(bytes) + + let slice = 0 + + // 2. If BOMEncoding is non-null: + if (BOMEncoding !== null) { + // 1. Set encoding to BOMEncoding. + encoding = BOMEncoding + + // 2. Read three bytes from ioQueue, if BOMEncoding is + // UTF-8; otherwise read two bytes. + // (Do nothing with those bytes.) + slice = BOMEncoding === 'UTF-8' ? 3 : 2 + } + + // 3. Process a queue with an instance of encoding’s + // decoder, ioQueue, output, and "replacement". + + // 4. Return output. + + const sliced = bytes.slice(slice) + return new TextDecoder(encoding).decode(sliced) +} + +/** + * @see https://encoding.spec.whatwg.org/#bom-sniff + * @param {Uint8Array} ioQueue + */ +function BOMSniffing (ioQueue) { + // 1. Let BOM be the result of peeking 3 bytes from ioQueue, + // converted to a byte sequence. + const [a, b, c] = ioQueue + + // 2. For each of the rows in the table below, starting with + // the first one and going down, if BOM starts with the + // bytes given in the first column, then return the + // encoding given in the cell in the second column of that + // row. Otherwise, return null. + if (a === 0xEF && b === 0xBB && c === 0xBF) { + return 'UTF-8' + } else if (a === 0xFE && b === 0xFF) { + return 'UTF-16BE' + } else if (a === 0xFF && b === 0xFE) { + return 'UTF-16LE' + } + + return null +} + +/** + * @param {Uint8Array[]} sequences + */ +function combineByteSequences (sequences) { + const size = sequences.reduce((a, b) => { + return a + b.byteLength + }, 0) + + let offset = 0 + + return sequences.reduce((a, b) => { + a.set(b, offset) + offset += b.byteLength + return a + }, new Uint8Array(size)) +} + +module.exports = { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} diff --git a/node_modules/undici/lib/global.js b/node_modules/undici/lib/global.js new file mode 100644 index 0000000..18bfd73 --- /dev/null +++ b/node_modules/undici/lib/global.js @@ -0,0 +1,32 @@ +'use strict' + +// We include a version number for the Dispatcher API. In case of breaking changes, +// this version number must be increased to avoid conflicts. 
+const globalDispatcher = Symbol.for('undici.globalDispatcher.1') +const { InvalidArgumentError } = require('./core/errors') +const Agent = require('./agent') + +if (getGlobalDispatcher() === undefined) { + setGlobalDispatcher(new Agent()) +} + +function setGlobalDispatcher (agent) { + if (!agent || typeof agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument agent must implement Agent') + } + Object.defineProperty(globalThis, globalDispatcher, { + value: agent, + writable: true, + enumerable: false, + configurable: false + }) +} + +function getGlobalDispatcher () { + return globalThis[globalDispatcher] +} + +module.exports = { + setGlobalDispatcher, + getGlobalDispatcher +} diff --git a/node_modules/undici/lib/handler/DecoratorHandler.js b/node_modules/undici/lib/handler/DecoratorHandler.js new file mode 100644 index 0000000..9d70a76 --- /dev/null +++ b/node_modules/undici/lib/handler/DecoratorHandler.js @@ -0,0 +1,35 @@ +'use strict' + +module.exports = class DecoratorHandler { + constructor (handler) { + this.handler = handler + } + + onConnect (...args) { + return this.handler.onConnect(...args) + } + + onError (...args) { + return this.handler.onError(...args) + } + + onUpgrade (...args) { + return this.handler.onUpgrade(...args) + } + + onHeaders (...args) { + return this.handler.onHeaders(...args) + } + + onData (...args) { + return this.handler.onData(...args) + } + + onComplete (...args) { + return this.handler.onComplete(...args) + } + + onBodySent (...args) { + return this.handler.onBodySent(...args) + } +} diff --git a/node_modules/undici/lib/handler/RedirectHandler.js b/node_modules/undici/lib/handler/RedirectHandler.js new file mode 100644 index 0000000..baca27e --- /dev/null +++ b/node_modules/undici/lib/handler/RedirectHandler.js @@ -0,0 +1,216 @@ +'use strict' + +const util = require('../core/util') +const { kBodyUsed } = require('../core/symbols') +const assert = require('assert') +const { InvalidArgumentError } = require('../core/errors') +const EE = require('events') + +const redirectableStatusCodes = [300, 301, 302, 303, 307, 308] + +const kBody = Symbol('body') + +class BodyAsyncIterable { + constructor (body) { + this[kBody] = body + this[kBodyUsed] = false + } + + async * [Symbol.asyncIterator] () { + assert(!this[kBodyUsed], 'disturbed') + this[kBodyUsed] = true + yield * this[kBody] + } +} + +class RedirectHandler { + constructor (dispatch, maxRedirections, opts, handler) { + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + util.validateHandler(handler, opts.method, opts.upgrade) + + this.dispatch = dispatch + this.location = null + this.abort = null + this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy + this.maxRedirections = maxRedirections + this.handler = handler + this.history = [] + + if (util.isStream(this.opts.body)) { + // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp + // so that it can be dispatched again? + // TODO (fix): Do we need 100-expect support to provide a way to do this properly? 
+ if (util.bodyLength(this.opts.body) === 0) { + this.opts.body + .on('data', function () { + assert(false) + }) + } + + if (typeof this.opts.body.readableDidRead !== 'boolean') { + this.opts.body[kBodyUsed] = false + EE.prototype.on.call(this.opts.body, 'data', function () { + this[kBodyUsed] = true + }) + } + } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') { + // TODO (fix): We can't access ReadableStream internal state + // to determine whether or not it has been disturbed. This is just + // a workaround. + this.opts.body = new BodyAsyncIterable(this.opts.body) + } else if ( + this.opts.body && + typeof this.opts.body !== 'string' && + !ArrayBuffer.isView(this.opts.body) && + util.isIterable(this.opts.body) + ) { + // TODO: Should we allow re-using iterable if !this.opts.idempotent + // or through some other flag? + this.opts.body = new BodyAsyncIterable(this.opts.body) + } + } + + onConnect (abort) { + this.abort = abort + this.handler.onConnect(abort, { history: this.history }) + } + + onUpgrade (statusCode, headers, socket) { + this.handler.onUpgrade(statusCode, headers, socket) + } + + onError (error) { + this.handler.onError(error) + } + + onHeaders (statusCode, headers, resume, statusText) { + this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) + ? null + : parseLocation(statusCode, headers) + + if (this.opts.origin) { + this.history.push(new URL(this.opts.path, this.opts.origin)) + } + + if (!this.location) { + return this.handler.onHeaders(statusCode, headers, resume, statusText) + } + + const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))) + const path = search ? `${pathname}${search}` : pathname + + // Remove headers referring to the original URL. + // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers. + // https://tools.ietf.org/html/rfc7231#section-6.4 + this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin) + this.opts.path = path + this.opts.origin = origin + this.opts.maxRedirections = 0 + this.opts.query = null + + // https://tools.ietf.org/html/rfc7231#section-6.4.4 + // In case of HTTP 303, always replace method to be either HEAD or GET + if (statusCode === 303 && this.opts.method !== 'HEAD') { + this.opts.method = 'GET' + this.opts.body = null + } + } + + onData (chunk) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response bodies. + + Redirection is used to serve the requested resource from another URL, so it is assumes that + no body is generated (and thus can be ignored). Even though generating a body is not prohibited. + + For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually + (which means it's optional and not mandated) contain just an hyperlink to the value of + the Location response header, so the body can be ignored safely. + + For status 300, which is "Multiple Choices", the spec mentions both generating a Location + response header AND a response body with the other possible location to follow. + Since the spec explicitily chooses not to specify a format for such body and leave it to + servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it. 
+ */ + } else { + return this.handler.onData(chunk) + } + } + + onComplete (trailers) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections + and neither are useful if present. + + See comment on onData method above for more detailed informations. + */ + + this.location = null + this.abort = null + + this.dispatch(this.opts, this) + } else { + this.handler.onComplete(trailers) + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) { + this.handler.onBodySent(chunk) + } + } +} + +function parseLocation (statusCode, headers) { + if (redirectableStatusCodes.indexOf(statusCode) === -1) { + return null + } + + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toString().toLowerCase() === 'location') { + return headers[i + 1] + } + } +} + +// https://tools.ietf.org/html/rfc7231#section-6.4.4 +function shouldRemoveHeader (header, removeContent, unknownOrigin) { + return ( + (header.length === 4 && header.toString().toLowerCase() === 'host') || + (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || + (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || + (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') + ) +} + +// https://tools.ietf.org/html/rfc7231#section-6.4 +function cleanRequestHeaders (headers, removeContent, unknownOrigin) { + const ret = [] + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) { + ret.push(headers[i], headers[i + 1]) + } + } + } else if (headers && typeof headers === 'object') { + for (const key of Object.keys(headers)) { + if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) { + ret.push(key, headers[key]) + } + } + } else { + assert(headers == null, 'headers must be an object or an array') + } + return ret +} + +module.exports = RedirectHandler diff --git a/node_modules/undici/lib/handler/RetryHandler.js b/node_modules/undici/lib/handler/RetryHandler.js new file mode 100644 index 0000000..3710447 --- /dev/null +++ b/node_modules/undici/lib/handler/RetryHandler.js @@ -0,0 +1,336 @@ +const assert = require('assert') + +const { kRetryHandlerDefaultRetry } = require('../core/symbols') +const { RequestRetryError } = require('../core/errors') +const { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util') + +function calculateRetryAfterHeader (retryAfter) { + const current = Date.now() + const diff = new Date(retryAfter).getTime() - current + + return diff +} + +class RetryHandler { + constructor (opts, handlers) { + const { retryOptions, ...dispatchOpts } = opts + const { + // Retry scoped + retry: retryFn, + maxRetries, + maxTimeout, + minTimeout, + timeoutFactor, + // Response scoped + methods, + errorCodes, + retryAfter, + statusCodes + } = retryOptions ?? {} + + this.dispatch = handlers.dispatch + this.handler = handlers.handler + this.opts = dispatchOpts + this.abort = null + this.aborted = false + this.retryOpts = { + retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], + retryAfter: retryAfter ?? true, + maxTimeout: maxTimeout ?? 30 * 1000, // 30s, + timeout: minTimeout ?? 500, // .5s + timeoutFactor: timeoutFactor ?? 2, + maxRetries: maxRetries ?? 5, + // What errors we should retry + methods: methods ?? 
['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + // Indicates which errors to retry + statusCodes: statusCodes ?? [500, 502, 503, 504, 429], + // List of errors to retry + errorCodes: errorCodes ?? [ + 'ECONNRESET', + 'ECONNREFUSED', + 'ENOTFOUND', + 'ENETDOWN', + 'ENETUNREACH', + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'EPIPE' + ] + } + + this.retryCount = 0 + this.start = 0 + this.end = null + this.etag = null + this.resume = null + + // Handle possible onConnect duplication + this.handler.onConnect(reason => { + this.aborted = true + if (this.abort) { + this.abort(reason) + } else { + this.reason = reason + } + }) + } + + onRequestSent () { + if (this.handler.onRequestSent) { + this.handler.onRequestSent() + } + } + + onUpgrade (statusCode, headers, socket) { + if (this.handler.onUpgrade) { + this.handler.onUpgrade(statusCode, headers, socket) + } + } + + onConnect (abort) { + if (this.aborted) { + abort(this.reason) + } else { + this.abort = abort + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + } + + static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { + const { statusCode, code, headers } = err + const { method, retryOptions } = opts + const { + maxRetries, + timeout, + maxTimeout, + timeoutFactor, + statusCodes, + errorCodes, + methods + } = retryOptions + let { counter, currentTimeout } = state + + currentTimeout = + currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout + + // Any code that is not a Undici's originated and allowed to retry + if ( + code && + code !== 'UND_ERR_REQ_RETRY' && + code !== 'UND_ERR_SOCKET' && + !errorCodes.includes(code) + ) { + cb(err) + return + } + + // If a set of method are provided and the current method is not in the list + if (Array.isArray(methods) && !methods.includes(method)) { + cb(err) + return + } + + // If a set of status code are provided and the current status code is not in the list + if ( + statusCode != null && + Array.isArray(statusCodes) && + !statusCodes.includes(statusCode) + ) { + cb(err) + return + } + + // If we reached the max number of retries + if (counter > maxRetries) { + cb(err) + return + } + + let retryAfterHeader = headers != null && headers['retry-after'] + if (retryAfterHeader) { + retryAfterHeader = Number(retryAfterHeader) + retryAfterHeader = isNaN(retryAfterHeader) + ? calculateRetryAfterHeader(retryAfterHeader) + : retryAfterHeader * 1e3 // Retry-After is in seconds + } + + const retryTimeout = + retryAfterHeader > 0 + ? 
Math.min(retryAfterHeader, maxTimeout) + : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + + state.currentTimeout = retryTimeout + + setTimeout(() => cb(null), retryTimeout) + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const headers = parseHeaders(rawHeaders) + + this.retryCount += 1 + + if (statusCode >= 300) { + this.abort( + new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Checkpoint for resume from where we left it + if (this.resume != null) { + this.resume = null + + if (statusCode !== 206) { + return true + } + + const contentRange = parseRangeHeader(headers['content-range']) + // If no content range + if (!contentRange) { + this.abort( + new RequestRetryError('Content-Range mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Let's start with a weak etag check + if (this.etag != null && this.etag !== headers.etag) { + this.abort( + new RequestRetryError('ETag mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + const { start, size, end = size } = contentRange + + assert(this.start === start, 'content-range mismatch') + assert(this.end == null || this.end === end, 'content-range mismatch') + + this.resume = resume + return true + } + + if (this.end == null) { + if (statusCode === 206) { + // First time we receive 206 + const range = parseRangeHeader(headers['content-range']) + + if (range == null) { + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const { start, size, end = size } = range + + assert( + start != null && Number.isFinite(start) && this.start !== start, + 'content-range mismatch' + ) + assert(Number.isFinite(start)) + assert( + end != null && Number.isFinite(end) && this.end !== end, + 'invalid content-length' + ) + + this.start = start + this.end = end + } + + // We make our best to checkpoint the body for further range headers + if (this.end == null) { + const contentLength = headers['content-length'] + this.end = contentLength != null ? Number(contentLength) : null + } + + assert(Number.isFinite(this.start)) + assert( + this.end == null || Number.isFinite(this.end), + 'invalid content-length' + ) + + this.resume = resume + this.etag = headers.etag != null ? headers.etag : null + + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const err = new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + + this.abort(err) + + return false + } + + onData (chunk) { + this.start += chunk.length + + return this.handler.onData(chunk) + } + + onComplete (rawTrailers) { + this.retryCount = 0 + return this.handler.onComplete(rawTrailers) + } + + onError (err) { + if (this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + this.retryOpts.retry( + err, + { + state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, + opts: { retryOptions: this.retryOpts, ...this.opts } + }, + onRetry.bind(this) + ) + + function onRetry (err) { + if (err != null || this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + if (this.start !== 0) { + this.opts = { + ...this.opts, + headers: { + ...this.opts.headers, + range: `bytes=${this.start}-${this.end ?? 
''}` + } + } + } + + try { + this.dispatch(this.opts, this) + } catch (err) { + this.handler.onError(err) + } + } + } +} + +module.exports = RetryHandler diff --git a/node_modules/undici/lib/interceptor/redirectInterceptor.js b/node_modules/undici/lib/interceptor/redirectInterceptor.js new file mode 100644 index 0000000..7cc035e --- /dev/null +++ b/node_modules/undici/lib/interceptor/redirectInterceptor.js @@ -0,0 +1,21 @@ +'use strict' + +const RedirectHandler = require('../handler/RedirectHandler') + +function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) { + return (dispatch) => { + return function Intercept (opts, handler) { + const { maxRedirections = defaultMaxRedirections } = opts + + if (!maxRedirections) { + return dispatch(opts, handler) + } + + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler) + opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting. + return dispatch(opts, redirectHandler) + } + } +} + +module.exports = createRedirectInterceptor diff --git a/node_modules/undici/lib/llhttp/constants.d.ts b/node_modules/undici/lib/llhttp/constants.d.ts new file mode 100644 index 0000000..b75ab1b --- /dev/null +++ b/node_modules/undici/lib/llhttp/constants.d.ts @@ -0,0 +1,199 @@ +import { IEnumMap } from './utils'; +export declare type HTTPMode = 'loose' | 'strict'; +export declare enum ERROR { + OK = 0, + INTERNAL = 1, + STRICT = 2, + LF_EXPECTED = 3, + UNEXPECTED_CONTENT_LENGTH = 4, + CLOSED_CONNECTION = 5, + INVALID_METHOD = 6, + INVALID_URL = 7, + INVALID_CONSTANT = 8, + INVALID_VERSION = 9, + INVALID_HEADER_TOKEN = 10, + INVALID_CONTENT_LENGTH = 11, + INVALID_CHUNK_SIZE = 12, + INVALID_STATUS = 13, + INVALID_EOF_STATE = 14, + INVALID_TRANSFER_ENCODING = 15, + CB_MESSAGE_BEGIN = 16, + CB_HEADERS_COMPLETE = 17, + CB_MESSAGE_COMPLETE = 18, + CB_CHUNK_HEADER = 19, + CB_CHUNK_COMPLETE = 20, + PAUSED = 21, + PAUSED_UPGRADE = 22, + PAUSED_H2_UPGRADE = 23, + USER = 24 +} +export declare enum TYPE { + BOTH = 0, + REQUEST = 1, + RESPONSE = 2 +} +export declare enum FLAGS { + CONNECTION_KEEP_ALIVE = 1, + CONNECTION_CLOSE = 2, + CONNECTION_UPGRADE = 4, + CHUNKED = 8, + UPGRADE = 16, + CONTENT_LENGTH = 32, + SKIPBODY = 64, + TRAILING = 128, + TRANSFER_ENCODING = 512 +} +export declare enum LENIENT_FLAGS { + HEADERS = 1, + CHUNKED_LENGTH = 2, + KEEP_ALIVE = 4 +} +export declare enum METHODS { + DELETE = 0, + GET = 1, + HEAD = 2, + POST = 3, + PUT = 4, + CONNECT = 5, + OPTIONS = 6, + TRACE = 7, + COPY = 8, + LOCK = 9, + MKCOL = 10, + MOVE = 11, + PROPFIND = 12, + PROPPATCH = 13, + SEARCH = 14, + UNLOCK = 15, + BIND = 16, + REBIND = 17, + UNBIND = 18, + ACL = 19, + REPORT = 20, + MKACTIVITY = 21, + CHECKOUT = 22, + MERGE = 23, + 'M-SEARCH' = 24, + NOTIFY = 25, + SUBSCRIBE = 26, + UNSUBSCRIBE = 27, + PATCH = 28, + PURGE = 29, + MKCALENDAR = 30, + LINK = 31, + UNLINK = 32, + SOURCE = 33, + PRI = 34, + DESCRIBE = 35, + ANNOUNCE = 36, + SETUP = 37, + PLAY = 38, + PAUSE = 39, + TEARDOWN = 40, + GET_PARAMETER = 41, + SET_PARAMETER = 42, + REDIRECT = 43, + RECORD = 44, + FLUSH = 45 +} +export declare const METHODS_HTTP: METHODS[]; +export declare const METHODS_ICE: METHODS[]; +export declare const METHODS_RTSP: METHODS[]; +export declare const METHOD_MAP: IEnumMap; +export declare const H_METHOD_MAP: IEnumMap; +export declare enum FINISH { + SAFE = 0, + SAFE_WITH_CB = 1, + UNSAFE = 2 +} +export declare type CharList = Array; +export declare const ALPHA: CharList; +export declare const NUM_MAP: { + 0: number; 
+ 1: number; + 2: number; + 3: number; + 4: number; + 5: number; + 6: number; + 7: number; + 8: number; + 9: number; +}; +export declare const HEX_MAP: { + 0: number; + 1: number; + 2: number; + 3: number; + 4: number; + 5: number; + 6: number; + 7: number; + 8: number; + 9: number; + A: number; + B: number; + C: number; + D: number; + E: number; + F: number; + a: number; + b: number; + c: number; + d: number; + e: number; + f: number; +}; +export declare const NUM: CharList; +export declare const ALPHANUM: CharList; +export declare const MARK: CharList; +export declare const USERINFO_CHARS: CharList; +export declare const STRICT_URL_CHAR: CharList; +export declare const URL_CHAR: CharList; +export declare const HEX: CharList; +export declare const STRICT_TOKEN: CharList; +export declare const TOKEN: CharList; +export declare const HEADER_CHARS: CharList; +export declare const CONNECTION_TOKEN_CHARS: CharList; +export declare const MAJOR: { + 0: number; + 1: number; + 2: number; + 3: number; + 4: number; + 5: number; + 6: number; + 7: number; + 8: number; + 9: number; +}; +export declare const MINOR: { + 0: number; + 1: number; + 2: number; + 3: number; + 4: number; + 5: number; + 6: number; + 7: number; + 8: number; + 9: number; +}; +export declare enum HEADER_STATE { + GENERAL = 0, + CONNECTION = 1, + CONTENT_LENGTH = 2, + TRANSFER_ENCODING = 3, + UPGRADE = 4, + CONNECTION_KEEP_ALIVE = 5, + CONNECTION_CLOSE = 6, + CONNECTION_UPGRADE = 7, + TRANSFER_ENCODING_CHUNKED = 8 +} +export declare const SPECIAL_HEADERS: { + connection: HEADER_STATE; + 'content-length': HEADER_STATE; + 'proxy-connection': HEADER_STATE; + 'transfer-encoding': HEADER_STATE; + upgrade: HEADER_STATE; +}; diff --git a/node_modules/undici/lib/llhttp/constants.js b/node_modules/undici/lib/llhttp/constants.js new file mode 100644 index 0000000..fb0b5a2 --- /dev/null +++ b/node_modules/undici/lib/llhttp/constants.js @@ -0,0 +1,278 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0; +const utils_1 = require("./utils"); +// C headers +var ERROR; +(function (ERROR) { + ERROR[ERROR["OK"] = 0] = "OK"; + ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL"; + ERROR[ERROR["STRICT"] = 2] = "STRICT"; + ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED"; + ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH"; + ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION"; + ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD"; + ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL"; + ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT"; + ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION"; + ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN"; + ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH"; + ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE"; + ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS"; + ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE"; 
+ ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING"; + ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN"; + ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE"; + ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE"; + ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER"; + ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE"; + ERROR[ERROR["PAUSED"] = 21] = "PAUSED"; + ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE"; + ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE"; + ERROR[ERROR["USER"] = 24] = "USER"; +})(ERROR = exports.ERROR || (exports.ERROR = {})); +var TYPE; +(function (TYPE) { + TYPE[TYPE["BOTH"] = 0] = "BOTH"; + TYPE[TYPE["REQUEST"] = 1] = "REQUEST"; + TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE"; +})(TYPE = exports.TYPE || (exports.TYPE = {})); +var FLAGS; +(function (FLAGS) { + FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE"; + FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE"; + FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE"; + FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED"; + FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE"; + FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH"; + FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY"; + FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING"; + // 1 << 8 is unused + FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING"; +})(FLAGS = exports.FLAGS || (exports.FLAGS = {})); +var LENIENT_FLAGS; +(function (LENIENT_FLAGS) { + LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS"; + LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH"; + LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE"; +})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {})); +var METHODS; +(function (METHODS) { + METHODS[METHODS["DELETE"] = 0] = "DELETE"; + METHODS[METHODS["GET"] = 1] = "GET"; + METHODS[METHODS["HEAD"] = 2] = "HEAD"; + METHODS[METHODS["POST"] = 3] = "POST"; + METHODS[METHODS["PUT"] = 4] = "PUT"; + /* pathological */ + METHODS[METHODS["CONNECT"] = 5] = "CONNECT"; + METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS"; + METHODS[METHODS["TRACE"] = 7] = "TRACE"; + /* WebDAV */ + METHODS[METHODS["COPY"] = 8] = "COPY"; + METHODS[METHODS["LOCK"] = 9] = "LOCK"; + METHODS[METHODS["MKCOL"] = 10] = "MKCOL"; + METHODS[METHODS["MOVE"] = 11] = "MOVE"; + METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND"; + METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH"; + METHODS[METHODS["SEARCH"] = 14] = "SEARCH"; + METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK"; + METHODS[METHODS["BIND"] = 16] = "BIND"; + METHODS[METHODS["REBIND"] = 17] = "REBIND"; + METHODS[METHODS["UNBIND"] = 18] = "UNBIND"; + METHODS[METHODS["ACL"] = 19] = "ACL"; + /* subversion */ + METHODS[METHODS["REPORT"] = 20] = "REPORT"; + METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY"; + METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT"; + METHODS[METHODS["MERGE"] = 23] = "MERGE"; + /* upnp */ + METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH"; + METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY"; + METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE"; + METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE"; + /* RFC-5789 */ + METHODS[METHODS["PATCH"] = 28] = "PATCH"; + METHODS[METHODS["PURGE"] = 29] = "PURGE"; + /* CalDAV */ + METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR"; + /* RFC-2068, section 19.6.1.2 */ + METHODS[METHODS["LINK"] = 31] = "LINK"; + METHODS[METHODS["UNLINK"] = 32] = "UNLINK"; + /* icecast */ + METHODS[METHODS["SOURCE"] = 33] = "SOURCE"; + /* RFC-7540, section 11.6 */ 
+ METHODS[METHODS["PRI"] = 34] = "PRI"; + /* RFC-2326 RTSP */ + METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE"; + METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE"; + METHODS[METHODS["SETUP"] = 37] = "SETUP"; + METHODS[METHODS["PLAY"] = 38] = "PLAY"; + METHODS[METHODS["PAUSE"] = 39] = "PAUSE"; + METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN"; + METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER"; + METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER"; + METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT"; + METHODS[METHODS["RECORD"] = 44] = "RECORD"; + /* RAOP */ + METHODS[METHODS["FLUSH"] = 45] = "FLUSH"; +})(METHODS = exports.METHODS || (exports.METHODS = {})); +exports.METHODS_HTTP = [ + METHODS.DELETE, + METHODS.GET, + METHODS.HEAD, + METHODS.POST, + METHODS.PUT, + METHODS.CONNECT, + METHODS.OPTIONS, + METHODS.TRACE, + METHODS.COPY, + METHODS.LOCK, + METHODS.MKCOL, + METHODS.MOVE, + METHODS.PROPFIND, + METHODS.PROPPATCH, + METHODS.SEARCH, + METHODS.UNLOCK, + METHODS.BIND, + METHODS.REBIND, + METHODS.UNBIND, + METHODS.ACL, + METHODS.REPORT, + METHODS.MKACTIVITY, + METHODS.CHECKOUT, + METHODS.MERGE, + METHODS['M-SEARCH'], + METHODS.NOTIFY, + METHODS.SUBSCRIBE, + METHODS.UNSUBSCRIBE, + METHODS.PATCH, + METHODS.PURGE, + METHODS.MKCALENDAR, + METHODS.LINK, + METHODS.UNLINK, + METHODS.PRI, + // TODO(indutny): should we allow it with HTTP? + METHODS.SOURCE, +]; +exports.METHODS_ICE = [ + METHODS.SOURCE, +]; +exports.METHODS_RTSP = [ + METHODS.OPTIONS, + METHODS.DESCRIBE, + METHODS.ANNOUNCE, + METHODS.SETUP, + METHODS.PLAY, + METHODS.PAUSE, + METHODS.TEARDOWN, + METHODS.GET_PARAMETER, + METHODS.SET_PARAMETER, + METHODS.REDIRECT, + METHODS.RECORD, + METHODS.FLUSH, + // For AirPlay + METHODS.GET, + METHODS.POST, +]; +exports.METHOD_MAP = utils_1.enumToMap(METHODS); +exports.H_METHOD_MAP = {}; +Object.keys(exports.METHOD_MAP).forEach((key) => { + if (/^H/.test(key)) { + exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key]; + } +}); +var FINISH; +(function (FINISH) { + FINISH[FINISH["SAFE"] = 0] = "SAFE"; + FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB"; + FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE"; +})(FINISH = exports.FINISH || (exports.FINISH = {})); +exports.ALPHA = []; +for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) { + // Upper case + exports.ALPHA.push(String.fromCharCode(i)); + // Lower case + exports.ALPHA.push(String.fromCharCode(i + 0x20)); +} +exports.NUM_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, +}; +exports.HEX_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, + A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF, + a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf, +}; +exports.NUM = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', +]; +exports.ALPHANUM = exports.ALPHA.concat(exports.NUM); +exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')']; +exports.USERINFO_CHARS = exports.ALPHANUM + .concat(exports.MARK) + .concat(['%', ';', ':', '&', '=', '+', '$', ',']); +// TODO(indutny): use RFC +exports.STRICT_URL_CHAR = [ + '!', '"', '$', '%', '&', '\'', + '(', ')', '*', '+', ',', '-', '.', '/', + ':', ';', '<', '=', '>', + '@', '[', '\\', ']', '^', '_', + '`', + '{', '|', '}', '~', +].concat(exports.ALPHANUM); +exports.URL_CHAR = exports.STRICT_URL_CHAR + .concat(['\t', '\f']); +// All characters with 0x80 bit set to 1 +for (let i = 0x80; i <= 0xff; i++) { + exports.URL_CHAR.push(i); +} +exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']); +/* Tokens as defined by rfc 
2616. Also lowercases them. + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" | "=" + * | "{" | "}" | SP | HT + */ +exports.STRICT_TOKEN = [ + '!', '#', '$', '%', '&', '\'', + '*', '+', '-', '.', + '^', '_', '`', + '|', '~', +].concat(exports.ALPHANUM); +exports.TOKEN = exports.STRICT_TOKEN.concat([' ']); +/* + * Verify that a char is a valid visible (printable) US-ASCII + * character or %x80-FF + */ +exports.HEADER_CHARS = ['\t']; +for (let i = 32; i <= 255; i++) { + if (i !== 127) { + exports.HEADER_CHARS.push(i); + } +} +// ',' = \x44 +exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44); +exports.MAJOR = exports.NUM_MAP; +exports.MINOR = exports.MAJOR; +var HEADER_STATE; +(function (HEADER_STATE) { + HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL"; + HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION"; + HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING"; + HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE"; + HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE"; + HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE"; + HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED"; +})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {})); +exports.SPECIAL_HEADERS = { + 'connection': HEADER_STATE.CONNECTION, + 'content-length': HEADER_STATE.CONTENT_LENGTH, + 'proxy-connection': HEADER_STATE.CONNECTION, + 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, + 'upgrade': HEADER_STATE.UPGRADE, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/undici/lib/llhttp/constants.js.map b/node_modules/undici/lib/llhttp/constants.js.map new file mode 100644 index 0000000..6ac54bc --- /dev/null +++ b/node_modules/undici/lib/llhttp/constants.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/llhttp/constants.ts"],"names":[],"mappings":";;;AAAA,mCAA8C;AAI9C,YAAY;AAEZ,IAAY,KA6BX;AA7BD,WAAY,KAAK;IACf,6BAAM,CAAA;IACN,yCAAQ,CAAA;IACR,qCAAM,CAAA;IACN,+CAAW,CAAA;IACX,2EAAyB,CAAA;IACzB,2DAAiB,CAAA;IACjB,qDAAc,CAAA;IACd,+CAAW,CAAA;IACX,yDAAgB,CAAA;IAChB,uDAAe,CAAA;IACf,kEAAoB,CAAA;IACpB,sEAAsB,CAAA;IACtB,8DAAkB,CAAA;IAClB,sDAAc,CAAA;IACd,4DAAiB,CAAA;IACjB,4EAAyB,CAAA;IAEzB,0DAAgB,CAAA;IAChB,gEAAmB,CAAA;IACnB,gEAAmB,CAAA;IACnB,wDAAe,CAAA;IACf,4DAAiB,CAAA;IAEjB,sCAAM,CAAA;IACN,sDAAc,CAAA;IACd,4DAAiB,CAAA;IAEjB,kCAAI,CAAA;AACN,CAAC,EA7BW,KAAK,GAAL,aAAK,KAAL,aAAK,QA6BhB;AAED,IAAY,IAIX;AAJD,WAAY,IAAI;IACd,+BAAQ,CAAA;IACR,qCAAO,CAAA;IACP,uCAAQ,CAAA;AACV,CAAC,EAJW,IAAI,GAAJ,YAAI,KAAJ,YAAI,QAIf;AAED,IAAY,KAWX;AAXD,WAAY,KAAK;IACf,mEAA8B,CAAA;IAC9B,yDAAyB,CAAA;IACzB,6DAA2B,CAAA;IAC3B,uCAAgB,CAAA;IAChB,wCAAgB,CAAA;IAChB,sDAAuB,CAAA;IACvB,0CAAiB,CAAA;IACjB,2CAAiB,CAAA;IACjB,mBAAmB;IACnB,6DAA0B,CAAA;AAC5B,CAAC,EAXW,KAAK,GAAL,aAAK,KAAL,aAAK,QAWhB;AAED,IAAY,aAIX;AAJD,WAAY,aAAa;IACvB,uDAAgB,CAAA;IAChB,qEAAuB,CAAA;IACvB,6DAAmB,CAAA;AACrB,CAAC,EAJW,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAIxB;AAED,IAAY,OA0DX;AA1DD,WAAY,OAAO;IACjB,yCAAU,CAAA;IACV,mCAAO,CAAA;IACP,qCAAQ,CAAA;IACR,qCAAQ,CAAA;IACR,mCAAO,CAAA;IACP,kBAAkB;IAClB,2CAAW,CAAA;IACX,2CAAW,CAAA;IACX,uCAAS,CAAA;IACT,YAAY;IACZ,qCAAQ,CAAA;IACR,qCAAQ,CAAA;IACR,wCAAU,CAAA;IACV,sCAAS,CAAA;IACT,8CAAa,CAAA;IACb,gDAAc,CAAA;IACd,0CAAW,CAAA;IACX,0CAAW,CAAA;IACX,sCAAS,CAAA;IACT,0CAAW,CAAA;IACX,0CAAW,CAAA;IACX,oCAAQ,CAAA;IACR,gBAAgB;IAChB,0CAAW,CAAA;IACX,kDAAe,CAAA;IACf,8CAAa,CAAA;IACb,wCAAU,CAAA;IACV,UAAU;IACV,8CAAe,CAAA;IACf,0CAAW,CAAA;IACX,gDAAc,CAAA;IACd,oDAAgB,CAAA;IAChB,cAAc;IACd,wCAAU,CAAA;IACV,wCAAU,CAAA;IACV,YAAY;IACZ,kDAAe,CAAA;IACf,gCAAgC;IAChC,sCAAS,CAAA;IACT,0CAAW,CAAA;IACX,aAAa;IACb,0CAAW,CAAA;IACX,4BAA4B;IAC5B,oCAAQ,CAAA;IACR,mBAAmB;IACnB,8CAAa,CAAA;IACb,8CAAa,CAAA;IACb,wCAAU,CAAA;IACV,sCAAS,CAAA;IACT,wCAAU,CAAA;IACV,8CAAa,CAAA;IACb,wDAAkB,CAAA;IAClB,wDAAkB,CAAA;IAClB,8CAAa,CAAA;IACb,0CAAW,CAAA;IACX,UAAU;IACV,wCAAU,CAAA;AACZ,CAAC,EA1DW,OAAO,GAAP,eAAO,KAAP,eAAO,QA0DlB;AAEY,QAAA,YAAY,GAAG;IAC1B,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,OAAO;IACf,OAAO,CAAC,OAAO;IACf,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,UAAU,CAAC;IACnB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,WAAW;IACnB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,GAAG;IAEX,+CAA+C;IAC/C,OAAO,CAAC,MAAM;CACf,CAAC;AAEW,QAAA,WAAW,GAAG;IACzB,OAAO,CAAC,MAAM;CACf,CAAC;AAEW,QAAA,YAAY,GAAG;IAC1B,OAAO,CAAC,OAAO;IACf,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,aAAa;IACrB,OAAO,CAAC,aAAa;IACrB,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,KAAK;IAEb,cAAc;IACd,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,IAAI;CACb,CAAC;AAEW,QAAA,UAAU,GAAG,iBAAS,CAAC,OAAO,CAAC,CAAC;AAChC,QAAA,YAAY,GAAa,EAAE,CAAC;AAEzC,MAAM,CAAC,IAAI,CAAC,kBAAU,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,EAAE;IACtC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAClB,oBAAY,CAAC,GAAG,CAAC,GAAG,kBAAU,CAAC,GAAG,CAAC,CAAC;KACrC;AACH,CAAC,CAAC,CAAC;AAEH,IAAY,MAIX;AAJD,WAAY,MAAM;IAChB
,mCAAQ,CAAA;IACR,mDAAY,CAAA;IACZ,uCAAM,CAAA;AACR,CAAC,EAJW,MAAM,GAAN,cAAM,KAAN,cAAM,QAIjB;AAMY,QAAA,KAAK,GAAa,EAAE,CAAC;AAElC,KAAK,IAAI,CAAC,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;IAC3D,aAAa;IACb,aAAK,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;IAEnC,aAAa;IACb,aAAK,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;CAC3C;AAEY,QAAA,OAAO,GAAG;IACrB,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IAC5B,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;CAC7B,CAAC;AAEW,QAAA,OAAO,GAAG;IACrB,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IAC5B,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IAC5B,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG;IAC9C,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG;CAC/C,CAAC;AAEW,QAAA,GAAG,GAAa;IAC3B,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG;CACjD,CAAC;AAEW,QAAA,QAAQ,GAAa,aAAK,CAAC,MAAM,CAAC,WAAG,CAAC,CAAC;AACvC,QAAA,IAAI,GAAa,CAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,GAAG,EAAE,GAAG,CAAE,CAAC;AAClE,QAAA,cAAc,GAAa,gBAAQ;KAC7C,MAAM,CAAC,YAAI,CAAC;KACZ,MAAM,CAAC,CAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAE,CAAC,CAAC;AAEtD,yBAAyB;AACZ,QAAA,eAAe,GAAc;IACxC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,IAAI;IAC7B,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG;IACtC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG;IACvB,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG;IAC7B,GAAG;IACH,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG;CACN,CAAC,MAAM,CAAC,gBAAQ,CAAC,CAAC;AAEnB,QAAA,QAAQ,GAAa,uBAAe;KAC9C,MAAM,CAAE,CAAE,IAAI,EAAE,IAAI,CAAe,CAAC,CAAC;AAExC,wCAAwC;AACxC,KAAK,IAAI,CAAC,GAAG,IAAI,EAAE,CAAC,IAAI,IAAI,EAAE,CAAC,EAAE,EAAE;IACjC,gBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;CAClB;AAEY,QAAA,GAAG,GAAa,WAAG,CAAC,MAAM,CACrC,CAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAE,CAAC,CAAC;AAElE;;;;;;GAMG;AACU,QAAA,YAAY,GAAc;IACrC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,IAAI;IAC7B,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG;IAClB,GAAG,EAAE,GAAG,EAAE,GAAG;IACb,GAAG,EAAE,GAAG;CACI,CAAC,MAAM,CAAC,gBAAQ,CAAC,CAAC;AAEnB,QAAA,KAAK,GAAa,oBAAY,CAAC,MAAM,CAAC,CAAE,GAAG,CAAE,CAAC,CAAC;AAE5D;;;GAGG;AACU,QAAA,YAAY,GAAa,CAAE,IAAI,CAAE,CAAC;AAC/C,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,GAAG,EAAE,CAAC,EAAE,EAAE;IAC9B,IAAI,CAAC,KAAK,GAAG,EAAE;QACb,oBAAY,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KACtB;CACF;AAED,aAAa;AACA,QAAA,sBAAsB,GACjC,oBAAY,CAAC,MAAM,CAAC,CAAC,CAAkB,EAAE,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;AAE3C,QAAA,KAAK,GAAG,eAAO,CAAC;AAChB,QAAA,KAAK,GAAG,aAAK,CAAC;AAE3B,IAAY,YAWX;AAXD,WAAY,YAAY;IACtB,qDAAW,CAAA;IACX,2DAAU,CAAA;IACV,mEAAc,CAAA;IACd,yEAAiB,CAAA;IACjB,qDAAO,CAAA;IAEP,iFAAqB,CAAA;IACrB,uEAAgB,CAAA;IAChB,2EAAkB,CAAA;IAClB,yFAAyB,CAAA;AAC3B,CAAC,EAXW,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAWvB;AAEY,QAAA,eAAe,GAAG;IAC7B,YAAY,EAAE,YAAY,CAAC,UAAU;IACrC,gBAAgB,EAAE,YAAY,CAAC,cAAc;IAC7C,kBAAkB,EAAE,YAAY,CAAC,UAAU;IAC3C,mBAAmB,EAAE,YAAY,CAAC,iBAAiB;IACnD,SAAS,EAAE,YAAY,CAAC,OAAO;CAChC,CAAC"} \ No newline at end of file diff --git a/node_modules/undici/lib/llhttp/llhttp-wasm.js 
b/node_modules/undici/lib/llhttp/llhttp-wasm.js new file mode 100644 index 0000000..ad4682c --- /dev/null +++ b/node_modules/undici/lib/llhttp/llhttp-wasm.js @@ -0,0 +1 @@ +module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8L
Qa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQB
HIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEA
wWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQDAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohE
AzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQFqIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsD
QAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIgFqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiE
QDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBAWohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY
7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsA
awBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASEQDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDP
gIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIAB
BADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcIAAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN
0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzbAQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgE
CABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECAAQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAg
DDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5AA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF
/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAAC
IAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtBACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoA
ozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAsgBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRq
IAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQBBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEl
udmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU
9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8=' diff --git a/node_modules/undici/lib/llhttp/llhttp.wasm b/node_modules/undici/lib/llhttp/llhttp.wasm new file mode 100755 index 0000000..fe63282 Binary files /dev/null and b/node_modules/undici/lib/llhttp/llhttp.wasm differ diff --git a/node_modules/undici/lib/llhttp/llhttp_simd-wasm.js b/node_modules/undici/lib/llhttp/llhttp_simd-wasm.js new file mode 100644 index 0000000..017e247 --- /dev/null +++ b/node_modules/undici/lib/llhttp/llhttp_simd-wasm.js @@ -0,0 +1 @@ +module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQ
DAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAE
iECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQC
ANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgA
SIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEB
aiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRA
MpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohAS
AEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAI
RAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIE
IAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCE
QIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4
CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBe
EGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABB
f0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQ
gBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIA
IhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvb
nMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBI
UEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw==' diff --git a/node_modules/undici/lib/llhttp/llhttp_simd.wasm b/node_modules/undici/lib/llhttp/llhttp_simd.wasm new file mode 100755 index 0000000..7aa0b55 Binary files /dev/null and b/node_modules/undici/lib/llhttp/llhttp_simd.wasm differ diff --git a/node_modules/undici/lib/llhttp/utils.d.ts b/node_modules/undici/lib/llhttp/utils.d.ts new file mode 100644 index 0000000..15497f3 --- /dev/null +++ b/node_modules/undici/lib/llhttp/utils.d.ts @@ -0,0 +1,4 @@ +export interface IEnumMap { + [key: string]: number; +} +export declare function enumToMap(obj: any): IEnumMap; diff --git a/node_modules/undici/lib/llhttp/utils.js b/node_modules/undici/lib/llhttp/utils.js new file mode 100644 index 0000000..8a32e56 --- 
/dev/null +++ b/node_modules/undici/lib/llhttp/utils.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.enumToMap = void 0; +function enumToMap(obj) { + const res = {}; + Object.keys(obj).forEach((key) => { + const value = obj[key]; + if (typeof value === 'number') { + res[key] = value; + } + }); + return res; +} +exports.enumToMap = enumToMap; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/undici/lib/llhttp/utils.js.map b/node_modules/undici/lib/llhttp/utils.js.map new file mode 100644 index 0000000..2d7c356 --- /dev/null +++ b/node_modules/undici/lib/llhttp/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/llhttp/utils.ts"],"names":[],"mappings":";;;AAIA,SAAgB,SAAS,CAAC,GAAQ;IAChC,MAAM,GAAG,GAAa,EAAE,CAAC;IAEzB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,EAAE;QAC/B,MAAM,KAAK,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;QACvB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SAClB;IACH,CAAC,CAAC,CAAC;IAEH,OAAO,GAAG,CAAC;AACb,CAAC;AAXD,8BAWC"} \ No newline at end of file diff --git a/node_modules/undici/lib/llhttp/wasm_build_env.txt b/node_modules/undici/lib/llhttp/wasm_build_env.txt new file mode 100644 index 0000000..5f478b5 --- /dev/null +++ b/node_modules/undici/lib/llhttp/wasm_build_env.txt @@ -0,0 +1,32 @@ +alpine-baselayout-data-3.4.0-r0 +musl-1.2.3-r4 +busybox-1.35.0-r29 +busybox-binsh-1.35.0-r29 +alpine-baselayout-3.4.0-r0 +alpine-keys-2.4-r1 +ca-certificates-bundle-20220614-r4 +libcrypto3-3.0.8-r3 +libssl3-3.0.8-r3 +ssl_client-1.35.0-r29 +zlib-1.2.13-r0 +apk-tools-2.12.10-r1 +scanelf-1.3.5-r1 +musl-utils-1.2.3-r4 +libc-utils-0.7.2-r3 +libgcc-12.2.1_git20220924-r4 +libstdc++-12.2.1_git20220924-r4 +libffi-3.4.4-r0 +xz-libs-5.2.9-r0 +libxml2-2.10.4-r0 +zstd-libs-1.5.5-r0 +llvm15-libs-15.0.7-r0 +clang15-libs-15.0.7-r0 +libstdc++-dev-12.2.1_git20220924-r4 +clang15-15.0.7-r0 +lld-libs-15.0.7-r0 +lld-15.0.7-r0 +wasi-libc-0.20220525-r1 +wasi-libcxx-15.0.7-r0 +wasi-libcxxabi-15.0.7-r0 +wasi-compiler-rt-15.0.7-r0 +wasi-sdk-16-r0 diff --git a/node_modules/undici/lib/mock/mock-agent.js b/node_modules/undici/lib/mock/mock-agent.js new file mode 100644 index 0000000..828e8af --- /dev/null +++ b/node_modules/undici/lib/mock/mock-agent.js @@ -0,0 +1,171 @@ +'use strict' + +const { kClients } = require('../core/symbols') +const Agent = require('../agent') +const { + kAgent, + kMockAgentSet, + kMockAgentGet, + kDispatches, + kIsMockActive, + kNetConnect, + kGetNetConnect, + kOptions, + kFactory +} = require('./mock-symbols') +const MockClient = require('./mock-client') +const MockPool = require('./mock-pool') +const { matchValue, buildMockOptions } = require('./mock-utils') +const { InvalidArgumentError, UndiciError } = require('../core/errors') +const Dispatcher = require('../dispatcher') +const Pluralizer = require('./pluralizer') +const PendingInterceptorsFormatter = require('./pending-interceptors-formatter') + +class FakeWeakRef { + constructor (value) { + this.value = value + } + + deref () { + return this.value + } +} + +class MockAgent extends Dispatcher { + constructor (opts) { + super(opts) + + this[kNetConnect] = true + this[kIsMockActive] = true + + // Instantiate Agent and encapsulate + if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + const agent = opts && opts.agent ? 
opts.agent : new Agent(opts) + this[kAgent] = agent + + this[kClients] = agent[kClients] + this[kOptions] = buildMockOptions(opts) + } + + get (origin) { + let dispatcher = this[kMockAgentGet](origin) + + if (!dispatcher) { + dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + } + return dispatcher + } + + dispatch (opts, handler) { + // Call MockAgent.get to perform additional setup before dispatching as normal + this.get(opts.origin) + return this[kAgent].dispatch(opts, handler) + } + + async close () { + await this[kAgent].close() + this[kClients].clear() + } + + deactivate () { + this[kIsMockActive] = false + } + + activate () { + this[kIsMockActive] = true + } + + enableNetConnect (matcher) { + if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) { + if (Array.isArray(this[kNetConnect])) { + this[kNetConnect].push(matcher) + } else { + this[kNetConnect] = [matcher] + } + } else if (typeof matcher === 'undefined') { + this[kNetConnect] = true + } else { + throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.') + } + } + + disableNetConnect () { + this[kNetConnect] = false + } + + // This is required to bypass issues caused by using global symbols - see: + // https://github.com/nodejs/undici/issues/1447 + get isMockActive () { + return this[kIsMockActive] + } + + [kMockAgentSet] (origin, dispatcher) { + this[kClients].set(origin, new FakeWeakRef(dispatcher)) + } + + [kFactory] (origin) { + const mockOptions = Object.assign({ agent: this }, this[kOptions]) + return this[kOptions] && this[kOptions].connections === 1 + ? new MockClient(origin, mockOptions) + : new MockPool(origin, mockOptions) + } + + [kMockAgentGet] (origin) { + // First check if we can immediately find it + const ref = this[kClients].get(origin) + if (ref) { + return ref.deref() + } + + // If the origin is not a string create a dummy parent pool and return to user + if (typeof origin !== 'string') { + const dispatcher = this[kFactory]('http://localhost:9999') + this[kMockAgentSet](origin, dispatcher) + return dispatcher + } + + // If we match, create a pool and assign the same dispatches + for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) { + const nonExplicitDispatcher = nonExplicitRef.deref() + if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) { + const dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches] + return dispatcher + } + } + } + + [kGetNetConnect] () { + return this[kNetConnect] + } + + pendingInterceptors () { + const mockAgentClients = this[kClients] + + return Array.from(mockAgentClients.entries()) + .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin }))) + .filter(({ pending }) => pending) + } + + assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) { + const pending = this.pendingInterceptors() + + if (pending.length === 0) { + return + } + + const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length) + + throw new UndiciError(` +${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending: + +${pendingInterceptorsFormatter.format(pending)} +`.trim()) + } +} + +module.exports = MockAgent diff --git a/node_modules/undici/lib/mock/mock-client.js b/node_modules/undici/lib/mock/mock-client.js new file mode 100644 index 
0000000..5f31215 --- /dev/null +++ b/node_modules/undici/lib/mock/mock-client.js @@ -0,0 +1,59 @@ +'use strict' + +const { promisify } = require('util') +const Client = require('../client') +const { buildMockDispatch } = require('./mock-utils') +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = require('./mock-symbols') +const { MockInterceptor } = require('./mock-interceptor') +const Symbols = require('../core/symbols') +const { InvalidArgumentError } = require('../core/errors') + +/** + * MockClient provides an API that extends the Client to influence the mockDispatches. + */ +class MockClient extends Client { + constructor (origin, opts) { + super(origin, opts) + + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) + + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] + } + + get [Symbols.kConnected] () { + return this[kConnected] + } + + /** + * Sets up the base interceptor for mocking replies from undici. + */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) + } +} + +module.exports = MockClient diff --git a/node_modules/undici/lib/mock/mock-errors.js b/node_modules/undici/lib/mock/mock-errors.js new file mode 100644 index 0000000..5442c0e --- /dev/null +++ b/node_modules/undici/lib/mock/mock-errors.js @@ -0,0 +1,17 @@ +'use strict' + +const { UndiciError } = require('../core/errors') + +class MockNotMatchedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, MockNotMatchedError) + this.name = 'MockNotMatchedError' + this.message = message || 'The request does not match any registered mock dispatches' + this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED' + } +} + +module.exports = { + MockNotMatchedError +} diff --git a/node_modules/undici/lib/mock/mock-interceptor.js b/node_modules/undici/lib/mock/mock-interceptor.js new file mode 100644 index 0000000..781e477 --- /dev/null +++ b/node_modules/undici/lib/mock/mock-interceptor.js @@ -0,0 +1,206 @@ +'use strict' + +const { getResponseData, buildKey, addMockDispatch } = require('./mock-utils') +const { + kDispatches, + kDispatchKey, + kDefaultHeaders, + kDefaultTrailers, + kContentLength, + kMockDispatch +} = require('./mock-symbols') +const { InvalidArgumentError } = require('../core/errors') +const { buildURL } = require('../core/util') + +/** + * Defines the scope API for an interceptor reply + */ +class MockScope { + constructor (mockDispatch) { + this[kMockDispatch] = mockDispatch + } + + /** + * Delay a reply by a set amount in ms. + */ + delay (waitInMs) { + if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) { + throw new InvalidArgumentError('waitInMs must be a valid integer > 0') + } + + this[kMockDispatch].delay = waitInMs + return this + } + + /** + * For a defined reply, never mark as consumed. + */ + persist () { + this[kMockDispatch].persist = true + return this + } + + /** + * Allow one to define a reply for a set amount of matching requests. 
+ */ + times (repeatTimes) { + if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) { + throw new InvalidArgumentError('repeatTimes must be a valid integer > 0') + } + + this[kMockDispatch].times = repeatTimes + return this + } +} + +/** + * Defines an interceptor for a Mock + */ +class MockInterceptor { + constructor (opts, mockDispatches) { + if (typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object') + } + if (typeof opts.path === 'undefined') { + throw new InvalidArgumentError('opts.path must be defined') + } + if (typeof opts.method === 'undefined') { + opts.method = 'GET' + } + // See https://github.com/nodejs/undici/issues/1245 + // As per RFC 3986, clients are not supposed to send URI + // fragments to servers when they retrieve a document, + if (typeof opts.path === 'string') { + if (opts.query) { + opts.path = buildURL(opts.path, opts.query) + } else { + // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811 + const parsedURL = new URL(opts.path, 'data://') + opts.path = parsedURL.pathname + parsedURL.search + } + } + if (typeof opts.method === 'string') { + opts.method = opts.method.toUpperCase() + } + + this[kDispatchKey] = buildKey(opts) + this[kDispatches] = mockDispatches + this[kDefaultHeaders] = {} + this[kDefaultTrailers] = {} + this[kContentLength] = false + } + + createMockScopeDispatchData (statusCode, data, responseOptions = {}) { + const responseData = getResponseData(data) + const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {} + const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers } + const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers } + + return { statusCode, data, headers, trailers } + } + + validateReplyParameters (statusCode, data, responseOptions) { + if (typeof statusCode === 'undefined') { + throw new InvalidArgumentError('statusCode must be defined') + } + if (typeof data === 'undefined') { + throw new InvalidArgumentError('data must be defined') + } + if (typeof responseOptions !== 'object') { + throw new InvalidArgumentError('responseOptions must be an object') + } + } + + /** + * Mock an undici request with a defined reply. + */ + reply (replyData) { + // Values of reply aren't available right now as they + // can only be available when the reply callback is invoked. + if (typeof replyData === 'function') { + // We'll first wrap the provided callback in another function, + // this function will properly resolve the data from the callback + // when invoked. + const wrappedDefaultsCallback = (opts) => { + // Our reply options callback contains the parameter for statusCode, data and options. + const resolvedData = replyData(opts) + + // Check if it is in the right format + if (typeof resolvedData !== 'object') { + throw new InvalidArgumentError('reply options callback must return an object') + } + + const { statusCode, data = '', responseOptions = {} } = resolvedData + this.validateReplyParameters(statusCode, data, responseOptions) + // Since the values can be obtained immediately we return them + // from this higher order function that will be resolved later. + return { + ...this.createMockScopeDispatchData(statusCode, data, responseOptions) + } + } + + // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data. 
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback) + return new MockScope(newMockDispatch) + } + + // We can have either one or three parameters, if we get here, + // we should have 1-3 parameters. So we spread the arguments of + // this function to obtain the parameters, since replyData will always + // just be the statusCode. + const [statusCode, data = '', responseOptions = {}] = [...arguments] + this.validateReplyParameters(statusCode, data, responseOptions) + + // Send in-already provided data like usual + const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions) + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData) + return new MockScope(newMockDispatch) + } + + /** + * Mock an undici request with a defined error. + */ + replyWithError (error) { + if (typeof error === 'undefined') { + throw new InvalidArgumentError('error must be defined') + } + + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }) + return new MockScope(newMockDispatch) + } + + /** + * Set default reply headers on the interceptor for subsequent replies + */ + defaultReplyHeaders (headers) { + if (typeof headers === 'undefined') { + throw new InvalidArgumentError('headers must be defined') + } + + this[kDefaultHeaders] = headers + return this + } + + /** + * Set default reply trailers on the interceptor for subsequent replies + */ + defaultReplyTrailers (trailers) { + if (typeof trailers === 'undefined') { + throw new InvalidArgumentError('trailers must be defined') + } + + this[kDefaultTrailers] = trailers + return this + } + + /** + * Set reply content length header for replies on the interceptor + */ + replyContentLength () { + this[kContentLength] = true + return this + } +} + +module.exports.MockInterceptor = MockInterceptor +module.exports.MockScope = MockScope diff --git a/node_modules/undici/lib/mock/mock-pool.js b/node_modules/undici/lib/mock/mock-pool.js new file mode 100644 index 0000000..0a3a7cd --- /dev/null +++ b/node_modules/undici/lib/mock/mock-pool.js @@ -0,0 +1,59 @@ +'use strict' + +const { promisify } = require('util') +const Pool = require('../pool') +const { buildMockDispatch } = require('./mock-utils') +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = require('./mock-symbols') +const { MockInterceptor } = require('./mock-interceptor') +const Symbols = require('../core/symbols') +const { InvalidArgumentError } = require('../core/errors') + +/** + * MockPool provides an API that extends the Pool to influence the mockDispatches. + */ +class MockPool extends Pool { + constructor (origin, opts) { + super(origin, opts) + + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) + + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] + } + + get [Symbols.kConnected] () { + return this[kConnected] + } + + /** + * Sets up the base interceptor for mocking replies from undici. 
+ */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) + } +} + +module.exports = MockPool diff --git a/node_modules/undici/lib/mock/mock-symbols.js b/node_modules/undici/lib/mock/mock-symbols.js new file mode 100644 index 0000000..8c4cbb6 --- /dev/null +++ b/node_modules/undici/lib/mock/mock-symbols.js @@ -0,0 +1,23 @@ +'use strict' + +module.exports = { + kAgent: Symbol('agent'), + kOptions: Symbol('options'), + kFactory: Symbol('factory'), + kDispatches: Symbol('dispatches'), + kDispatchKey: Symbol('dispatch key'), + kDefaultHeaders: Symbol('default headers'), + kDefaultTrailers: Symbol('default trailers'), + kContentLength: Symbol('content length'), + kMockAgent: Symbol('mock agent'), + kMockAgentSet: Symbol('mock agent set'), + kMockAgentGet: Symbol('mock agent get'), + kMockDispatch: Symbol('mock dispatch'), + kClose: Symbol('close'), + kOriginalClose: Symbol('original agent close'), + kOrigin: Symbol('origin'), + kIsMockActive: Symbol('is mock active'), + kNetConnect: Symbol('net connect'), + kGetNetConnect: Symbol('get net connect'), + kConnected: Symbol('connected') +} diff --git a/node_modules/undici/lib/mock/mock-utils.js b/node_modules/undici/lib/mock/mock-utils.js new file mode 100644 index 0000000..42ea185 --- /dev/null +++ b/node_modules/undici/lib/mock/mock-utils.js @@ -0,0 +1,351 @@ +'use strict' + +const { MockNotMatchedError } = require('./mock-errors') +const { + kDispatches, + kMockAgent, + kOriginalDispatch, + kOrigin, + kGetNetConnect +} = require('./mock-symbols') +const { buildURL, nop } = require('../core/util') +const { STATUS_CODES } = require('http') +const { + types: { + isPromise + } +} = require('util') + +function matchValue (match, value) { + if (typeof match === 'string') { + return match === value + } + if (match instanceof RegExp) { + return match.test(value) + } + if (typeof match === 'function') { + return match(value) === true + } + return false +} + +function lowerCaseEntries (headers) { + return Object.fromEntries( + Object.entries(headers).map(([headerName, headerValue]) => { + return [headerName.toLocaleLowerCase(), headerValue] + }) + ) +} + +/** + * @param {import('../../index').Headers|string[]|Record} headers + * @param {string} key + */ +function getHeaderByName (headers, key) { + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) { + return headers[i + 1] + } + } + + return undefined + } else if (typeof headers.get === 'function') { + return headers.get(key) + } else { + return lowerCaseEntries(headers)[key.toLocaleLowerCase()] + } +} + +/** @param {string[]} headers */ +function buildHeadersFromArray (headers) { // fetch HeadersList + const clone = headers.slice() + const entries = [] + for (let index = 0; index < clone.length; index += 2) { + entries.push([clone[index], clone[index + 1]]) + } + return Object.fromEntries(entries) +} + +function matchHeaders (mockDispatch, headers) { + if (typeof mockDispatch.headers === 'function') { + if (Array.isArray(headers)) { // fetch HeadersList + headers = buildHeadersFromArray(headers) + } + return mockDispatch.headers(headers ? 
lowerCaseEntries(headers) : {}) + } + if (typeof mockDispatch.headers === 'undefined') { + return true + } + if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') { + return false + } + + for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) { + const headerValue = getHeaderByName(headers, matchHeaderName) + + if (!matchValue(matchHeaderValue, headerValue)) { + return false + } + } + return true +} + +function safeUrl (path) { + if (typeof path !== 'string') { + return path + } + + const pathSegments = path.split('?') + + if (pathSegments.length !== 2) { + return path + } + + const qp = new URLSearchParams(pathSegments.pop()) + qp.sort() + return [...pathSegments, qp.toString()].join('?') +} + +function matchKey (mockDispatch, { path, method, body, headers }) { + const pathMatch = matchValue(mockDispatch.path, path) + const methodMatch = matchValue(mockDispatch.method, method) + const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true + const headersMatch = matchHeaders(mockDispatch, headers) + return pathMatch && methodMatch && bodyMatch && headersMatch +} + +function getResponseData (data) { + if (Buffer.isBuffer(data)) { + return data + } else if (typeof data === 'object') { + return JSON.stringify(data) + } else { + return data.toString() + } +} + +function getMockDispatch (mockDispatches, key) { + const basePath = key.query ? buildURL(key.path, key.query) : key.path + const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath + + // Match path + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`) + } + + // Match method + matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`) + } + + // Match body + matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`) + } + + // Match headers + matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`) + } + + return matchedMockDispatches[0] +} + +function addMockDispatch (mockDispatches, key, data) { + const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false } + const replyData = typeof data === 'function' ? 
{ callback: data } : { ...data } + const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } } + mockDispatches.push(newMockDispatch) + return newMockDispatch +} + +function deleteMockDispatch (mockDispatches, key) { + const index = mockDispatches.findIndex(dispatch => { + if (!dispatch.consumed) { + return false + } + return matchKey(dispatch, key) + }) + if (index !== -1) { + mockDispatches.splice(index, 1) + } +} + +function buildKey (opts) { + const { path, method, body, headers, query } = opts + return { + path, + method, + body, + headers, + query + } +} + +function generateKeyValues (data) { + return Object.entries(data).reduce((keyValuePairs, [key, value]) => [ + ...keyValuePairs, + Buffer.from(`${key}`), + Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`) + ], []) +} + +/** + * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status + * @param {number} statusCode + */ +function getStatusText (statusCode) { + return STATUS_CODES[statusCode] || 'unknown' +} + +async function getResponse (body) { + const buffers = [] + for await (const data of body) { + buffers.push(data) + } + return Buffer.concat(buffers).toString('utf8') +} + +/** + * Mock dispatch function used to simulate undici dispatches + */ +function mockDispatch (opts, handler) { + // Get mock dispatch from built key + const key = buildKey(opts) + const mockDispatch = getMockDispatch(this[kDispatches], key) + + mockDispatch.timesInvoked++ + + // Here's where we resolve a callback if a callback is present for the dispatch data. + if (mockDispatch.data.callback) { + mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) } + } + + // Parse mockDispatch data + const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch + const { timesInvoked, times } = mockDispatch + + // If it's used up and not persistent, mark as consumed + mockDispatch.consumed = !persist && timesInvoked >= times + mockDispatch.pending = timesInvoked < times + + // If specified, trigger dispatch error + if (error !== null) { + deleteMockDispatch(this[kDispatches], key) + handler.onError(error) + return true + } + + // Handle the request with a delay if necessary + if (typeof delay === 'number' && delay > 0) { + setTimeout(() => { + handleReply(this[kDispatches]) + }, delay) + } else { + handleReply(this[kDispatches]) + } + + function handleReply (mockDispatches, _data = data) { + // fetch's HeadersList is a 1D string array + const optsHeaders = Array.isArray(opts.headers) + ? buildHeadersFromArray(opts.headers) + : opts.headers + const body = typeof _data === 'function' + ? _data({ ...opts, headers: optsHeaders }) + : _data + + // util.types.isPromise is likely needed for jest. + if (isPromise(body)) { + // If handleReply is asynchronous, throwing an error + // in the callback will reject the promise, rather than + // synchronously throw the error, which breaks some tests. + // Rather, we wait for the callback to resolve if it is a + // promise, and then re-run handleReply with the new body. 
+ body.then((newData) => handleReply(mockDispatches, newData)) + return + } + + const responseData = getResponseData(body) + const responseHeaders = generateKeyValues(headers) + const responseTrailers = generateKeyValues(trailers) + + handler.abort = nop + handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)) + handler.onData(Buffer.from(responseData)) + handler.onComplete(responseTrailers) + deleteMockDispatch(mockDispatches, key) + } + + function resume () {} + + return true +} + +function buildMockDispatch () { + const agent = this[kMockAgent] + const origin = this[kOrigin] + const originalDispatch = this[kOriginalDispatch] + + return function dispatch (opts, handler) { + if (agent.isMockActive) { + try { + mockDispatch.call(this, opts, handler) + } catch (error) { + if (error instanceof MockNotMatchedError) { + const netConnect = agent[kGetNetConnect]() + if (netConnect === false) { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`) + } + if (checkNetConnect(netConnect, origin)) { + originalDispatch.call(this, opts, handler) + } else { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`) + } + } else { + throw error + } + } + } else { + originalDispatch.call(this, opts, handler) + } + } +} + +function checkNetConnect (netConnect, origin) { + const url = new URL(origin) + if (netConnect === true) { + return true + } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) { + return true + } + return false +} + +function buildMockOptions (opts) { + if (opts) { + const { agent, ...mockOptions } = opts + return mockOptions + } +} + +module.exports = { + getResponseData, + getMockDispatch, + addMockDispatch, + deleteMockDispatch, + buildKey, + generateKeyValues, + matchValue, + getResponse, + getStatusText, + mockDispatch, + buildMockDispatch, + checkNetConnect, + buildMockOptions, + getHeaderByName +} diff --git a/node_modules/undici/lib/mock/pending-interceptors-formatter.js b/node_modules/undici/lib/mock/pending-interceptors-formatter.js new file mode 100644 index 0000000..1bc7539 --- /dev/null +++ b/node_modules/undici/lib/mock/pending-interceptors-formatter.js @@ -0,0 +1,40 @@ +'use strict' + +const { Transform } = require('stream') +const { Console } = require('console') + +/** + * Gets the output of `console.table(…)` as a string. + */ +module.exports = class PendingInterceptorsFormatter { + constructor ({ disableColors } = {}) { + this.transform = new Transform({ + transform (chunk, _enc, cb) { + cb(null, chunk) + } + }) + + this.logger = new Console({ + stdout: this.transform, + inspectOptions: { + colors: !disableColors && !process.env.CI + } + }) + } + + format (pendingInterceptors) { + const withPrettyHeaders = pendingInterceptors.map( + ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + Method: method, + Origin: origin, + Path: path, + 'Status code': statusCode, + Persistent: persist ? '✅' : '❌', + Invocations: timesInvoked, + Remaining: persist ? 
Infinity : times - timesInvoked + })) + + this.logger.table(withPrettyHeaders) + return this.transform.read().toString() + } +} diff --git a/node_modules/undici/lib/mock/pluralizer.js b/node_modules/undici/lib/mock/pluralizer.js new file mode 100644 index 0000000..47f150b --- /dev/null +++ b/node_modules/undici/lib/mock/pluralizer.js @@ -0,0 +1,29 @@ +'use strict' + +const singulars = { + pronoun: 'it', + is: 'is', + was: 'was', + this: 'this' +} + +const plurals = { + pronoun: 'they', + is: 'are', + was: 'were', + this: 'these' +} + +module.exports = class Pluralizer { + constructor (singular, plural) { + this.singular = singular + this.plural = plural + } + + pluralize (count) { + const one = count === 1 + const keys = one ? singulars : plurals + const noun = one ? this.singular : this.plural + return { ...keys, count, noun } + } +} diff --git a/node_modules/undici/lib/node/fixed-queue.js b/node_modules/undici/lib/node/fixed-queue.js new file mode 100644 index 0000000..3572681 --- /dev/null +++ b/node_modules/undici/lib/node/fixed-queue.js @@ -0,0 +1,117 @@ +/* eslint-disable */ + +'use strict' + +// Extracted from node/lib/internal/fixed_queue.js + +// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two. +const kSize = 2048; +const kMask = kSize - 1; + +// The FixedQueue is implemented as a singly-linked list of fixed-size +// circular buffers. It looks something like this: +// +// head tail +// | | +// v v +// +-----------+ <-----\ +-----------+ <------\ +-----------+ +// | [null] | \----- | next | \------- | next | +// +-----------+ +-----------+ +-----------+ +// | item | <-- bottom | item | <-- bottom | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | bottom --> | item | +// | item | | item | | item | +// | ... | | ... | | ... | +// | item | | item | | item | +// | item | | item | | item | +// | [empty] | <-- top | item | | item | +// | [empty] | | item | | item | +// | [empty] | | [empty] | <-- top top --> | [empty] | +// +-----------+ +-----------+ +-----------+ +// +// Or, if there is only one circular buffer, it looks something +// like either of these: +// +// head tail head tail +// | | | | +// v v v v +// +-----------+ +-----------+ +// | [null] | | [null] | +// +-----------+ +-----------+ +// | [empty] | | item | +// | [empty] | | item | +// | item | <-- bottom top --> | [empty] | +// | item | | [empty] | +// | [empty] | <-- top bottom --> | item | +// | [empty] | | item | +// +-----------+ +-----------+ +// +// Adding a value means moving `top` forward by one, removing means +// moving `bottom` forward by one. After reaching the end, the queue +// wraps around. +// +// When `top === bottom` the current queue is empty and when +// `top + 1 === bottom` it's full. This wastes a single space of storage +// but allows much quicker checks. 
+ +class FixedCircularBuffer { + constructor() { + this.bottom = 0; + this.top = 0; + this.list = new Array(kSize); + this.next = null; + } + + isEmpty() { + return this.top === this.bottom; + } + + isFull() { + return ((this.top + 1) & kMask) === this.bottom; + } + + push(data) { + this.list[this.top] = data; + this.top = (this.top + 1) & kMask; + } + + shift() { + const nextItem = this.list[this.bottom]; + if (nextItem === undefined) + return null; + this.list[this.bottom] = undefined; + this.bottom = (this.bottom + 1) & kMask; + return nextItem; + } +} + +module.exports = class FixedQueue { + constructor() { + this.head = this.tail = new FixedCircularBuffer(); + } + + isEmpty() { + return this.head.isEmpty(); + } + + push(data) { + if (this.head.isFull()) { + // Head is full: Creates a new queue, sets the old queue's `.next` to it, + // and sets it as the new main queue. + this.head = this.head.next = new FixedCircularBuffer(); + } + this.head.push(data); + } + + shift() { + const tail = this.tail; + const next = tail.shift(); + if (tail.isEmpty() && tail.next !== null) { + // If there is another queue, it forms the new tail. + this.tail = tail.next; + } + return next; + } +}; diff --git a/node_modules/undici/lib/pool-base.js b/node_modules/undici/lib/pool-base.js new file mode 100644 index 0000000..2a909ee --- /dev/null +++ b/node_modules/undici/lib/pool-base.js @@ -0,0 +1,194 @@ +'use strict' + +const DispatcherBase = require('./dispatcher-base') +const FixedQueue = require('./node/fixed-queue') +const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('./core/symbols') +const PoolStats = require('./pool-stats') + +const kClients = Symbol('clients') +const kNeedDrain = Symbol('needDrain') +const kQueue = Symbol('queue') +const kClosedResolve = Symbol('closed resolve') +const kOnDrain = Symbol('onDrain') +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kGetDispatcher = Symbol('get dispatcher') +const kAddClient = Symbol('add client') +const kRemoveClient = Symbol('remove client') +const kStats = Symbol('stats') + +class PoolBase extends DispatcherBase { + constructor () { + super() + + this[kQueue] = new FixedQueue() + this[kClients] = [] + this[kQueued] = 0 + + const pool = this + + this[kOnDrain] = function onDrain (origin, targets) { + const queue = pool[kQueue] + + let needDrain = false + + while (!needDrain) { + const item = queue.shift() + if (!item) { + break + } + pool[kQueued]-- + needDrain = !this.dispatch(item.opts, item.handler) + } + + this[kNeedDrain] = needDrain + + if (!this[kNeedDrain] && pool[kNeedDrain]) { + pool[kNeedDrain] = false + pool.emit('drain', origin, [pool, ...targets]) + } + + if (pool[kClosedResolve] && queue.isEmpty()) { + Promise + .all(pool[kClients].map(c => c.close())) + .then(pool[kClosedResolve]) + } + } + + this[kOnConnect] = (origin, targets) => { + pool.emit('connect', origin, [pool, ...targets]) + } + + this[kOnDisconnect] = (origin, targets, err) => { + pool.emit('disconnect', origin, [pool, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + pool.emit('connectionError', origin, [pool, ...targets], err) + } + + this[kStats] = new PoolStats(this) + } + + get [kBusy] () { + return this[kNeedDrain] + } + + get [kConnected] () { + return this[kClients].filter(client => client[kConnected]).length + } + + get [kFree] () { + return this[kClients].filter(client => 
client[kConnected] && !client[kNeedDrain]).length + } + + get [kPending] () { + let ret = this[kQueued] + for (const { [kPending]: pending } of this[kClients]) { + ret += pending + } + return ret + } + + get [kRunning] () { + let ret = 0 + for (const { [kRunning]: running } of this[kClients]) { + ret += running + } + return ret + } + + get [kSize] () { + let ret = this[kQueued] + for (const { [kSize]: size } of this[kClients]) { + ret += size + } + return ret + } + + get stats () { + return this[kStats] + } + + async [kClose] () { + if (this[kQueue].isEmpty()) { + return Promise.all(this[kClients].map(c => c.close())) + } else { + return new Promise((resolve) => { + this[kClosedResolve] = resolve + }) + } + } + + async [kDestroy] (err) { + while (true) { + const item = this[kQueue].shift() + if (!item) { + break + } + item.handler.onError(err) + } + + return Promise.all(this[kClients].map(c => c.destroy(err))) + } + + [kDispatch] (opts, handler) { + const dispatcher = this[kGetDispatcher]() + + if (!dispatcher) { + this[kNeedDrain] = true + this[kQueue].push({ opts, handler }) + this[kQueued]++ + } else if (!dispatcher.dispatch(opts, handler)) { + dispatcher[kNeedDrain] = true + this[kNeedDrain] = !this[kGetDispatcher]() + } + + return !this[kNeedDrain] + } + + [kAddClient] (client) { + client + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].push(client) + + if (this[kNeedDrain]) { + process.nextTick(() => { + if (this[kNeedDrain]) { + this[kOnDrain](client[kUrl], [this, client]) + } + }) + } + + return this + } + + [kRemoveClient] (client) { + client.close(() => { + const idx = this[kClients].indexOf(client) + if (idx !== -1) { + this[kClients].splice(idx, 1) + } + }) + + this[kNeedDrain] = this[kClients].some(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) + } +} + +module.exports = { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} diff --git a/node_modules/undici/lib/pool-stats.js b/node_modules/undici/lib/pool-stats.js new file mode 100644 index 0000000..b4af8ae --- /dev/null +++ b/node_modules/undici/lib/pool-stats.js @@ -0,0 +1,34 @@ +const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('./core/symbols') +const kPool = Symbol('pool') + +class PoolStats { + constructor (pool) { + this[kPool] = pool + } + + get connected () { + return this[kPool][kConnected] + } + + get free () { + return this[kPool][kFree] + } + + get pending () { + return this[kPool][kPending] + } + + get queued () { + return this[kPool][kQueued] + } + + get running () { + return this[kPool][kRunning] + } + + get size () { + return this[kPool][kSize] + } +} + +module.exports = PoolStats diff --git a/node_modules/undici/lib/pool.js b/node_modules/undici/lib/pool.js new file mode 100644 index 0000000..e3cd339 --- /dev/null +++ b/node_modules/undici/lib/pool.js @@ -0,0 +1,94 @@ +'use strict' + +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kGetDispatcher +} = require('./pool-base') +const Client = require('./client') +const { + InvalidArgumentError +} = require('./core/errors') +const util = require('./core/util') +const { kUrl, kInterceptors } = require('./core/symbols') +const buildConnector = require('./core/connect') + +const kOptions = Symbol('options') +const kConnections = Symbol('connections') +const kFactory = Symbol('factory') + +function 
defaultFactory (origin, opts) { + return new Client(origin, opts) +} + +class Pool extends PoolBase { + constructor (origin, { + connections, + factory = defaultFactory, + connect, + connectTimeout, + tls, + maxCachedSessions, + socketPath, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + allowH2, + ...options + } = {}) { + super() + + if (connections != null && (!Number.isFinite(connections) || connections < 0)) { + throw new InvalidArgumentError('invalid connections') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } + + this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) + ? options.interceptors.Pool + : [] + this[kConnections] = connections || null + this[kUrl] = util.parseOrigin(origin) + this[kOptions] = { ...util.deepClone(options), connect, allowH2 } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kFactory] = factory + } + + [kGetDispatcher] () { + let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain]) + + if (dispatcher) { + return dispatcher + } + + if (!this[kConnections] || this[kClients].length < this[kConnections]) { + dispatcher = this[kFactory](this[kUrl], this[kOptions]) + this[kAddClient](dispatcher) + } + + return dispatcher + } +} + +module.exports = Pool diff --git a/node_modules/undici/lib/proxy-agent.js b/node_modules/undici/lib/proxy-agent.js new file mode 100644 index 0000000..e3c0f6f --- /dev/null +++ b/node_modules/undici/lib/proxy-agent.js @@ -0,0 +1,189 @@ +'use strict' + +const { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols') +const { URL } = require('url') +const Agent = require('./agent') +const Pool = require('./pool') +const DispatcherBase = require('./dispatcher-base') +const { InvalidArgumentError, RequestAbortedError } = require('./core/errors') +const buildConnector = require('./core/connect') + +const kAgent = Symbol('proxy agent') +const kClient = Symbol('proxy client') +const kProxyHeaders = Symbol('proxy headers') +const kRequestTls = Symbol('request tls settings') +const kProxyTls = Symbol('proxy tls settings') +const kConnectEndpoint = Symbol('connect endpoint function') + +function defaultProtocolPort (protocol) { + return protocol === 'https:' ? 443 : 80 +} + +function buildProxyOptions (opts) { + if (typeof opts === 'string') { + opts = { uri: opts } + } + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } + + return { + uri: opts.uri, + protocol: opts.protocol || 'https' + } +} + +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} + +class ProxyAgent extends DispatcherBase { + constructor (opts) { + super(opts) + this[kProxy] = buildProxyOptions(opts) + this[kAgent] = new Agent(opts) + this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent) + ? 
opts.interceptors.ProxyAgent + : [] + + if (typeof opts === 'string') { + opts = { uri: opts } + } + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } + + const { clientFactory = defaultFactory } = opts + + if (typeof clientFactory !== 'function') { + throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.') + } + + this[kRequestTls] = opts.requestTls + this[kProxyTls] = opts.proxyTls + this[kProxyHeaders] = opts.headers || {} + + const resolvedUrl = new URL(opts.uri) + const { origin, port, host, username, password } = resolvedUrl + + if (opts.auth && opts.token) { + throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') + } else if (opts.auth) { + /* @deprecated in favour of opts.token */ + this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` + } else if (opts.token) { + this[kProxyHeaders]['proxy-authorization'] = opts.token + } else if (username && password) { + this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` + } + + const connect = buildConnector({ ...opts.proxyTls }) + this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) + this[kClient] = clientFactory(resolvedUrl, { connect }) + this[kAgent] = new Agent({ + ...opts, + connect: async (opts, callback) => { + let requestedHost = opts.host + if (!opts.port) { + requestedHost += `:${defaultProtocolPort(opts.protocol)}` + } + try { + const { socket, statusCode } = await this[kClient].connect({ + origin, + port, + path: requestedHost, + signal: opts.signal, + headers: { + ...this[kProxyHeaders], + host + } + }) + if (statusCode !== 200) { + socket.on('error', () => {}).destroy() + callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) + } + if (opts.protocol !== 'https:') { + callback(null, socket) + return + } + let servername + if (this[kRequestTls]) { + servername = this[kRequestTls].servername + } else { + servername = opts.servername + } + this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) + } catch (err) { + callback(err) + } + } + }) + } + + dispatch (opts, handler) { + const { host } = new URL(opts.origin) + const headers = buildHeaders(opts.headers) + throwIfProxyAuthIsSent(headers) + return this[kAgent].dispatch( + { + ...opts, + headers: { + ...headers, + host + } + }, + handler + ) + } + + async [kClose] () { + await this[kAgent].close() + await this[kClient].close() + } + + async [kDestroy] () { + await this[kAgent].destroy() + await this[kClient].destroy() + } +} + +/** + * @param {string[] | Record} headers + * @returns {Record} + */ +function buildHeaders (headers) { + // When using undici.fetch, the headers list is stored + // as an array. + if (Array.isArray(headers)) { + /** @type {Record} */ + const headersPair = {} + + for (let i = 0; i < headers.length; i += 2) { + headersPair[headers[i]] = headers[i + 1] + } + + return headersPair + } + + return headers +} + +/** + * @param {Record} headers + * + * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers + * Nevertheless, it was changed and to avoid a security vulnerability by end users + * this check was created. 
+ * It should be removed in the next major version for performance reasons + */ +function throwIfProxyAuthIsSent (headers) { + const existProxyAuth = headers && Object.keys(headers) + .find((key) => key.toLowerCase() === 'proxy-authorization') + if (existProxyAuth) { + throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor') + } +} + +module.exports = ProxyAgent diff --git a/node_modules/undici/lib/timers.js b/node_modules/undici/lib/timers.js new file mode 100644 index 0000000..5782217 --- /dev/null +++ b/node_modules/undici/lib/timers.js @@ -0,0 +1,97 @@ +'use strict' + +let fastNow = Date.now() +let fastNowTimeout + +const fastTimers = [] + +function onTimeout () { + fastNow = Date.now() + + let len = fastTimers.length + let idx = 0 + while (idx < len) { + const timer = fastTimers[idx] + + if (timer.state === 0) { + timer.state = fastNow + timer.delay + } else if (timer.state > 0 && fastNow >= timer.state) { + timer.state = -1 + timer.callback(timer.opaque) + } + + if (timer.state === -1) { + timer.state = -2 + if (idx !== len - 1) { + fastTimers[idx] = fastTimers.pop() + } else { + fastTimers.pop() + } + len -= 1 + } else { + idx += 1 + } + } + + if (fastTimers.length > 0) { + refreshTimeout() + } +} + +function refreshTimeout () { + if (fastNowTimeout && fastNowTimeout.refresh) { + fastNowTimeout.refresh() + } else { + clearTimeout(fastNowTimeout) + fastNowTimeout = setTimeout(onTimeout, 1e3) + if (fastNowTimeout.unref) { + fastNowTimeout.unref() + } + } +} + +class Timeout { + constructor (callback, delay, opaque) { + this.callback = callback + this.delay = delay + this.opaque = opaque + + // -2 not in timer list + // -1 in timer list but inactive + // 0 in timer list waiting for time + // > 0 in timer list waiting for time to expire + this.state = -2 + + this.refresh() + } + + refresh () { + if (this.state === -2) { + fastTimers.push(this) + if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout() + } + } + + this.state = 0 + } + + clear () { + this.state = -1 + } +} + +module.exports = { + setTimeout (callback, delay, opaque) { + return delay < 1e3 + ? 
setTimeout(callback, delay, opaque) + : new Timeout(callback, delay, opaque) + }, + clearTimeout (timeout) { + if (timeout instanceof Timeout) { + timeout.clear() + } else { + clearTimeout(timeout) + } + } +} diff --git a/node_modules/undici/lib/websocket/connection.js b/node_modules/undici/lib/websocket/connection.js new file mode 100644 index 0000000..e0fa697 --- /dev/null +++ b/node_modules/undici/lib/websocket/connection.js @@ -0,0 +1,291 @@ +'use strict' + +const diagnosticsChannel = require('diagnostics_channel') +const { uid, states } = require('./constants') +const { + kReadyState, + kSentClose, + kByteParser, + kReceivedClose +} = require('./symbols') +const { fireEvent, failWebsocketConnection } = require('./util') +const { CloseEvent } = require('./events') +const { makeRequest } = require('../fetch/request') +const { fetching } = require('../fetch/index') +const { Headers } = require('../fetch/headers') +const { getGlobalDispatcher } = require('../global') +const { kHeadersList } = require('../core/symbols') + +const channels = {} +channels.open = diagnosticsChannel.channel('undici:websocket:open') +channels.close = diagnosticsChannel.channel('undici:websocket:close') +channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') + +/** @type {import('crypto')} */ +let crypto +try { + crypto = require('crypto') +} catch { + +} + +/** + * @see https://websockets.spec.whatwg.org/#concept-websocket-establish + * @param {URL} url + * @param {string|string[]} protocols + * @param {import('./websocket').WebSocket} ws + * @param {(response: any) => void} onEstablish + * @param {Partial} options + */ +function establishWebSocketConnection (url, protocols, ws, onEstablish, options) { + // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s + // scheme is "ws", and to "https" otherwise. + const requestURL = url + + requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:' + + // 2. Let request be a new request, whose URL is requestURL, client is client, + // service-workers mode is "none", referrer is "no-referrer", mode is + // "websocket", credentials mode is "include", cache mode is "no-store" , + // and redirect mode is "error". + const request = makeRequest({ + urlList: [requestURL], + serviceWorkers: 'none', + referrer: 'no-referrer', + mode: 'websocket', + credentials: 'include', + cache: 'no-store', + redirect: 'error' + }) + + // Note: undici extension, allow setting custom headers. + if (options.headers) { + const headersList = new Headers(options.headers)[kHeadersList] + + request.headersList = headersList + } + + // 3. Append (`Upgrade`, `websocket`) to request’s header list. + // 4. Append (`Connection`, `Upgrade`) to request’s header list. + // Note: both of these are handled by undici currently. + // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397 + + // 5. Let keyValue be a nonce consisting of a randomly selected + // 16-byte value that has been forgiving-base64-encoded and + // isomorphic encoded. + const keyValue = crypto.randomBytes(16).toString('base64') + + // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s + // header list. + request.headersList.append('sec-websocket-key', keyValue) + + // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s + // header list. + request.headersList.append('sec-websocket-version', '13') + + // 8. For each protocol in protocols, combine + // (`Sec-WebSocket-Protocol`, protocol) in request’s header + // list. 
+ for (const protocol of protocols) { + request.headersList.append('sec-websocket-protocol', protocol) + } + + // 9. Let permessageDeflate be a user-agent defined + // "permessage-deflate" extension header value. + // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673 + // TODO: enable once permessage-deflate is supported + const permessageDeflate = '' // 'permessage-deflate; 15' + + // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to + // request’s header list. + // request.headersList.append('sec-websocket-extensions', permessageDeflate) + + // 11. Fetch request with useParallelQueue set to true, and + // processResponse given response being these steps: + const controller = fetching({ + request, + useParallelQueue: true, + dispatcher: options.dispatcher ?? getGlobalDispatcher(), + processResponse (response) { + // 1. If response is a network error or its status is not 101, + // fail the WebSocket connection. + if (response.type === 'error' || response.status !== 101) { + failWebsocketConnection(ws, 'Received network error or non-101 status code.') + return + } + + // 2. If protocols is not the empty list and extracting header + // list values given `Sec-WebSocket-Protocol` and response’s + // header list results in null, failure, or the empty byte + // sequence, then fail the WebSocket connection. + if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Server did not respond with sent protocols.') + return + } + + // 3. Follow the requirements stated step 2 to step 6, inclusive, + // of the last set of steps in section 4.1 of The WebSocket + // Protocol to validate response. This either results in fail + // the WebSocket connection or the WebSocket connection is + // established. + + // 2. If the response lacks an |Upgrade| header field or the |Upgrade| + // header field contains a value that is not an ASCII case- + // insensitive match for the value "websocket", the client MUST + // _Fail the WebSocket Connection_. + if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') { + failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".') + return + } + + // 3. If the response lacks a |Connection| header field or the + // |Connection| header field doesn't contain a token that is an + // ASCII case-insensitive match for the value "Upgrade", the client + // MUST _Fail the WebSocket Connection_. + if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') { + failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".') + return + } + + // 4. If the response lacks a |Sec-WebSocket-Accept| header field or + // the |Sec-WebSocket-Accept| contains a value other than the + // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket- + // Key| (as a string, not base64-decoded) with the string "258EAFA5- + // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and + // trailing whitespace, the client MUST _Fail the WebSocket + // Connection_. + const secWSAccept = response.headersList.get('Sec-WebSocket-Accept') + const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64') + if (secWSAccept !== digest) { + failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.') + return + } + + // 5. 
If the response includes a |Sec-WebSocket-Extensions| header + // field and this header field indicates the use of an extension + // that was not present in the client's handshake (the server has + // indicated an extension not requested by the client), the client + // MUST _Fail the WebSocket Connection_. (The parsing of this + // header field to determine which extensions are requested is + // discussed in Section 9.1.) + const secExtension = response.headersList.get('Sec-WebSocket-Extensions') + + if (secExtension !== null && secExtension !== permessageDeflate) { + failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.') + return + } + + // 6. If the response includes a |Sec-WebSocket-Protocol| header field + // and this header field indicates the use of a subprotocol that was + // not present in the client's handshake (the server has indicated a + // subprotocol not requested by the client), the client MUST _Fail + // the WebSocket Connection_. + const secProtocol = response.headersList.get('Sec-WebSocket-Protocol') + + if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.') + return + } + + response.socket.on('data', onSocketData) + response.socket.on('close', onSocketClose) + response.socket.on('error', onSocketError) + + if (channels.open.hasSubscribers) { + channels.open.publish({ + address: response.socket.address(), + protocol: secProtocol, + extensions: secExtension + }) + } + + onEstablish(response) + } + }) + + return controller +} + +/** + * @param {Buffer} chunk + */ +function onSocketData (chunk) { + if (!this.ws[kByteParser].write(chunk)) { + this.pause() + } +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4 + */ +function onSocketClose () { + const { ws } = this + + // If the TCP connection was closed after the + // WebSocket closing handshake was completed, the WebSocket connection + // is said to have been closed _cleanly_. + const wasClean = ws[kSentClose] && ws[kReceivedClose] + + let code = 1005 + let reason = '' + + const result = ws[kByteParser].closingInfo + + if (result) { + code = result.code ?? 1005 + reason = result.reason + } else if (!ws[kSentClose]) { + // If _The WebSocket + // Connection is Closed_ and no Close control frame was received by the + // endpoint (such as could occur if the underlying transport connection + // is lost), _The WebSocket Connection Close Code_ is considered to be + // 1006. + code = 1006 + } + + // 1. Change the ready state to CLOSED (3). + ws[kReadyState] = states.CLOSED + + // 2. If the user agent was required to fail the WebSocket + // connection, or if the WebSocket connection was closed + // after being flagged as full, fire an event named error + // at the WebSocket object. + // TODO + + // 3. Fire an event named close at the WebSocket object, + // using CloseEvent, with the wasClean attribute + // initialized to true if the connection closed cleanly + // and false otherwise, the code attribute initialized to + // the WebSocket connection close code, and the reason + // attribute initialized to the result of applying UTF-8 + // decode without BOM to the WebSocket connection close + // reason. 
+ fireEvent('close', ws, CloseEvent, { + wasClean, code, reason + }) + + if (channels.close.hasSubscribers) { + channels.close.publish({ + websocket: ws, + code, + reason + }) + } +} + +function onSocketError (error) { + const { ws } = this + + ws[kReadyState] = states.CLOSING + + if (channels.socketError.hasSubscribers) { + channels.socketError.publish(error) + } + + this.destroy() +} + +module.exports = { + establishWebSocketConnection +} diff --git a/node_modules/undici/lib/websocket/constants.js b/node_modules/undici/lib/websocket/constants.js new file mode 100644 index 0000000..406b8e3 --- /dev/null +++ b/node_modules/undici/lib/websocket/constants.js @@ -0,0 +1,51 @@ +'use strict' + +// This is a Globally Unique Identifier unique used +// to validate that the endpoint accepts websocket +// connections. +// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3 +const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' + +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +const states = { + CONNECTING: 0, + OPEN: 1, + CLOSING: 2, + CLOSED: 3 +} + +const opcodes = { + CONTINUATION: 0x0, + TEXT: 0x1, + BINARY: 0x2, + CLOSE: 0x8, + PING: 0x9, + PONG: 0xA +} + +const maxUnsigned16Bit = 2 ** 16 - 1 // 65535 + +const parserStates = { + INFO: 0, + PAYLOADLENGTH_16: 2, + PAYLOADLENGTH_64: 3, + READ_DATA: 4 +} + +const emptyBuffer = Buffer.allocUnsafe(0) + +module.exports = { + uid, + staticPropertyDescriptors, + states, + opcodes, + maxUnsigned16Bit, + parserStates, + emptyBuffer +} diff --git a/node_modules/undici/lib/websocket/events.js b/node_modules/undici/lib/websocket/events.js new file mode 100644 index 0000000..621a226 --- /dev/null +++ b/node_modules/undici/lib/websocket/events.js @@ -0,0 +1,303 @@ +'use strict' + +const { webidl } = require('../fetch/webidl') +const { kEnumerableProperty } = require('../core/util') +const { MessagePort } = require('worker_threads') + +/** + * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent + */ +class MessageEvent extends Event { + #eventInit + + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' }) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.MessageEventInit(eventInitDict) + + super(type, eventInitDict) + + this.#eventInit = eventInitDict + } + + get data () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.data + } + + get origin () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.origin + } + + get lastEventId () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.lastEventId + } + + get source () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.source + } + + get ports () { + webidl.brandCheck(this, MessageEvent) + + if (!Object.isFrozen(this.#eventInit.ports)) { + Object.freeze(this.#eventInit.ports) + } + + return this.#eventInit.ports + } + + initMessageEvent ( + type, + bubbles = false, + cancelable = false, + data = null, + origin = '', + lastEventId = '', + source = null, + ports = [] + ) { + webidl.brandCheck(this, MessageEvent) + + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' }) + + return new MessageEvent(type, { + bubbles, cancelable, data, origin, lastEventId, source, ports + }) + } +} + +/** + * @see https://websockets.spec.whatwg.org/#the-closeevent-interface + */ +class CloseEvent extends Event { + #eventInit + 
+ constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' }) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.CloseEventInit(eventInitDict) + + super(type, eventInitDict) + + this.#eventInit = eventInitDict + } + + get wasClean () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.wasClean + } + + get code () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.code + } + + get reason () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.reason + } +} + +// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface +class ErrorEvent extends Event { + #eventInit + + constructor (type, eventInitDict) { + webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' }) + + super(type, eventInitDict) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {}) + + this.#eventInit = eventInitDict + } + + get message () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.message + } + + get filename () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.filename + } + + get lineno () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.lineno + } + + get colno () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.colno + } + + get error () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.error + } +} + +Object.defineProperties(MessageEvent.prototype, { + [Symbol.toStringTag]: { + value: 'MessageEvent', + configurable: true + }, + data: kEnumerableProperty, + origin: kEnumerableProperty, + lastEventId: kEnumerableProperty, + source: kEnumerableProperty, + ports: kEnumerableProperty, + initMessageEvent: kEnumerableProperty +}) + +Object.defineProperties(CloseEvent.prototype, { + [Symbol.toStringTag]: { + value: 'CloseEvent', + configurable: true + }, + reason: kEnumerableProperty, + code: kEnumerableProperty, + wasClean: kEnumerableProperty +}) + +Object.defineProperties(ErrorEvent.prototype, { + [Symbol.toStringTag]: { + value: 'ErrorEvent', + configurable: true + }, + message: kEnumerableProperty, + filename: kEnumerableProperty, + lineno: kEnumerableProperty, + colno: kEnumerableProperty, + error: kEnumerableProperty +}) + +webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.MessagePort +) + +const eventInit = [ + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +] + +webidl.converters.MessageEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'data', + converter: webidl.converters.any, + defaultValue: null + }, + { + key: 'origin', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lastEventId', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'source', + // Node doesn't implement WindowProxy or ServiceWorker, so the only + // valid value for source is a MessagePort. 
+ converter: webidl.nullableConverter(webidl.converters.MessagePort), + defaultValue: null + }, + { + key: 'ports', + converter: webidl.converters['sequence'], + get defaultValue () { + return [] + } + } +]) + +webidl.converters.CloseEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'wasClean', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'code', + converter: webidl.converters['unsigned short'], + defaultValue: 0 + }, + { + key: 'reason', + converter: webidl.converters.USVString, + defaultValue: '' + } +]) + +webidl.converters.ErrorEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'message', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'filename', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lineno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'colno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'error', + converter: webidl.converters.any + } +]) + +module.exports = { + MessageEvent, + CloseEvent, + ErrorEvent +} diff --git a/node_modules/undici/lib/websocket/frame.js b/node_modules/undici/lib/websocket/frame.js new file mode 100644 index 0000000..d867ad1 --- /dev/null +++ b/node_modules/undici/lib/websocket/frame.js @@ -0,0 +1,73 @@ +'use strict' + +const { maxUnsigned16Bit } = require('./constants') + +/** @type {import('crypto')} */ +let crypto +try { + crypto = require('crypto') +} catch { + +} + +class WebsocketFrameSend { + /** + * @param {Buffer|undefined} data + */ + constructor (data) { + this.frameData = data + this.maskKey = crypto.randomBytes(4) + } + + createFrame (opcode) { + const bodyLength = this.frameData?.byteLength ?? 0 + + /** @type {number} */ + let payloadLength = bodyLength // 0-125 + let offset = 6 + + if (bodyLength > maxUnsigned16Bit) { + offset += 8 // payload length is next 8 bytes + payloadLength = 127 + } else if (bodyLength > 125) { + offset += 2 // payload length is next 2 bytes + payloadLength = 126 + } + + const buffer = Buffer.allocUnsafe(bodyLength + offset) + + // Clear first 2 bytes, everything else is overwritten + buffer[0] = buffer[1] = 0 + buffer[0] |= 0x80 // FIN + buffer[0] = (buffer[0] & 0xF0) + opcode // opcode + + /*! ws. MIT License. 
Einar Otto Stangvik */ + buffer[offset - 4] = this.maskKey[0] + buffer[offset - 3] = this.maskKey[1] + buffer[offset - 2] = this.maskKey[2] + buffer[offset - 1] = this.maskKey[3] + + buffer[1] = payloadLength + + if (payloadLength === 126) { + buffer.writeUInt16BE(bodyLength, 2) + } else if (payloadLength === 127) { + // Clear extended payload length + buffer[2] = buffer[3] = 0 + buffer.writeUIntBE(bodyLength, 4, 6) + } + + buffer[1] |= 0x80 // MASK + + // mask body + for (let i = 0; i < bodyLength; i++) { + buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4] + } + + return buffer + } +} + +module.exports = { + WebsocketFrameSend +} diff --git a/node_modules/undici/lib/websocket/receiver.js b/node_modules/undici/lib/websocket/receiver.js new file mode 100644 index 0000000..bdd2031 --- /dev/null +++ b/node_modules/undici/lib/websocket/receiver.js @@ -0,0 +1,344 @@ +'use strict' + +const { Writable } = require('stream') +const diagnosticsChannel = require('diagnostics_channel') +const { parserStates, opcodes, states, emptyBuffer } = require('./constants') +const { kReadyState, kSentClose, kResponse, kReceivedClose } = require('./symbols') +const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = require('./util') +const { WebsocketFrameSend } = require('./frame') + +// This code was influenced by ws released under the MIT license. +// Copyright (c) 2011 Einar Otto Stangvik +// Copyright (c) 2013 Arnout Kazemier and contributors +// Copyright (c) 2016 Luigi Pinca and contributors + +const channels = {} +channels.ping = diagnosticsChannel.channel('undici:websocket:ping') +channels.pong = diagnosticsChannel.channel('undici:websocket:pong') + +class ByteParser extends Writable { + #buffers = [] + #byteOffset = 0 + + #state = parserStates.INFO + + #info = {} + #fragments = [] + + constructor (ws) { + super() + + this.ws = ws + } + + /** + * @param {Buffer} chunk + * @param {() => void} callback + */ + _write (chunk, _, callback) { + this.#buffers.push(chunk) + this.#byteOffset += chunk.length + + this.run(callback) + } + + /** + * Runs whenever a new chunk is received. + * Callback is called whenever there are no more chunks buffering, + * or not enough bytes are buffered to parse. + */ + run (callback) { + while (true) { + if (this.#state === parserStates.INFO) { + // If there aren't enough bytes to parse the payload length, etc. 
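+ // (A frame header is at least 2 bytes per RFC 6455 section 5.2: byte 0
+ // carries FIN (bit 7), RSV1-3 (bits 6-4) and the opcode (bits 3-0);
+ // byte 1 carries the MASK bit (bit 7) and a 7-bit payload length, where
+ // the values 126 and 127 signal that a 16-bit or 64-bit extended length
+ // follows in the next bytes.)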
+ if (this.#byteOffset < 2) { + return callback() + } + + const buffer = this.consume(2) + + this.#info.fin = (buffer[0] & 0x80) !== 0 + this.#info.opcode = buffer[0] & 0x0F + + // If we receive a fragmented message, we use the type of the first + // frame to parse the full message as binary/text, when it's terminated + this.#info.originalOpcode ??= this.#info.opcode + + this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION + + if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) { + // Only text and binary frames can be fragmented + failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') + return + } + + const payloadLength = buffer[1] & 0x7F + + if (payloadLength <= 125) { + this.#info.payloadLength = payloadLength + this.#state = parserStates.READ_DATA + } else if (payloadLength === 126) { + this.#state = parserStates.PAYLOADLENGTH_16 + } else if (payloadLength === 127) { + this.#state = parserStates.PAYLOADLENGTH_64 + } + + if (this.#info.fragmented && payloadLength > 125) { + // A fragmented frame can't be fragmented itself + failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.') + return + } else if ( + (this.#info.opcode === opcodes.PING || + this.#info.opcode === opcodes.PONG || + this.#info.opcode === opcodes.CLOSE) && + payloadLength > 125 + ) { + // Control frames can have a payload length of 125 bytes MAX + failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') + return + } else if (this.#info.opcode === opcodes.CLOSE) { + if (payloadLength === 1) { + failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.') + return + } + + const body = this.consume(payloadLength) + + this.#info.closeInfo = this.parseCloseBody(false, body) + + if (!this.ws[kSentClose]) { + // If an endpoint receives a Close frame and did not previously send a + // Close frame, the endpoint MUST send a Close frame in response. (When + // sending a Close frame in response, the endpoint typically echos the + // status code it received.) + const body = Buffer.allocUnsafe(2) + body.writeUInt16BE(this.#info.closeInfo.code, 0) + const closeFrame = new WebsocketFrameSend(body) + + this.ws[kResponse].socket.write( + closeFrame.createFrame(opcodes.CLOSE), + (err) => { + if (!err) { + this.ws[kSentClose] = true + } + } + ) + } + + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this.ws[kReadyState] = states.CLOSING + this.ws[kReceivedClose] = true + + this.end() + + return + } else if (this.#info.opcode === opcodes.PING) { + // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in + // response, unless it already received a Close frame. + // A Pong frame sent in response to a Ping frame must have identical + // "Application data" + + const body = this.consume(payloadLength) + + if (!this.ws[kReceivedClose]) { + const frame = new WebsocketFrameSend(body) + + this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG)) + + if (channels.ping.hasSubscribers) { + channels.ping.publish({ + payload: body + }) + } + } + + this.#state = parserStates.INFO + + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } else if (this.#info.opcode === opcodes.PONG) { + // A Pong frame MAY be sent unsolicited. This serves as a + // unidirectional heartbeat. 
A response to an unsolicited Pong frame is + // not expected. + + const body = this.consume(payloadLength) + + if (channels.pong.hasSubscribers) { + channels.pong.publish({ + payload: body + }) + } + + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } + } else if (this.#state === parserStates.PAYLOADLENGTH_16) { + if (this.#byteOffset < 2) { + return callback() + } + + const buffer = this.consume(2) + + this.#info.payloadLength = buffer.readUInt16BE(0) + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.PAYLOADLENGTH_64) { + if (this.#byteOffset < 8) { + return callback() + } + + const buffer = this.consume(8) + const upper = buffer.readUInt32BE(0) + + // 2^31 is the maxinimum bytes an arraybuffer can contain + // on 32-bit systems. Although, on 64-bit systems, this is + // 2^53-1 bytes. + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275 + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e + if (upper > 2 ** 31 - 1) { + failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.') + return + } + + const lower = buffer.readUInt32BE(4) + + this.#info.payloadLength = (upper << 8) + lower + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.READ_DATA) { + if (this.#byteOffset < this.#info.payloadLength) { + // If there is still more data in this chunk that needs to be read + return callback() + } else if (this.#byteOffset >= this.#info.payloadLength) { + // If the server sent multiple frames in a single chunk + + const body = this.consume(this.#info.payloadLength) + + this.#fragments.push(body) + + // If the frame is unfragmented, or a fragmented frame was terminated, + // a message was received + if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) { + const fullMessage = Buffer.concat(this.#fragments) + + websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage) + + this.#info = {} + this.#fragments.length = 0 + } + + this.#state = parserStates.INFO + } + } + + if (this.#byteOffset > 0) { + continue + } else { + callback() + break + } + } + } + + /** + * Take n bytes from the buffered Buffers + * @param {number} n + * @returns {Buffer|null} + */ + consume (n) { + if (n > this.#byteOffset) { + return null + } else if (n === 0) { + return emptyBuffer + } + + if (this.#buffers[0].length === n) { + this.#byteOffset -= this.#buffers[0].length + return this.#buffers.shift() + } + + const buffer = Buffer.allocUnsafe(n) + let offset = 0 + + while (offset !== n) { + const next = this.#buffers[0] + const { length } = next + + if (length + offset === n) { + buffer.set(this.#buffers.shift(), offset) + break + } else if (length + offset > n) { + buffer.set(next.subarray(0, n - offset), offset) + this.#buffers[0] = next.subarray(n - offset) + break + } else { + buffer.set(this.#buffers.shift(), offset) + offset += next.length + } + } + + this.#byteOffset -= n + + return buffer + } + + parseCloseBody (onlyCode, data) { + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 + /** @type {number|undefined} */ + let code + + if (data.length >= 2) { + // _The WebSocket Connection Close Code_ is + // defined as the status code (Section 7.4) contained in the first 
Close + // control frame received by the application + code = data.readUInt16BE(0) + } + + if (onlyCode) { + if (!isValidStatusCode(code)) { + return null + } + + return { code } + } + + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6 + /** @type {Buffer} */ + let reason = data.subarray(2) + + // Remove BOM + if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) { + reason = reason.subarray(3) + } + + if (code !== undefined && !isValidStatusCode(code)) { + return null + } + + try { + // TODO: optimize this + reason = new TextDecoder('utf-8', { fatal: true }).decode(reason) + } catch { + return null + } + + return { code, reason } + } + + get closingInfo () { + return this.#info.closeInfo + } +} + +module.exports = { + ByteParser +} diff --git a/node_modules/undici/lib/websocket/symbols.js b/node_modules/undici/lib/websocket/symbols.js new file mode 100644 index 0000000..11d03e3 --- /dev/null +++ b/node_modules/undici/lib/websocket/symbols.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = { + kWebSocketURL: Symbol('url'), + kReadyState: Symbol('ready state'), + kController: Symbol('controller'), + kResponse: Symbol('response'), + kBinaryType: Symbol('binary type'), + kSentClose: Symbol('sent close'), + kReceivedClose: Symbol('received close'), + kByteParser: Symbol('byte parser') +} diff --git a/node_modules/undici/lib/websocket/util.js b/node_modules/undici/lib/websocket/util.js new file mode 100644 index 0000000..6c59b2c --- /dev/null +++ b/node_modules/undici/lib/websocket/util.js @@ -0,0 +1,200 @@ +'use strict' + +const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = require('./symbols') +const { states, opcodes } = require('./constants') +const { MessageEvent, ErrorEvent } = require('./events') + +/* globals Blob */ + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isEstablished (ws) { + // If the server's response is validated as provided for above, it is + // said that _The WebSocket Connection is Established_ and that the + // WebSocket Connection is in the OPEN state. + return ws[kReadyState] === states.OPEN +} + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosing (ws) { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + return ws[kReadyState] === states.CLOSING +} + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosed (ws) { + return ws[kReadyState] === states.CLOSED +} + +/** + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e + * @param {EventTarget} target + * @param {EventInit | undefined} eventInitDict + */ +function fireEvent (e, target, eventConstructor = Event, eventInitDict) { + // 1. If eventConstructor is not given, then let eventConstructor be Event. + + // 2. Let event be the result of creating an event given eventConstructor, + // in the relevant realm of target. + // 3. Initialize event’s type attribute to e. + const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap + + // 4. Initialize any other IDL attributes of event as described in the + // invocation of this algorithm. + + // 5. Return the result of dispatching event at target, with legacy target + // override flag set if set. 
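+ // Note: dispatchEvent runs synchronously, so both addEventListener
+ // callbacks and the on* handler properties (which are registered via
+ // addEventListener) have already run by the time fireEvent returns.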
+ target.dispatchEvent(event) +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @param {import('./websocket').WebSocket} ws + * @param {number} type Opcode + * @param {Buffer} data application data + */ +function websocketMessageReceived (ws, type, data) { + // 1. If ready state is not OPEN (1), then return. + if (ws[kReadyState] !== states.OPEN) { + return + } + + // 2. Let dataForEvent be determined by switching on type and binary type: + let dataForEvent + + if (type === opcodes.TEXT) { + // -> type indicates that the data is Text + // a new DOMString containing data + try { + dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data) + } catch { + failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.') + return + } + } else if (type === opcodes.BINARY) { + if (ws[kBinaryType] === 'blob') { + // -> type indicates that the data is Binary and binary type is "blob" + // a new Blob object, created in the relevant Realm of the WebSocket + // object, that represents data as its raw data + dataForEvent = new Blob([data]) + } else { + // -> type indicates that the data is Binary and binary type is "arraybuffer" + // a new ArrayBuffer object, created in the relevant Realm of the + // WebSocket object, whose contents are data + dataForEvent = new Uint8Array(data).buffer + } + } + + // 3. Fire an event named message at the WebSocket object, using MessageEvent, + // with the origin attribute initialized to the serialization of the WebSocket + // object’s url's origin, and the data attribute initialized to dataForEvent. + fireEvent('message', ws, MessageEvent, { + origin: ws[kWebSocketURL].origin, + data: dataForEvent + }) +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455 + * @see https://datatracker.ietf.org/doc/html/rfc2616 + * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407 + * @param {string} protocol + */ +function isValidSubprotocol (protocol) { + // If present, this value indicates one + // or more comma-separated subprotocol the client wishes to speak, + // ordered by preference. The elements that comprise this value + // MUST be non-empty strings with characters in the range U+0021 to + // U+007E not including separator characters as defined in + // [RFC2616] and MUST all be unique strings. + if (protocol.length === 0) { + return false + } + + for (const char of protocol) { + const code = char.charCodeAt(0) + + if ( + code < 0x21 || + code > 0x7E || + char === '(' || + char === ')' || + char === '<' || + char === '>' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' 
|| + char === '=' || + char === '{' || + char === '}' || + code === 32 || // SP + code === 9 // HT + ) { + return false + } + } + + return true +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4 + * @param {number} code + */ +function isValidStatusCode (code) { + if (code >= 1000 && code < 1015) { + return ( + code !== 1004 && // reserved + code !== 1005 && // "MUST NOT be set as a status code" + code !== 1006 // "MUST NOT be set as a status code" + ) + } + + return code >= 3000 && code <= 4999 +} + +/** + * @param {import('./websocket').WebSocket} ws + * @param {string|undefined} reason + */ +function failWebsocketConnection (ws, reason) { + const { [kController]: controller, [kResponse]: response } = ws + + controller.abort() + + if (response?.socket && !response.socket.destroyed) { + response.socket.destroy() + } + + if (reason) { + fireEvent('error', ws, ErrorEvent, { + error: new Error(reason) + }) + } +} + +module.exports = { + isEstablished, + isClosing, + isClosed, + fireEvent, + isValidSubprotocol, + isValidStatusCode, + failWebsocketConnection, + websocketMessageReceived +} diff --git a/node_modules/undici/lib/websocket/websocket.js b/node_modules/undici/lib/websocket/websocket.js new file mode 100644 index 0000000..e4aa58f --- /dev/null +++ b/node_modules/undici/lib/websocket/websocket.js @@ -0,0 +1,641 @@ +'use strict' + +const { webidl } = require('../fetch/webidl') +const { DOMException } = require('../fetch/constants') +const { URLSerializer } = require('../fetch/dataURL') +const { getGlobalOrigin } = require('../fetch/global') +const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = require('./constants') +const { + kWebSocketURL, + kReadyState, + kController, + kBinaryType, + kResponse, + kSentClose, + kByteParser +} = require('./symbols') +const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = require('./util') +const { establishWebSocketConnection } = require('./connection') +const { WebsocketFrameSend } = require('./frame') +const { ByteParser } = require('./receiver') +const { kEnumerableProperty, isBlobLike } = require('../core/util') +const { getGlobalDispatcher } = require('../global') +const { types } = require('util') + +let experimentalWarned = false + +// https://websockets.spec.whatwg.org/#interface-definition +class WebSocket extends EventTarget { + #events = { + open: null, + error: null, + close: null, + message: null + } + + #bufferedAmount = 0 + #protocol = '' + #extensions = '' + + /** + * @param {string} url + * @param {string|string[]} protocols + */ + constructor (url, protocols = []) { + super() + + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' }) + + if (!experimentalWarned) { + experimentalWarned = true + process.emitWarning('WebSockets are experimental, expect them to change at any time.', { + code: 'UNDICI-WS' + }) + } + + const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols) + + url = webidl.converters.USVString(url) + protocols = options.protocols + + // 1. Let baseURL be this's relevant settings object's API base URL. + const baseURL = getGlobalOrigin() + + // 1. Let urlRecord be the result of applying the URL parser to url with baseURL. + let urlRecord + + try { + urlRecord = new URL(url, baseURL) + } catch (e) { + // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException. + throw new DOMException(e, 'SyntaxError') + } + + // 4. 
If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws". + if (urlRecord.protocol === 'http:') { + urlRecord.protocol = 'ws:' + } else if (urlRecord.protocol === 'https:') { + // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss". + urlRecord.protocol = 'wss:' + } + + // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException. + if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') { + throw new DOMException( + `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, + 'SyntaxError' + ) + } + + // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError" + // DOMException. + if (urlRecord.hash || urlRecord.href.endsWith('#')) { + throw new DOMException('Got fragment', 'SyntaxError') + } + + // 8. If protocols is a string, set protocols to a sequence consisting + // of just that string. + if (typeof protocols === 'string') { + protocols = [protocols] + } + + // 9. If any of the values in protocols occur more than once or otherwise + // fail to match the requirements for elements that comprise the value + // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket + // protocol, then throw a "SyntaxError" DOMException. + if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } + + if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } + + // 10. Set this's url to urlRecord. + this[kWebSocketURL] = new URL(urlRecord.href) + + // 11. Let client be this's relevant settings object. + + // 12. Run this step in parallel: + + // 1. Establish a WebSocket connection given urlRecord, protocols, + // and client. + this[kController] = establishWebSocketConnection( + urlRecord, + protocols, + this, + (response) => this.#onConnectionEstablished(response), + options + ) + + // Each WebSocket object has an associated ready state, which is a + // number representing the state of the connection. Initially it must + // be CONNECTING (0). + this[kReadyState] = WebSocket.CONNECTING + + // The extensions attribute must initially return the empty string. + + // The protocol attribute must initially return the empty string. + + // Each WebSocket object has an associated binary type, which is a + // BinaryType. Initially it must be "blob". + this[kBinaryType] = 'blob' + } + + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-close + * @param {number|undefined} code + * @param {string|undefined} reason + */ + close (code = undefined, reason = undefined) { + webidl.brandCheck(this, WebSocket) + + if (code !== undefined) { + code = webidl.converters['unsigned short'](code, { clamp: true }) + } + + if (reason !== undefined) { + reason = webidl.converters.USVString(reason) + } + + // 1. If code is present, but is neither an integer equal to 1000 nor an + // integer in the range 3000 to 4999, inclusive, throw an + // "InvalidAccessError" DOMException. + if (code !== undefined) { + if (code !== 1000 && (code < 3000 || code > 4999)) { + throw new DOMException('invalid code', 'InvalidAccessError') + } + } + + let reasonByteLength = 0 + + // 2. If reason is present, then run these substeps: + if (reason !== undefined) { + // 1. Let reasonBytes be the result of encoding reason. + // 2. If reasonBytes is longer than 123 bytes, then throw a + // "SyntaxError" DOMException. 
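+ // (The 123-byte limit comes from RFC 6455: a control frame may carry at
+ // most 125 bytes of payload, and the Close frame reserves the first two
+ // bytes of that payload for the status code.)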
+ reasonByteLength = Buffer.byteLength(reason) + + if (reasonByteLength > 123) { + throw new DOMException( + `Reason must be less than 123 bytes; received ${reasonByteLength}`, + 'SyntaxError' + ) + } + } + + // 3. Run the first matching steps from the following list: + if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) { + // If this's ready state is CLOSING (2) or CLOSED (3) + // Do nothing. + } else if (!isEstablished(this)) { + // If the WebSocket connection is not yet established + // Fail the WebSocket connection and set this's ready state + // to CLOSING (2). + failWebsocketConnection(this, 'Connection was closed before it was established.') + this[kReadyState] = WebSocket.CLOSING + } else if (!isClosing(this)) { + // If the WebSocket closing handshake has not yet been started + // Start the WebSocket closing handshake and set this's ready + // state to CLOSING (2). + // - If neither code nor reason is present, the WebSocket Close + // message must not have a body. + // - If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + // - If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + + const frame = new WebsocketFrameSend() + + // If neither code nor reason is present, the WebSocket Close + // message must not have a body. + + // If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + if (code !== undefined && reason === undefined) { + frame.frameData = Buffer.allocUnsafe(2) + frame.frameData.writeUInt16BE(code, 0) + } else if (code !== undefined && reason !== undefined) { + // If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) + frame.frameData.writeUInt16BE(code, 0) + // the body MAY contain UTF-8-encoded data with value /reason/ + frame.frameData.write(reason, 2, 'utf-8') + } else { + frame.frameData = emptyBuffer + } + + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket + + socket.write(frame.createFrame(opcodes.CLOSE), (err) => { + if (!err) { + this[kSentClose] = true + } + }) + + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this[kReadyState] = states.CLOSING + } else { + // Otherwise + // Set this's ready state to CLOSING (2). + this[kReadyState] = WebSocket.CLOSING + } + } + + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-send + * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data + */ + send (data) { + webidl.brandCheck(this, WebSocket) + + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' }) + + data = webidl.converters.WebSocketSendData(data) + + // 1. If this's ready state is CONNECTING, then throw an + // "InvalidStateError" DOMException. + if (this[kReadyState] === WebSocket.CONNECTING) { + throw new DOMException('Sent before connected.', 'InvalidStateError') + } + + // 2. 
Run the appropriate set of steps from the following list: + // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1 + // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + + if (!isEstablished(this) || isClosing(this)) { + return + } + + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket + + // If data is a string + if (typeof data === 'string') { + // If the WebSocket connection is established and the WebSocket + // closing handshake has not yet started, then the user agent + // must send a WebSocket Message comprised of the data argument + // using a text frame opcode; if the data cannot be sent, e.g. + // because it would need to be buffered but the buffer is full, + // the user agent must flag the WebSocket as full and then close + // the WebSocket connection. Any invocation of this method with a + // string argument that does not throw an exception must increase + // the bufferedAmount attribute by the number of bytes needed to + // express the argument as UTF-8. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.TEXT) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (types.isArrayBuffer(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need + // to be buffered but the buffer is full, the user agent must flag + // the WebSocket as full and then close the WebSocket connection. + // The data to be sent is the data stored in the buffer described + // by the ArrayBuffer object. Any invocation of this method with an + // ArrayBuffer argument that does not throw an exception must + // increase the bufferedAmount attribute by the length of the + // ArrayBuffer in bytes. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (ArrayBuffer.isView(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The + // data to be sent is the data stored in the section of the buffer + // described by the ArrayBuffer object that data references. Any + // invocation of this method with this kind of argument that does + // not throw an exception must increase the bufferedAmount attribute + // by the length of data’s buffer in bytes. 
+ + const ab = Buffer.from(data, data.byteOffset, data.byteLength) + + const frame = new WebsocketFrameSend(ab) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += ab.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= ab.byteLength + }) + } else if (isBlobLike(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The data + // to be sent is the raw data represented by the Blob object. Any + // invocation of this method with a Blob argument that does not throw + // an exception must increase the bufferedAmount attribute by the size + // of the Blob object’s raw data, in bytes. + + const frame = new WebsocketFrameSend() + + data.arrayBuffer().then((ab) => { + const value = Buffer.from(ab) + frame.frameData = value + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + }) + } + } + + get readyState () { + webidl.brandCheck(this, WebSocket) + + // The readyState getter steps are to return this's ready state. + return this[kReadyState] + } + + get bufferedAmount () { + webidl.brandCheck(this, WebSocket) + + return this.#bufferedAmount + } + + get url () { + webidl.brandCheck(this, WebSocket) + + // The url getter steps are to return this's url, serialized. + return URLSerializer(this[kWebSocketURL]) + } + + get extensions () { + webidl.brandCheck(this, WebSocket) + + return this.#extensions + } + + get protocol () { + webidl.brandCheck(this, WebSocket) + + return this.#protocol + } + + get onopen () { + webidl.brandCheck(this, WebSocket) + + return this.#events.open + } + + set onopen (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.open) { + this.removeEventListener('open', this.#events.open) + } + + if (typeof fn === 'function') { + this.#events.open = fn + this.addEventListener('open', fn) + } else { + this.#events.open = null + } + } + + get onerror () { + webidl.brandCheck(this, WebSocket) + + return this.#events.error + } + + set onerror (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.error) { + this.removeEventListener('error', this.#events.error) + } + + if (typeof fn === 'function') { + this.#events.error = fn + this.addEventListener('error', fn) + } else { + this.#events.error = null + } + } + + get onclose () { + webidl.brandCheck(this, WebSocket) + + return this.#events.close + } + + set onclose (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.close) { + this.removeEventListener('close', this.#events.close) + } + + if (typeof fn === 'function') { + this.#events.close = fn + this.addEventListener('close', fn) + } else { + this.#events.close = null + } + } + + get onmessage () { + webidl.brandCheck(this, WebSocket) + + return this.#events.message + } + + set onmessage (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.message) { + this.removeEventListener('message', this.#events.message) + } + + if (typeof fn === 'function') { + this.#events.message = fn + this.addEventListener('message', fn) + } else { + this.#events.message = null + } + } + + get binaryType () { + webidl.brandCheck(this, WebSocket) + 
+ return this[kBinaryType] + } + + set binaryType (type) { + webidl.brandCheck(this, WebSocket) + + if (type !== 'blob' && type !== 'arraybuffer') { + this[kBinaryType] = 'blob' + } else { + this[kBinaryType] = type + } + } + + /** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + */ + #onConnectionEstablished (response) { + // processResponse is called when the "response’s header list has been received and initialized." + // once this happens, the connection is open + this[kResponse] = response + + const parser = new ByteParser(this) + parser.on('drain', function onParserDrain () { + this.ws[kResponse].socket.resume() + }) + + response.socket.ws = this + this[kByteParser] = parser + + // 1. Change the ready state to OPEN (1). + this[kReadyState] = states.OPEN + + // 2. Change the extensions attribute’s value to the extensions in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 + const extensions = response.headersList.get('sec-websocket-extensions') + + if (extensions !== null) { + this.#extensions = extensions + } + + // 3. Change the protocol attribute’s value to the subprotocol in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9 + const protocol = response.headersList.get('sec-websocket-protocol') + + if (protocol !== null) { + this.#protocol = protocol + } + + // 4. Fire an event named open at the WebSocket object. + fireEvent('open', this) + } +} + +// https://websockets.spec.whatwg.org/#dom-websocket-connecting +WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING +// https://websockets.spec.whatwg.org/#dom-websocket-open +WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN +// https://websockets.spec.whatwg.org/#dom-websocket-closing +WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING +// https://websockets.spec.whatwg.org/#dom-websocket-closed +WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED + +Object.defineProperties(WebSocket.prototype, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors, + url: kEnumerableProperty, + readyState: kEnumerableProperty, + bufferedAmount: kEnumerableProperty, + onopen: kEnumerableProperty, + onerror: kEnumerableProperty, + onclose: kEnumerableProperty, + close: kEnumerableProperty, + onmessage: kEnumerableProperty, + binaryType: kEnumerableProperty, + send: kEnumerableProperty, + extensions: kEnumerableProperty, + protocol: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'WebSocket', + writable: false, + enumerable: false, + configurable: true + } +}) + +Object.defineProperties(WebSocket, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors +}) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.DOMString +) + +webidl.converters['DOMString or sequence'] = function (V) { + if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) { + return webidl.converters['sequence'](V) + } + + return webidl.converters.DOMString(V) +} + +// This implements the propsal made in https://github.com/whatwg/websockets/issues/42 +webidl.converters.WebSocketInit = webidl.dictionaryConverter([ + { + key: 'protocols', + converter: webidl.converters['DOMString or sequence'], + get defaultValue () { + return [] + } + }, + { + key: 'dispatcher', + converter: (V) 
=> V, + get defaultValue () { + return getGlobalDispatcher() + } + }, + { + key: 'headers', + converter: webidl.nullableConverter(webidl.converters.HeadersInit) + } +]) + +webidl.converters['DOMString or sequence or WebSocketInit'] = function (V) { + if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) { + return webidl.converters.WebSocketInit(V) + } + + return { protocols: webidl.converters['DOMString or sequence'](V) } +} + +webidl.converters.WebSocketSendData = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { + return webidl.converters.BufferSource(V) + } + } + + return webidl.converters.USVString(V) +} + +module.exports = { + WebSocket +} diff --git a/node_modules/undici/package.json b/node_modules/undici/package.json new file mode 100644 index 0000000..15a9436 --- /dev/null +++ b/node_modules/undici/package.json @@ -0,0 +1,167 @@ +{ + "name": "undici", + "version": "5.28.3", + "description": "An HTTP/1.1 client, written from scratch for Node.js", + "homepage": "https://undici.nodejs.org", + "bugs": { + "url": "https://github.com/nodejs/undici/issues" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/nodejs/undici.git" + }, + "license": "MIT", + "contributors": [ + { + "name": "Daniele Belardi", + "url": "https://github.com/dnlup", + "author": true + }, + { + "name": "Ethan Arrowood", + "url": "https://github.com/ethan-arrowood", + "author": true + }, + { + "name": "Matteo Collina", + "url": "https://github.com/mcollina", + "author": true + }, + { + "name": "Matthew Aitken", + "url": "https://github.com/KhafraDev", + "author": true + }, + { + "name": "Robert Nagy", + "url": "https://github.com/ronag", + "author": true + }, + { + "name": "Szymon Marczak", + "url": "https://github.com/szmarczak", + "author": true + }, + { + "name": "Tomas Della Vedova", + "url": "https://github.com/delvedor", + "author": true + } + ], + "keywords": [ + "fetch", + "http", + "https", + "promise", + "request", + "curl", + "wget", + "xhr", + "whatwg" + ], + "main": "index.js", + "types": "index.d.ts", + "files": [ + "*.d.ts", + "index.js", + "index-fetch.js", + "lib", + "types", + "docs" + ], + "scripts": { + "build:node": "npx esbuild@0.19.4 index-fetch.js --bundle --platform=node --outfile=undici-fetch.js --define:esbuildDetection=1 --keep-names", + "prebuild:wasm": "node build/wasm.js --prebuild", + "build:wasm": "node build/wasm.js --docker", + "lint": "standard | snazzy", + "lint:fix": "standard --fix | snazzy", + "test": "node scripts/generate-pem && npm run test:tap && npm run test:node-fetch && npm run test:fetch && npm run test:cookies && npm run test:wpt && npm run test:websocket && npm run test:jest && npm run test:typescript", + "test:cookies": "node scripts/verifyVersion 16 || tap test/cookie/*.js", + "test:node-fetch": "node scripts/verifyVersion.js 16 || mocha --exit test/node-fetch", + "test:fetch": "node scripts/verifyVersion.js 16 || (npm run build:node && tap --expose-gc test/fetch/*.js && tap test/webidl/*.js)", + "test:jest": "node scripts/verifyVersion.js 14 || jest", + "test:tap": "tap test/*.js test/diagnostics-channel/*.js", + "test:tdd": "tap test/*.js test/diagnostics-channel/*.js -w", + "test:typescript": "node scripts/verifyVersion.js 14 || tsd && tsc --skipLibCheck test/imports/undici-import.ts", + "test:websocket": "node scripts/verifyVersion.js 18 || tap test/websocket/*.js", + "test:wpt": 
"node scripts/verifyVersion 18 || (node test/wpt/start-fetch.mjs && node test/wpt/start-FileAPI.mjs && node test/wpt/start-mimesniff.mjs && node test/wpt/start-xhr.mjs && node test/wpt/start-websockets.mjs)", + "coverage": "nyc --reporter=text --reporter=html npm run test", + "coverage:ci": "nyc --reporter=lcov npm run test", + "bench": "PORT=3042 concurrently -k -s first npm:bench:server npm:bench:run", + "bench:server": "node benchmarks/server.js", + "prebench:run": "node benchmarks/wait.js", + "bench:run": "CONNECTIONS=1 node benchmarks/benchmark.js; CONNECTIONS=50 node benchmarks/benchmark.js", + "serve:website": "docsify serve .", + "prepare": "husky install", + "fuzz": "jsfuzz test/fuzzing/fuzz.js corpus" + }, + "devDependencies": { + "@sinonjs/fake-timers": "^11.1.0", + "@types/node": "^18.0.3", + "abort-controller": "^3.0.0", + "atomic-sleep": "^1.0.0", + "chai": "^4.3.4", + "chai-as-promised": "^7.1.1", + "chai-iterator": "^3.0.2", + "chai-string": "^1.5.0", + "concurrently": "^8.0.1", + "cronometro": "^1.0.5", + "delay": "^5.0.0", + "dns-packet": "^5.4.0", + "docsify-cli": "^4.4.3", + "form-data": "^4.0.0", + "formdata-node": "^6.0.3", + "https-pem": "^3.0.0", + "husky": "^8.0.1", + "import-fresh": "^3.3.0", + "jest": "^29.0.2", + "jsdom": "^23.0.0", + "jsfuzz": "^1.0.15", + "mocha": "^10.0.0", + "mockttp": "^3.9.2", + "p-timeout": "^3.2.0", + "pre-commit": "^1.2.2", + "proxy": "^1.0.2", + "proxyquire": "^2.1.3", + "semver": "^7.5.4", + "sinon": "^17.0.1", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "table": "^6.8.0", + "tap": "^16.1.0", + "tsd": "^0.29.0", + "typescript": "^5.0.2", + "wait-on": "^7.0.1", + "ws": "^8.11.0" + }, + "engines": { + "node": ">=14.0" + }, + "standard": { + "env": [ + "mocha" + ], + "ignore": [ + "lib/llhttp/constants.js", + "lib/llhttp/utils.js", + "test/wpt/tests" + ] + }, + "tsd": { + "directory": "test/types", + "compilerOptions": { + "esModuleInterop": true, + "lib": [ + "esnext" + ] + } + }, + "jest": { + "testMatch": [ + "/test/jest/**" + ] + }, + "dependencies": { + "@fastify/busboy": "^2.0.0" + } +} diff --git a/node_modules/undici/types/README.md b/node_modules/undici/types/README.md new file mode 100644 index 0000000..20a721c --- /dev/null +++ b/node_modules/undici/types/README.md @@ -0,0 +1,6 @@ +# undici-types + +This package is a dual-publish of the [undici](https://www.npmjs.com/package/undici) library types. The `undici` package **still contains types**. This package is for users who _only_ need undici types (such as for `@types/node`). It is published alongside every release of `undici`, so you can always use the same version. + +- [GitHub nodejs/undici](https://github.com/nodejs/undici) +- [Undici Documentation](https://undici.nodejs.org/#/) diff --git a/node_modules/undici/types/agent.d.ts b/node_modules/undici/types/agent.d.ts new file mode 100644 index 0000000..58081ce --- /dev/null +++ b/node_modules/undici/types/agent.d.ts @@ -0,0 +1,31 @@ +import { URL } from 'url' +import Pool from './pool' +import Dispatcher from "./dispatcher"; + +export default Agent + +declare class Agent extends Dispatcher{ + constructor(opts?: Agent.Options) + /** `true` after `dispatcher.close()` has been called. */ + closed: boolean; + /** `true` after `dispatcher.destroyed()` has been called or `dispatcher.close()` has been called and the dispatcher shutdown has completed. */ + destroyed: boolean; + /** Dispatches a request. 
*/ + dispatch(options: Agent.DispatchOptions, handler: Dispatcher.DispatchHandlers): boolean; +} + +declare namespace Agent { + export interface Options extends Pool.Options { + /** Default: `(origin, opts) => new Pool(origin, opts)`. */ + factory?(origin: string | URL, opts: Object): Dispatcher; + /** Integer. Default: `0` */ + maxRedirections?: number; + + interceptors?: { Agent?: readonly Dispatcher.DispatchInterceptor[] } & Pool.Options["interceptors"] + } + + export interface DispatchOptions extends Dispatcher.DispatchOptions { + /** Integer. */ + maxRedirections?: number; + } +} diff --git a/node_modules/undici/types/api.d.ts b/node_modules/undici/types/api.d.ts new file mode 100644 index 0000000..400341d --- /dev/null +++ b/node_modules/undici/types/api.d.ts @@ -0,0 +1,43 @@ +import { URL, UrlObject } from 'url' +import { Duplex } from 'stream' +import Dispatcher from './dispatcher' + +export { + request, + stream, + pipeline, + connect, + upgrade, +} + +/** Performs an HTTP request. */ +declare function request( + url: string | URL | UrlObject, + options?: { dispatcher?: Dispatcher } & Omit & Partial>, +): Promise; + +/** A faster version of `request`. */ +declare function stream( + url: string | URL | UrlObject, + options: { dispatcher?: Dispatcher } & Omit, + factory: Dispatcher.StreamFactory +): Promise; + +/** For easy use with `stream.pipeline`. */ +declare function pipeline( + url: string | URL | UrlObject, + options: { dispatcher?: Dispatcher } & Omit, + handler: Dispatcher.PipelineHandler +): Duplex; + +/** Starts two-way communications with the requested resource. */ +declare function connect( + url: string | URL | UrlObject, + options?: { dispatcher?: Dispatcher } & Omit +): Promise; + +/** Upgrade to a different protocol. */ +declare function upgrade( + url: string | URL | UrlObject, + options?: { dispatcher?: Dispatcher } & Omit +): Promise; diff --git a/node_modules/undici/types/balanced-pool.d.ts b/node_modules/undici/types/balanced-pool.d.ts new file mode 100644 index 0000000..d1e9375 --- /dev/null +++ b/node_modules/undici/types/balanced-pool.d.ts @@ -0,0 +1,18 @@ +import Pool from './pool' +import Dispatcher from './dispatcher' +import { URL } from 'url' + +export default BalancedPool + +declare class BalancedPool extends Dispatcher { + constructor(url: string | string[] | URL | URL[], options?: Pool.Options); + + addUpstream(upstream: string | URL): BalancedPool; + removeUpstream(upstream: string | URL): BalancedPool; + upstreams: Array; + + /** `true` after `pool.close()` has been called. */ + closed: boolean; + /** `true` after `pool.destroyed()` has been called or `pool.close()` has been called and the pool shutdown has completed. 
*/ + destroyed: boolean; +} diff --git a/node_modules/undici/types/cache.d.ts b/node_modules/undici/types/cache.d.ts new file mode 100644 index 0000000..4c33335 --- /dev/null +++ b/node_modules/undici/types/cache.d.ts @@ -0,0 +1,36 @@ +import type { RequestInfo, Response, Request } from './fetch' + +export interface CacheStorage { + match (request: RequestInfo, options?: MultiCacheQueryOptions): Promise, + has (cacheName: string): Promise, + open (cacheName: string): Promise, + delete (cacheName: string): Promise, + keys (): Promise +} + +declare const CacheStorage: { + prototype: CacheStorage + new(): CacheStorage +} + +export interface Cache { + match (request: RequestInfo, options?: CacheQueryOptions): Promise, + matchAll (request?: RequestInfo, options?: CacheQueryOptions): Promise, + add (request: RequestInfo): Promise, + addAll (requests: RequestInfo[]): Promise, + put (request: RequestInfo, response: Response): Promise, + delete (request: RequestInfo, options?: CacheQueryOptions): Promise, + keys (request?: RequestInfo, options?: CacheQueryOptions): Promise +} + +export interface CacheQueryOptions { + ignoreSearch?: boolean, + ignoreMethod?: boolean, + ignoreVary?: boolean +} + +export interface MultiCacheQueryOptions extends CacheQueryOptions { + cacheName?: string +} + +export declare const caches: CacheStorage diff --git a/node_modules/undici/types/client.d.ts b/node_modules/undici/types/client.d.ts new file mode 100644 index 0000000..56e78cc --- /dev/null +++ b/node_modules/undici/types/client.d.ts @@ -0,0 +1,97 @@ +import { URL } from 'url' +import { TlsOptions } from 'tls' +import Dispatcher from './dispatcher' +import buildConnector from "./connector"; + +/** + * A basic HTTP/1.1 client, mapped on top a single TCP/TLS connection. Pipelining is disabled by default. + */ +export class Client extends Dispatcher { + constructor(url: string | URL, options?: Client.Options); + /** Property to get and set the pipelining factor. */ + pipelining: number; + /** `true` after `client.close()` has been called. */ + closed: boolean; + /** `true` after `client.destroyed()` has been called or `client.close()` has been called and the client shutdown has completed. */ + destroyed: boolean; +} + +export declare namespace Client { + export interface OptionsInterceptors { + Client: readonly Dispatcher.DispatchInterceptor[]; + } + export interface Options { + /** TODO */ + interceptors?: OptionsInterceptors; + /** The maximum length of request headers in bytes. Default: Node.js' `--max-http-header-size` or `16384` (16KiB). */ + maxHeaderSize?: number; + /** The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers (Node 14 and above only). Default: `300e3` milliseconds (300s). */ + headersTimeout?: number; + /** @deprecated unsupported socketTimeout, use headersTimeout & bodyTimeout instead */ + socketTimeout?: never; + /** @deprecated unsupported requestTimeout, use headersTimeout & bodyTimeout instead */ + requestTimeout?: never; + /** TODO */ + connectTimeout?: number; + /** The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Default: `300e3` milliseconds (300s). */ + bodyTimeout?: number; + /** @deprecated unsupported idleTimeout, use keepAliveTimeout instead */ + idleTimeout?: never; + /** @deprecated unsupported keepAlive, use pipelining=0 instead */ + keepAlive?: never; + /** the timeout, in milliseconds, after which a socket without active requests will time out. 
Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. Default: `4e3` milliseconds (4s). */ + keepAliveTimeout?: number; + /** @deprecated unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead */ + maxKeepAliveTimeout?: never; + /** the maximum allowed `idleTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Default: `600e3` milliseconds (10min). */ + keepAliveMaxTimeout?: number; + /** A number of milliseconds subtracted from server *keep-alive* hints when overriding `idleTimeout` to account for timing inaccuracies caused by e.g. transport latency. Default: `1e3` milliseconds (1s). */ + keepAliveTimeoutThreshold?: number; + /** TODO */ + socketPath?: string; + /** The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Default: `1`. */ + pipelining?: number; + /** @deprecated use the connect option instead */ + tls?: never; + /** If `true`, an error is thrown when the request content-length header doesn't match the length of the request body. Default: `true`. */ + strictContentLength?: boolean; + /** TODO */ + maxCachedSessions?: number; + /** TODO */ + maxRedirections?: number; + /** TODO */ + connect?: buildConnector.BuildOptions | buildConnector.connector; + /** TODO */ + maxRequestsPerClient?: number; + /** TODO */ + localAddress?: string; + /** Max response body size in bytes, -1 is disabled */ + maxResponseSize?: number; + /** Enables a family autodetection algorithm that loosely implements section 5 of RFC 8305. */ + autoSelectFamily?: boolean; + /** The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. */ + autoSelectFamilyAttemptTimeout?: number; + /** + * @description Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation. + * @default false + */ + allowH2?: boolean; + /** + * @description Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame. 
+ * @default 100 + */ + maxConcurrentStreams?: number + } + export interface SocketInfo { + localAddress?: string + localPort?: number + remoteAddress?: string + remotePort?: number + remoteFamily?: string + timeout?: number + bytesWritten?: number + bytesRead?: number + } +} + +export default Client; diff --git a/node_modules/undici/types/connector.d.ts b/node_modules/undici/types/connector.d.ts new file mode 100644 index 0000000..bd92433 --- /dev/null +++ b/node_modules/undici/types/connector.d.ts @@ -0,0 +1,34 @@ +import { TLSSocket, ConnectionOptions } from 'tls' +import { IpcNetConnectOpts, Socket, TcpNetConnectOpts } from 'net' + +export default buildConnector +declare function buildConnector (options?: buildConnector.BuildOptions): buildConnector.connector + +declare namespace buildConnector { + export type BuildOptions = (ConnectionOptions | TcpNetConnectOpts | IpcNetConnectOpts) & { + allowH2?: boolean; + maxCachedSessions?: number | null; + socketPath?: string | null; + timeout?: number | null; + port?: number; + keepAlive?: boolean | null; + keepAliveInitialDelay?: number | null; + } + + export interface Options { + hostname: string + host?: string + protocol: string + port: string + servername?: string + localAddress?: string | null + httpSocket?: Socket + } + + export type Callback = (...args: CallbackArgs) => void + type CallbackArgs = [null, Socket | TLSSocket] | [Error, null] + + export interface connector { + (options: buildConnector.Options, callback: buildConnector.Callback): void + } +} diff --git a/node_modules/undici/types/content-type.d.ts b/node_modules/undici/types/content-type.d.ts new file mode 100644 index 0000000..f2a87f1 --- /dev/null +++ b/node_modules/undici/types/content-type.d.ts @@ -0,0 +1,21 @@ +/// + +interface MIMEType { + type: string + subtype: string + parameters: Map + essence: string +} + +/** + * Parse a string to a {@link MIMEType} object. Returns `failure` if the string + * couldn't be parsed. + * @see https://mimesniff.spec.whatwg.org/#parse-a-mime-type + */ +export function parseMIMEType (input: string): 'failure' | MIMEType + +/** + * Convert a MIMEType object to a string. 
+ * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type + */ +export function serializeAMimeType (mimeType: MIMEType): string diff --git a/node_modules/undici/types/cookies.d.ts b/node_modules/undici/types/cookies.d.ts new file mode 100644 index 0000000..aa38cae --- /dev/null +++ b/node_modules/undici/types/cookies.d.ts @@ -0,0 +1,28 @@ +/// + +import type { Headers } from './fetch' + +export interface Cookie { + name: string + value: string + expires?: Date | number + maxAge?: number + domain?: string + path?: string + secure?: boolean + httpOnly?: boolean + sameSite?: 'Strict' | 'Lax' | 'None' + unparsed?: string[] +} + +export function deleteCookie ( + headers: Headers, + name: string, + attributes?: { name?: string, domain?: string } +): void + +export function getCookies (headers: Headers): Record + +export function getSetCookies (headers: Headers): Cookie[] + +export function setCookie (headers: Headers, cookie: Cookie): void diff --git a/node_modules/undici/types/diagnostics-channel.d.ts b/node_modules/undici/types/diagnostics-channel.d.ts new file mode 100644 index 0000000..85d4482 --- /dev/null +++ b/node_modules/undici/types/diagnostics-channel.d.ts @@ -0,0 +1,67 @@ +import { Socket } from "net"; +import { URL } from "url"; +import Connector from "./connector"; +import Dispatcher from "./dispatcher"; + +declare namespace DiagnosticsChannel { + interface Request { + origin?: string | URL; + completed: boolean; + method?: Dispatcher.HttpMethod; + path: string; + headers: string; + addHeader(key: string, value: string): Request; + } + interface Response { + statusCode: number; + statusText: string; + headers: Array; + } + type Error = unknown; + interface ConnectParams { + host: URL["host"]; + hostname: URL["hostname"]; + protocol: URL["protocol"]; + port: URL["port"]; + servername: string | null; + } + type Connector = Connector.connector; + export interface RequestCreateMessage { + request: Request; + } + export interface RequestBodySentMessage { + request: Request; + } + export interface RequestHeadersMessage { + request: Request; + response: Response; + } + export interface RequestTrailersMessage { + request: Request; + trailers: Array; + } + export interface RequestErrorMessage { + request: Request; + error: Error; + } + export interface ClientSendHeadersMessage { + request: Request; + headers: string; + socket: Socket; + } + export interface ClientBeforeConnectMessage { + connectParams: ConnectParams; + connector: Connector; + } + export interface ClientConnectedMessage { + socket: Socket; + connectParams: ConnectParams; + connector: Connector; + } + export interface ClientConnectErrorMessage { + error: Error; + socket: Socket; + connectParams: ConnectParams; + connector: Connector; + } +} diff --git a/node_modules/undici/types/dispatcher.d.ts b/node_modules/undici/types/dispatcher.d.ts new file mode 100644 index 0000000..efc53ee --- /dev/null +++ b/node_modules/undici/types/dispatcher.d.ts @@ -0,0 +1,241 @@ +import { URL } from 'url' +import { Duplex, Readable, Writable } from 'stream' +import { EventEmitter } from 'events' +import { Blob } from 'buffer' +import { IncomingHttpHeaders } from './header' +import BodyReadable from './readable' +import { FormData } from './formdata' +import Errors from './errors' + +type AbortSignal = unknown; + +export default Dispatcher + +/** Dispatcher is the core API used to dispatch requests. */ +declare class Dispatcher extends EventEmitter { + /** Dispatches a request. 
This API is expected to evolve through semver-major versions and is less stable than the preceding higher level APIs. It is primarily intended for library developers who implement higher level APIs on top of this. */ + dispatch(options: Dispatcher.DispatchOptions, handler: Dispatcher.DispatchHandlers): boolean; + /** Starts two-way communications with the requested resource. */ + connect(options: Dispatcher.ConnectOptions): Promise; + connect(options: Dispatcher.ConnectOptions, callback: (err: Error | null, data: Dispatcher.ConnectData) => void): void; + /** Performs an HTTP request. */ + request(options: Dispatcher.RequestOptions): Promise; + request(options: Dispatcher.RequestOptions, callback: (err: Error | null, data: Dispatcher.ResponseData) => void): void; + /** For easy use with `stream.pipeline`. */ + pipeline(options: Dispatcher.PipelineOptions, handler: Dispatcher.PipelineHandler): Duplex; + /** A faster version of `Dispatcher.request`. */ + stream(options: Dispatcher.RequestOptions, factory: Dispatcher.StreamFactory): Promise; + stream(options: Dispatcher.RequestOptions, factory: Dispatcher.StreamFactory, callback: (err: Error | null, data: Dispatcher.StreamData) => void): void; + /** Upgrade to a different protocol. */ + upgrade(options: Dispatcher.UpgradeOptions): Promise; + upgrade(options: Dispatcher.UpgradeOptions, callback: (err: Error | null, data: Dispatcher.UpgradeData) => void): void; + /** Closes the client and gracefully waits for enqueued requests to complete before invoking the callback (or returning a promise if no callback is provided). */ + close(): Promise; + close(callback: () => void): void; + /** Destroy the client abruptly with the given err. All the pending and running requests will be asynchronously aborted and error. Waits until socket is closed before invoking the callback (or returning a promise if no callback is provided). Since this operation is asynchronously dispatched there might still be some progress on dispatched requests. 
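For orientation, a minimal usage sketch of the high-level `request()` method declared above — not part of the vendored package, and assuming the undici build that ships these types is installed; the origin and path are placeholders:

```ts
import { Agent } from 'undici'

// An Agent is one concrete Dispatcher; keepAliveTimeout is an illustrative option.
const dispatcher = new Agent({ keepAliveTimeout: 10_000 })

async function getJson (origin: string, path: string): Promise<unknown> {
  // request() resolves with ResponseData; `body` is a BodyReadable exposing the BodyMixin helpers.
  const { statusCode, body } = await dispatcher.request({ origin, path, method: 'GET' })
  if (statusCode !== 200) throw new Error(`unexpected status ${statusCode}`)
  return body.json()
}
```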
*/ + destroy(): Promise; + destroy(err: Error | null): Promise; + destroy(callback: () => void): void; + destroy(err: Error | null, callback: () => void): void; + + on(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + on(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + on(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + on(eventName: 'drain', callback: (origin: URL) => void): this; + + + once(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + once(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + once(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + once(eventName: 'drain', callback: (origin: URL) => void): this; + + + off(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + off(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + off(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + off(eventName: 'drain', callback: (origin: URL) => void): this; + + + addListener(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + addListener(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + addListener(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + addListener(eventName: 'drain', callback: (origin: URL) => void): this; + + removeListener(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + removeListener(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + removeListener(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + removeListener(eventName: 'drain', callback: (origin: URL) => void): this; + + prependListener(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + prependListener(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + prependListener(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + prependListener(eventName: 'drain', callback: (origin: URL) => void): this; + + prependOnceListener(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + prependOnceListener(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + prependOnceListener(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + prependOnceListener(eventName: 'drain', callback: (origin: URL) => void): this; + + listeners(eventName: 'connect'): ((origin: URL, targets: readonly Dispatcher[]) => void)[] + listeners(eventName: 'disconnect'): ((origin: URL, targets: readonly 
Dispatcher[], error: Errors.UndiciError) => void)[]; + listeners(eventName: 'connectionError'): ((origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void)[]; + listeners(eventName: 'drain'): ((origin: URL) => void)[]; + + rawListeners(eventName: 'connect'): ((origin: URL, targets: readonly Dispatcher[]) => void)[] + rawListeners(eventName: 'disconnect'): ((origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void)[]; + rawListeners(eventName: 'connectionError'): ((origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void)[]; + rawListeners(eventName: 'drain'): ((origin: URL) => void)[]; + + emit(eventName: 'connect', origin: URL, targets: readonly Dispatcher[]): boolean; + emit(eventName: 'disconnect', origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError): boolean; + emit(eventName: 'connectionError', origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError): boolean; + emit(eventName: 'drain', origin: URL): boolean; +} + +declare namespace Dispatcher { + export interface DispatchOptions { + origin?: string | URL; + path: string; + method: HttpMethod; + /** Default: `null` */ + body?: string | Buffer | Uint8Array | Readable | null | FormData; + /** Default: `null` */ + headers?: IncomingHttpHeaders | string[] | null; + /** Query string params to be embedded in the request URL. Default: `null` */ + query?: Record; + /** Whether the requests can be safely retried or not. If `false` the request won't be sent until all preceding requests in the pipeline have completed. Default: `true` if `method` is `HEAD` or `GET`. */ + idempotent?: boolean; + /** Whether the response is expected to take a long time and would end up blocking the pipeline. When this is set to `true` further pipelining will be avoided on the same connection until headers have been received. */ + blocking?: boolean; + /** Upgrade the request. Should be used to specify the kind of upgrade i.e. `'Websocket'`. Default: `method === 'CONNECT' || null`. */ + upgrade?: boolean | string | null; + /** The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers. Defaults to 300 seconds. */ + headersTimeout?: number | null; + /** The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use 0 to disable it entirely. Defaults to 300 seconds. */ + bodyTimeout?: number | null; + /** Whether the request should stablish a keep-alive or not. Default `false` */ + reset?: boolean; + /** Whether Undici should throw an error upon receiving a 4xx or 5xx response from the server. 
Defaults to false */ + throwOnError?: boolean; + /** For H2, it appends the expect: 100-continue header, and halts the request body until a 100-continue is received from the remote server*/ + expectContinue?: boolean; + } + export interface ConnectOptions { + path: string; + /** Default: `null` */ + headers?: IncomingHttpHeaders | string[] | null; + /** Default: `null` */ + signal?: AbortSignal | EventEmitter | null; + /** This argument parameter is passed through to `ConnectData` */ + opaque?: unknown; + /** Default: 0 */ + maxRedirections?: number; + /** Default: `null` */ + responseHeader?: 'raw' | null; + } + export interface RequestOptions extends DispatchOptions { + /** Default: `null` */ + opaque?: unknown; + /** Default: `null` */ + signal?: AbortSignal | EventEmitter | null; + /** Default: 0 */ + maxRedirections?: number; + /** Default: `null` */ + onInfo?: (info: { statusCode: number, headers: Record }) => void; + /** Default: `null` */ + responseHeader?: 'raw' | null; + /** Default: `64 KiB` */ + highWaterMark?: number; + } + export interface PipelineOptions extends RequestOptions { + /** `true` if the `handler` will return an object stream. Default: `false` */ + objectMode?: boolean; + } + export interface UpgradeOptions { + path: string; + /** Default: `'GET'` */ + method?: string; + /** Default: `null` */ + headers?: IncomingHttpHeaders | string[] | null; + /** A string of comma separated protocols, in descending preference order. Default: `'Websocket'` */ + protocol?: string; + /** Default: `null` */ + signal?: AbortSignal | EventEmitter | null; + /** Default: 0 */ + maxRedirections?: number; + /** Default: `null` */ + responseHeader?: 'raw' | null; + } + export interface ConnectData { + statusCode: number; + headers: IncomingHttpHeaders; + socket: Duplex; + opaque: unknown; + } + export interface ResponseData { + statusCode: number; + headers: IncomingHttpHeaders; + body: BodyReadable & BodyMixin; + trailers: Record; + opaque: unknown; + context: object; + } + export interface PipelineHandlerData { + statusCode: number; + headers: IncomingHttpHeaders; + opaque: unknown; + body: BodyReadable; + context: object; + } + export interface StreamData { + opaque: unknown; + trailers: Record; + } + export interface UpgradeData { + headers: IncomingHttpHeaders; + socket: Duplex; + opaque: unknown; + } + export interface StreamFactoryData { + statusCode: number; + headers: IncomingHttpHeaders; + opaque: unknown; + context: object; + } + export type StreamFactory = (data: StreamFactoryData) => Writable; + export interface DispatchHandlers { + /** Invoked before request is dispatched on socket. May be invoked multiple times when a request is retried when the request at the head of the pipeline fails. */ + onConnect?(abort: () => void): void; + /** Invoked when an error has occurred. */ + onError?(err: Error): void; + /** Invoked when request is upgraded either due to a `Upgrade` header or `CONNECT` method. */ + onUpgrade?(statusCode: number, headers: Buffer[] | string[] | null, socket: Duplex): void; + /** Invoked when statusCode and headers have been received. May be invoked multiple times due to 1xx informational headers. */ + onHeaders?(statusCode: number, headers: Buffer[] | string[] | null, resume: () => void, statusText: string): boolean; + /** Invoked when response payload data is received. */ + onData?(chunk: Buffer): boolean; + /** Invoked when response payload and trailers have been received and the request has completed. 
*/ + onComplete?(trailers: string[] | null): void; + /** Invoked when a body chunk is sent to the server. May be invoked multiple times for chunked requests */ + onBodySent?(chunkSize: number, totalBytesSent: number): void; + } + export type PipelineHandler = (data: PipelineHandlerData) => Readable; + export type HttpMethod = 'GET' | 'HEAD' | 'POST' | 'PUT' | 'DELETE' | 'CONNECT' | 'OPTIONS' | 'TRACE' | 'PATCH'; + + /** + * @link https://fetch.spec.whatwg.org/#body-mixin + */ + interface BodyMixin { + readonly body?: never; // throws on node v16.6.0 + readonly bodyUsed: boolean; + arrayBuffer(): Promise; + blob(): Promise; + formData(): Promise; + json(): Promise; + text(): Promise; + } + + export interface DispatchInterceptor { + (dispatch: Dispatcher['dispatch']): Dispatcher['dispatch'] + } +} diff --git a/node_modules/undici/types/errors.d.ts b/node_modules/undici/types/errors.d.ts new file mode 100644 index 0000000..7923ddd --- /dev/null +++ b/node_modules/undici/types/errors.d.ts @@ -0,0 +1,128 @@ +import { IncomingHttpHeaders } from "./header"; +import Client from './client' + +export default Errors + +declare namespace Errors { + export class UndiciError extends Error { + name: string; + code: string; + } + + /** Connect timeout error. */ + export class ConnectTimeoutError extends UndiciError { + name: 'ConnectTimeoutError'; + code: 'UND_ERR_CONNECT_TIMEOUT'; + } + + /** A header exceeds the `headersTimeout` option. */ + export class HeadersTimeoutError extends UndiciError { + name: 'HeadersTimeoutError'; + code: 'UND_ERR_HEADERS_TIMEOUT'; + } + + /** Headers overflow error. */ + export class HeadersOverflowError extends UndiciError { + name: 'HeadersOverflowError' + code: 'UND_ERR_HEADERS_OVERFLOW' + } + + /** A body exceeds the `bodyTimeout` option. */ + export class BodyTimeoutError extends UndiciError { + name: 'BodyTimeoutError'; + code: 'UND_ERR_BODY_TIMEOUT'; + } + + export class ResponseStatusCodeError extends UndiciError { + constructor ( + message?: string, + statusCode?: number, + headers?: IncomingHttpHeaders | string[] | null, + body?: null | Record | string + ); + name: 'ResponseStatusCodeError'; + code: 'UND_ERR_RESPONSE_STATUS_CODE'; + body: null | Record | string + status: number + statusCode: number + headers: IncomingHttpHeaders | string[] | null; + } + + /** Passed an invalid argument. */ + export class InvalidArgumentError extends UndiciError { + name: 'InvalidArgumentError'; + code: 'UND_ERR_INVALID_ARG'; + } + + /** Returned an invalid value. */ + export class InvalidReturnValueError extends UndiciError { + name: 'InvalidReturnValueError'; + code: 'UND_ERR_INVALID_RETURN_VALUE'; + } + + /** The request has been aborted by the user. */ + export class RequestAbortedError extends UndiciError { + name: 'AbortError'; + code: 'UND_ERR_ABORTED'; + } + + /** Expected error with reason. */ + export class InformationalError extends UndiciError { + name: 'InformationalError'; + code: 'UND_ERR_INFO'; + } + + /** Request body length does not match content-length header. */ + export class RequestContentLengthMismatchError extends UndiciError { + name: 'RequestContentLengthMismatchError'; + code: 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'; + } + + /** Response body length does not match content-length header. */ + export class ResponseContentLengthMismatchError extends UndiciError { + name: 'ResponseContentLengthMismatchError'; + code: 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'; + } + + /** Trying to use a destroyed client. 
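As a rough illustration of how these error classes are typically consumed — a sketch, assuming the `errors` namespace is re-exported from the package root as shown in index.d.ts further below, and using a placeholder URL:

```ts
import { errors, request } from 'undici'

// Narrow a failure by error class; each class also carries the UND_ERR_* code listed above.
async function fetchWithShortHeaderTimeout (url: string) {
  try {
    return await request(url, { headersTimeout: 1_000 })
  } catch (err) {
    if (err instanceof errors.HeadersTimeoutError) {
      // code === 'UND_ERR_HEADERS_TIMEOUT': headers were not received in time.
      return null
    }
    throw err
  }
}
```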
*/ + export class ClientDestroyedError extends UndiciError { + name: 'ClientDestroyedError'; + code: 'UND_ERR_DESTROYED'; + } + + /** Trying to use a closed client. */ + export class ClientClosedError extends UndiciError { + name: 'ClientClosedError'; + code: 'UND_ERR_CLOSED'; + } + + /** There is an error with the socket. */ + export class SocketError extends UndiciError { + name: 'SocketError'; + code: 'UND_ERR_SOCKET'; + socket: Client.SocketInfo | null + } + + /** Encountered unsupported functionality. */ + export class NotSupportedError extends UndiciError { + name: 'NotSupportedError'; + code: 'UND_ERR_NOT_SUPPORTED'; + } + + /** No upstream has been added to the BalancedPool. */ + export class BalancedPoolMissingUpstreamError extends UndiciError { + name: 'MissingUpstreamError'; + code: 'UND_ERR_BPL_MISSING_UPSTREAM'; + } + + export class HTTPParserError extends UndiciError { + name: 'HTTPParserError'; + code: string; + } + + /** The response exceed the length allowed. */ + export class ResponseExceededMaxSizeError extends UndiciError { + name: 'ResponseExceededMaxSizeError'; + code: 'UND_ERR_RES_EXCEEDED_MAX_SIZE'; + } +} diff --git a/node_modules/undici/types/fetch.d.ts b/node_modules/undici/types/fetch.d.ts new file mode 100644 index 0000000..440f2b0 --- /dev/null +++ b/node_modules/undici/types/fetch.d.ts @@ -0,0 +1,209 @@ +// based on https://github.com/Ethan-Arrowood/undici-fetch/blob/249269714db874351589d2d364a0645d5160ae71/index.d.ts (MIT license) +// and https://github.com/node-fetch/node-fetch/blob/914ce6be5ec67a8bab63d68510aabf07cb818b6d/index.d.ts (MIT license) +/// + +import { Blob } from 'buffer' +import { URL, URLSearchParams } from 'url' +import { ReadableStream } from 'stream/web' +import { FormData } from './formdata' + +import Dispatcher from './dispatcher' + +export type RequestInfo = string | URL | Request + +export declare function fetch ( + input: RequestInfo, + init?: RequestInit +): Promise + +export type BodyInit = + | ArrayBuffer + | AsyncIterable + | Blob + | FormData + | Iterable + | NodeJS.ArrayBufferView + | URLSearchParams + | null + | string + +export interface BodyMixin { + readonly body: ReadableStream | null + readonly bodyUsed: boolean + + readonly arrayBuffer: () => Promise + readonly blob: () => Promise + readonly formData: () => Promise + readonly json: () => Promise + readonly text: () => Promise +} + +export interface SpecIterator { + next(...args: [] | [TNext]): IteratorResult; +} + +export interface SpecIterableIterator extends SpecIterator { + [Symbol.iterator](): SpecIterableIterator; +} + +export interface SpecIterable { + [Symbol.iterator](): SpecIterator; +} + +export type HeadersInit = string[][] | Record> | Headers + +export declare class Headers implements SpecIterable<[string, string]> { + constructor (init?: HeadersInit) + readonly append: (name: string, value: string) => void + readonly delete: (name: string) => void + readonly get: (name: string) => string | null + readonly has: (name: string) => boolean + readonly set: (name: string, value: string) => void + readonly getSetCookie: () => string[] + readonly forEach: ( + callbackfn: (value: string, key: string, iterable: Headers) => void, + thisArg?: unknown + ) => void + + readonly keys: () => SpecIterableIterator + readonly values: () => SpecIterableIterator + readonly entries: () => SpecIterableIterator<[string, string]> + readonly [Symbol.iterator]: () => SpecIterator<[string, string]> +} + +export type RequestCache = + | 'default' + | 'force-cache' + | 'no-cache' + | 
'no-store' + | 'only-if-cached' + | 'reload' + +export type RequestCredentials = 'omit' | 'include' | 'same-origin' + +type RequestDestination = + | '' + | 'audio' + | 'audioworklet' + | 'document' + | 'embed' + | 'font' + | 'image' + | 'manifest' + | 'object' + | 'paintworklet' + | 'report' + | 'script' + | 'sharedworker' + | 'style' + | 'track' + | 'video' + | 'worker' + | 'xslt' + +export interface RequestInit { + method?: string + keepalive?: boolean + headers?: HeadersInit + body?: BodyInit + redirect?: RequestRedirect + integrity?: string + signal?: AbortSignal | null + credentials?: RequestCredentials + mode?: RequestMode + referrer?: string + referrerPolicy?: ReferrerPolicy + window?: null + dispatcher?: Dispatcher + duplex?: RequestDuplex +} + +export type ReferrerPolicy = + | '' + | 'no-referrer' + | 'no-referrer-when-downgrade' + | 'origin' + | 'origin-when-cross-origin' + | 'same-origin' + | 'strict-origin' + | 'strict-origin-when-cross-origin' + | 'unsafe-url'; + +export type RequestMode = 'cors' | 'navigate' | 'no-cors' | 'same-origin' + +export type RequestRedirect = 'error' | 'follow' | 'manual' + +export type RequestDuplex = 'half' + +export declare class Request implements BodyMixin { + constructor (input: RequestInfo, init?: RequestInit) + + readonly cache: RequestCache + readonly credentials: RequestCredentials + readonly destination: RequestDestination + readonly headers: Headers + readonly integrity: string + readonly method: string + readonly mode: RequestMode + readonly redirect: RequestRedirect + readonly referrerPolicy: string + readonly url: string + + readonly keepalive: boolean + readonly signal: AbortSignal + readonly duplex: RequestDuplex + + readonly body: ReadableStream | null + readonly bodyUsed: boolean + + readonly arrayBuffer: () => Promise + readonly blob: () => Promise + readonly formData: () => Promise + readonly json: () => Promise + readonly text: () => Promise + + readonly clone: () => Request +} + +export interface ResponseInit { + readonly status?: number + readonly statusText?: string + readonly headers?: HeadersInit +} + +export type ResponseType = + | 'basic' + | 'cors' + | 'default' + | 'error' + | 'opaque' + | 'opaqueredirect' + +export type ResponseRedirectStatus = 301 | 302 | 303 | 307 | 308 + +export declare class Response implements BodyMixin { + constructor (body?: BodyInit, init?: ResponseInit) + + readonly headers: Headers + readonly ok: boolean + readonly status: number + readonly statusText: string + readonly type: ResponseType + readonly url: string + readonly redirected: boolean + + readonly body: ReadableStream | null + readonly bodyUsed: boolean + + readonly arrayBuffer: () => Promise + readonly blob: () => Promise + readonly formData: () => Promise + readonly json: () => Promise + readonly text: () => Promise + + readonly clone: () => Response + + static error (): Response + static json(data: any, init?: ResponseInit): Response + static redirect (url: string | URL, status: ResponseRedirectStatus): Response +} diff --git a/node_modules/undici/types/file.d.ts b/node_modules/undici/types/file.d.ts new file mode 100644 index 0000000..c695b7a --- /dev/null +++ b/node_modules/undici/types/file.d.ts @@ -0,0 +1,39 @@ +// Based on https://github.com/octet-stream/form-data/blob/2d0f0dc371517444ce1f22cdde13f51995d0953a/lib/File.ts (MIT) +/// + +import { Blob } from 'buffer' + +export interface BlobPropertyBag { + type?: string + endings?: 'native' | 'transparent' +} + +export interface FilePropertyBag extends BlobPropertyBag { + /** + * 
The last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). Files without a known last modified date return the current date. + */ + lastModified?: number +} + +export declare class File extends Blob { + /** + * Creates a new File instance. + * + * @param fileBits An `Array` strings, or [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer), [`ArrayBufferView`](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView), [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects, or a mix of any of such objects, that will be put inside the [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). + * @param fileName The name of the file. + * @param options An options object containing optional attributes for the file. + */ + constructor(fileBits: ReadonlyArray, fileName: string, options?: FilePropertyBag) + + /** + * Name of the file referenced by the File object. + */ + readonly name: string + + /** + * The last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). Files without a known last modified date return the current date. + */ + readonly lastModified: number + + readonly [Symbol.toStringTag]: string +} diff --git a/node_modules/undici/types/filereader.d.ts b/node_modules/undici/types/filereader.d.ts new file mode 100644 index 0000000..f05d231 --- /dev/null +++ b/node_modules/undici/types/filereader.d.ts @@ -0,0 +1,54 @@ +/// + +import { Blob } from 'buffer' +import { DOMException, Event, EventInit, EventTarget } from './patch' + +export declare class FileReader { + __proto__: EventTarget & FileReader + + constructor () + + readAsArrayBuffer (blob: Blob): void + readAsBinaryString (blob: Blob): void + readAsText (blob: Blob, encoding?: string): void + readAsDataURL (blob: Blob): void + + abort (): void + + static readonly EMPTY = 0 + static readonly LOADING = 1 + static readonly DONE = 2 + + readonly EMPTY = 0 + readonly LOADING = 1 + readonly DONE = 2 + + readonly readyState: number + + readonly result: string | ArrayBuffer | null + + readonly error: DOMException | null + + onloadstart: null | ((this: FileReader, event: ProgressEvent) => void) + onprogress: null | ((this: FileReader, event: ProgressEvent) => void) + onload: null | ((this: FileReader, event: ProgressEvent) => void) + onabort: null | ((this: FileReader, event: ProgressEvent) => void) + onerror: null | ((this: FileReader, event: ProgressEvent) => void) + onloadend: null | ((this: FileReader, event: ProgressEvent) => void) +} + +export interface ProgressEventInit extends EventInit { + lengthComputable?: boolean + loaded?: number + total?: number +} + +export declare class ProgressEvent { + __proto__: Event & ProgressEvent + + constructor (type: string, eventInitDict?: ProgressEventInit) + + readonly lengthComputable: boolean + readonly loaded: number + readonly total: number +} diff --git a/node_modules/undici/types/formdata.d.ts b/node_modules/undici/types/formdata.d.ts new file mode 100644 index 0000000..df29a57 --- /dev/null +++ b/node_modules/undici/types/formdata.d.ts @@ -0,0 +1,108 @@ +// Based on https://github.com/octet-stream/form-data/blob/2d0f0dc371517444ce1f22cdde13f51995d0953a/lib/FormData.ts (MIT) +/// + +import { File } from './file' +import { SpecIterator, SpecIterableIterator } from './fetch' + +/** + * A `string` or `File` that represents a single value from a set of `FormData` key-value pairs. 
+ */ +declare type FormDataEntryValue = string | File + +/** + * Provides a way to easily construct a set of key/value pairs representing form fields and their values, which can then be easily sent using fetch(). + */ +export declare class FormData { + /** + * Appends a new value onto an existing key inside a FormData object, + * or adds the key if it does not already exist. + * + * The difference between `set()` and `append()` is that if the specified key already exists, `set()` will overwrite all existing values with the new one, whereas `append()` will append the new value onto the end of the existing set of values. + * + * @param name The name of the field whose data is contained in `value`. + * @param value The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + * @param fileName The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + */ + append(name: string, value: unknown, fileName?: string): void + + /** + * Set a new value for an existing key inside FormData, + * or add the new field if it does not already exist. + * + * @param name The name of the field whose data is contained in `value`. + * @param value The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + * @param fileName The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + * + */ + set(name: string, value: unknown, fileName?: string): void + + /** + * Returns the first value associated with a given key from within a `FormData` object. + * If you expect multiple values and want all of them, use the `getAll()` method instead. + * + * @param {string} name A name of the value you want to retrieve. + * + * @returns A `FormDataEntryValue` containing the value. If the key doesn't exist, the method returns null. + */ + get(name: string): FormDataEntryValue | null + + /** + * Returns all the values associated with a given key from within a `FormData` object. + * + * @param {string} name A name of the value you want to retrieve. + * + * @returns An array of `FormDataEntryValue` whose key matches the value passed in the `name` parameter. If the key doesn't exist, the method returns an empty list. + */ + getAll(name: string): FormDataEntryValue[] + + /** + * Returns a boolean stating whether a `FormData` object contains a certain key. + * + * @param name A string representing the name of the key you want to test for. + * + * @return A boolean value. + */ + has(name: string): boolean + + /** + * Deletes a key and its value(s) from a `FormData` object. + * + * @param name The name of the key you want to delete. 
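A hedged example of the `append()`/`set()` distinction described above, using the `fetch`, `FormData` and `File` implementations these declarations describe; the endpoint is a placeholder:

```ts
import { fetch, FormData, File } from 'undici'

async function upload (endpoint: string) {
  const form = new FormData()
  form.append('description', 'first value')
  form.append('description', 'second value')   // append keeps both values under the same key
  form.set('description', 'only value')        // set replaces every existing value for the key
  form.append('file', new File(['hello'], 'hello.txt', { type: 'text/plain' }))
  const res = await fetch(endpoint, { method: 'POST', body: form })
  return res.status
}
```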
+ */ + delete(name: string): void + + /** + * Executes given callback function for each field of the FormData instance + */ + forEach: ( + callbackfn: (value: FormDataEntryValue, key: string, iterable: FormData) => void, + thisArg?: unknown + ) => void + + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through all keys contained in this `FormData` object. + * Each key is a `string`. + */ + keys: () => SpecIterableIterator + + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through all values contained in this object `FormData` object. + * Each value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). + */ + values: () => SpecIterableIterator + + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through the `FormData` key/value pairs. + * The key of each pair is a string; the value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). + */ + entries: () => SpecIterableIterator<[string, FormDataEntryValue]> + + /** + * An alias for FormData#entries() + */ + [Symbol.iterator]: () => SpecIterableIterator<[string, FormDataEntryValue]> + + readonly [Symbol.toStringTag]: string +} diff --git a/node_modules/undici/types/global-dispatcher.d.ts b/node_modules/undici/types/global-dispatcher.d.ts new file mode 100644 index 0000000..728f95c --- /dev/null +++ b/node_modules/undici/types/global-dispatcher.d.ts @@ -0,0 +1,9 @@ +import Dispatcher from "./dispatcher"; + +export { + getGlobalDispatcher, + setGlobalDispatcher +} + +declare function setGlobalDispatcher(dispatcher: DispatcherImplementation): void; +declare function getGlobalDispatcher(): Dispatcher; diff --git a/node_modules/undici/types/global-origin.d.ts b/node_modules/undici/types/global-origin.d.ts new file mode 100644 index 0000000..322542d --- /dev/null +++ b/node_modules/undici/types/global-origin.d.ts @@ -0,0 +1,7 @@ +export { + setGlobalOrigin, + getGlobalOrigin +} + +declare function setGlobalOrigin(origin: string | URL | undefined): void; +declare function getGlobalOrigin(): URL | undefined; \ No newline at end of file diff --git a/node_modules/undici/types/handlers.d.ts b/node_modules/undici/types/handlers.d.ts new file mode 100644 index 0000000..eb4f5a9 --- /dev/null +++ b/node_modules/undici/types/handlers.d.ts @@ -0,0 +1,9 @@ +import Dispatcher from "./dispatcher"; + +export declare class RedirectHandler implements Dispatcher.DispatchHandlers{ + constructor (dispatch: Dispatcher, maxRedirections: number, opts: Dispatcher.DispatchOptions, handler: Dispatcher.DispatchHandlers) +} + +export declare class DecoratorHandler implements Dispatcher.DispatchHandlers{ + constructor (handler: Dispatcher.DispatchHandlers) +} diff --git a/node_modules/undici/types/header.d.ts b/node_modules/undici/types/header.d.ts new file mode 100644 index 0000000..bfdb329 --- /dev/null +++ b/node_modules/undici/types/header.d.ts @@ -0,0 +1,4 @@ +/** + * The header type declaration of `undici`. 
+ */ +export type IncomingHttpHeaders = Record; diff --git a/node_modules/undici/types/index.d.ts b/node_modules/undici/types/index.d.ts new file mode 100644 index 0000000..0ea8bdc --- /dev/null +++ b/node_modules/undici/types/index.d.ts @@ -0,0 +1,65 @@ +import Dispatcher from'./dispatcher' +import { setGlobalDispatcher, getGlobalDispatcher } from './global-dispatcher' +import { setGlobalOrigin, getGlobalOrigin } from './global-origin' +import Pool from'./pool' +import { RedirectHandler, DecoratorHandler } from './handlers' + +import BalancedPool from './balanced-pool' +import Client from'./client' +import buildConnector from'./connector' +import errors from'./errors' +import Agent from'./agent' +import MockClient from'./mock-client' +import MockPool from'./mock-pool' +import MockAgent from'./mock-agent' +import mockErrors from'./mock-errors' +import ProxyAgent from'./proxy-agent' +import RetryHandler from'./retry-handler' +import { request, pipeline, stream, connect, upgrade } from './api' + +export * from './cookies' +export * from './fetch' +export * from './file' +export * from './filereader' +export * from './formdata' +export * from './diagnostics-channel' +export * from './websocket' +export * from './content-type' +export * from './cache' +export { Interceptable } from './mock-interceptor' + +export { Dispatcher, BalancedPool, Pool, Client, buildConnector, errors, Agent, request, stream, pipeline, connect, upgrade, setGlobalDispatcher, getGlobalDispatcher, setGlobalOrigin, getGlobalOrigin, MockClient, MockPool, MockAgent, mockErrors, ProxyAgent, RedirectHandler, DecoratorHandler, RetryHandler } +export default Undici + +declare namespace Undici { + var Dispatcher: typeof import('./dispatcher').default + var Pool: typeof import('./pool').default; + var RedirectHandler: typeof import ('./handlers').RedirectHandler + var DecoratorHandler: typeof import ('./handlers').DecoratorHandler + var RetryHandler: typeof import ('./retry-handler').default + var createRedirectInterceptor: typeof import ('./interceptors').createRedirectInterceptor + var BalancedPool: typeof import('./balanced-pool').default; + var Client: typeof import('./client').default; + var buildConnector: typeof import('./connector').default; + var errors: typeof import('./errors').default; + var Agent: typeof import('./agent').default; + var setGlobalDispatcher: typeof import('./global-dispatcher').setGlobalDispatcher; + var getGlobalDispatcher: typeof import('./global-dispatcher').getGlobalDispatcher; + var request: typeof import('./api').request; + var stream: typeof import('./api').stream; + var pipeline: typeof import('./api').pipeline; + var connect: typeof import('./api').connect; + var upgrade: typeof import('./api').upgrade; + var MockClient: typeof import('./mock-client').default; + var MockPool: typeof import('./mock-pool').default; + var MockAgent: typeof import('./mock-agent').default; + var mockErrors: typeof import('./mock-errors').default; + var fetch: typeof import('./fetch').fetch; + var Headers: typeof import('./fetch').Headers; + var Response: typeof import('./fetch').Response; + var Request: typeof import('./fetch').Request; + var FormData: typeof import('./formdata').FormData; + var File: typeof import('./file').File; + var FileReader: typeof import('./filereader').FileReader; + var caches: typeof import('./cache').caches; +} diff --git a/node_modules/undici/types/interceptors.d.ts b/node_modules/undici/types/interceptors.d.ts new file mode 100644 index 0000000..047ac17 --- /dev/null +++ 
b/node_modules/undici/types/interceptors.d.ts @@ -0,0 +1,5 @@ +import Dispatcher from "./dispatcher"; + +type RedirectInterceptorOpts = { maxRedirections?: number } + +export declare function createRedirectInterceptor (opts: RedirectInterceptorOpts): Dispatcher.DispatchInterceptor diff --git a/node_modules/undici/types/mock-agent.d.ts b/node_modules/undici/types/mock-agent.d.ts new file mode 100644 index 0000000..98cd645 --- /dev/null +++ b/node_modules/undici/types/mock-agent.d.ts @@ -0,0 +1,50 @@ +import Agent from './agent' +import Dispatcher from './dispatcher' +import { Interceptable, MockInterceptor } from './mock-interceptor' +import MockDispatch = MockInterceptor.MockDispatch; + +export default MockAgent + +interface PendingInterceptor extends MockDispatch { + origin: string; +} + +/** A mocked Agent class that implements the Agent API. It allows one to intercept HTTP requests made through undici and return mocked responses instead. */ +declare class MockAgent extends Dispatcher { + constructor(options?: MockAgent.Options) + /** Creates and retrieves mock Dispatcher instances which can then be used to intercept HTTP requests. If the number of connections on the mock agent is set to 1, a MockClient instance is returned. Otherwise a MockPool instance is returned. */ + get(origin: string): TInterceptable; + get(origin: RegExp): TInterceptable; + get(origin: ((origin: string) => boolean)): TInterceptable; + /** Dispatches a mocked request. */ + dispatch(options: Agent.DispatchOptions, handler: Dispatcher.DispatchHandlers): boolean; + /** Closes the mock agent and waits for registered mock pools and clients to also close before resolving. */ + close(): Promise; + /** Disables mocking in MockAgent. */ + deactivate(): void; + /** Enables mocking in a MockAgent instance. When instantiated, a MockAgent is automatically activated. Therefore, this method is only effective after `MockAgent.deactivate` has been called. */ + activate(): void; + /** Define host matchers so only matching requests that aren't intercepted by the mock dispatchers will be attempted. */ + enableNetConnect(): void; + enableNetConnect(host: string): void; + enableNetConnect(host: RegExp): void; + enableNetConnect(host: ((host: string) => boolean)): void; + /** Causes all requests to throw when requests are not matched in a MockAgent intercept. */ + disableNetConnect(): void; + pendingInterceptors(): PendingInterceptor[]; + assertNoPendingInterceptors(options?: { + pendingInterceptorsFormatter?: PendingInterceptorsFormatter; + }): void; +} + +interface PendingInterceptorsFormatter { + format(pendingInterceptors: readonly PendingInterceptor[]): string; +} + +declare namespace MockAgent { + /** MockAgent options. */ + export interface Options extends Agent.Options { + /** A custom agent to be encapsulated by the MockAgent. */ + agent?: Agent; + } +} diff --git a/node_modules/undici/types/mock-client.d.ts b/node_modules/undici/types/mock-client.d.ts new file mode 100644 index 0000000..51d008c --- /dev/null +++ b/node_modules/undici/types/mock-client.d.ts @@ -0,0 +1,25 @@ +import Client from './client' +import Dispatcher from './dispatcher' +import MockAgent from './mock-agent' +import { MockInterceptor, Interceptable } from './mock-interceptor' + +export default MockClient + +/** MockClient extends the Client API and allows one to mock requests. 
*/ +declare class MockClient extends Client implements Interceptable { + constructor(origin: string, options: MockClient.Options); + /** Intercepts any matching requests that use the same origin as this mock client. */ + intercept(options: MockInterceptor.Options): MockInterceptor; + /** Dispatches a mocked request. */ + dispatch(options: Dispatcher.DispatchOptions, handlers: Dispatcher.DispatchHandlers): boolean; + /** Closes the mock client and gracefully waits for enqueued requests to complete. */ + close(): Promise; +} + +declare namespace MockClient { + /** MockClient options. */ + export interface Options extends Client.Options { + /** The agent to associate this MockClient with. */ + agent: MockAgent; + } +} diff --git a/node_modules/undici/types/mock-errors.d.ts b/node_modules/undici/types/mock-errors.d.ts new file mode 100644 index 0000000..3d9e727 --- /dev/null +++ b/node_modules/undici/types/mock-errors.d.ts @@ -0,0 +1,12 @@ +import Errors from './errors' + +export default MockErrors + +declare namespace MockErrors { + /** The request does not match any registered mock dispatches. */ + export class MockNotMatchedError extends Errors.UndiciError { + constructor(message?: string); + name: 'MockNotMatchedError'; + code: 'UND_MOCK_ERR_MOCK_NOT_MATCHED'; + } +} diff --git a/node_modules/undici/types/mock-interceptor.d.ts b/node_modules/undici/types/mock-interceptor.d.ts new file mode 100644 index 0000000..6b3961c --- /dev/null +++ b/node_modules/undici/types/mock-interceptor.d.ts @@ -0,0 +1,93 @@ +import { IncomingHttpHeaders } from './header' +import Dispatcher from './dispatcher'; +import { BodyInit, Headers } from './fetch' + +export { + Interceptable, + MockInterceptor, + MockScope +} + +/** The scope associated with a mock dispatch. */ +declare class MockScope { + constructor(mockDispatch: MockInterceptor.MockDispatch); + /** Delay a reply by a set amount of time in ms. */ + delay(waitInMs: number): MockScope; + /** Persist the defined mock data for the associated reply. It will return the defined mock data indefinitely. */ + persist(): MockScope; + /** Define a reply for a set amount of matching requests. */ + times(repeatTimes: number): MockScope; +} + +/** The interceptor for a Mock. */ +declare class MockInterceptor { + constructor(options: MockInterceptor.Options, mockDispatches: MockInterceptor.MockDispatch[]); + /** Mock an undici request with the defined reply. */ + reply(replyOptionsCallback: MockInterceptor.MockReplyOptionsCallback): MockScope; + reply( + statusCode: number, + data?: TData | Buffer | string | MockInterceptor.MockResponseDataHandler, + responseOptions?: MockInterceptor.MockResponseOptions + ): MockScope; + /** Mock an undici request by throwing the defined reply error. */ + replyWithError(error: TError): MockScope; + /** Set default reply headers on the interceptor for subsequent mocked replies. */ + defaultReplyHeaders(headers: IncomingHttpHeaders): MockInterceptor; + /** Set default reply trailers on the interceptor for subsequent mocked replies. */ + defaultReplyTrailers(trailers: Record): MockInterceptor; + /** Set automatically calculated content-length header on subsequent mocked replies. */ + replyContentLength(): MockInterceptor; +} + +declare namespace MockInterceptor { + /** MockInterceptor options. */ + export interface Options { + /** Path to intercept on. */ + path: string | RegExp | ((path: string) => boolean); + /** Method to intercept on. Defaults to GET. 
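To make the mocking flow concrete, a usage sketch tying MockAgent, the interceptor and a mocked reply together — assumptions: the MockAgent/setGlobalDispatcher exports from the package root, and a placeholder origin:

```ts
import { MockAgent, setGlobalDispatcher, request } from 'undici'

async function demo () {
  const mockAgent = new MockAgent()
  mockAgent.disableNetConnect()          // unmatched requests now throw instead of hitting the network
  setGlobalDispatcher(mockAgent)

  mockAgent
    .get('https://api.example.com')      // returns a mock Dispatcher (MockPool by default)
    .intercept({ path: '/status', method: 'GET' })
    .reply(200, { ok: true })

  const { body } = await request('https://api.example.com/status')
  return body.json()                     // -> { ok: true }
}
```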
*/ + method?: string | RegExp | ((method: string) => boolean); + /** Body to intercept on. */ + body?: string | RegExp | ((body: string) => boolean); + /** Headers to intercept on. */ + headers?: Record boolean)> | ((headers: Record) => boolean); + /** Query params to intercept on */ + query?: Record; + } + export interface MockDispatch extends Options { + times: number | null; + persist: boolean; + consumed: boolean; + data: MockDispatchData; + } + export interface MockDispatchData extends MockResponseOptions { + error: TError | null; + statusCode?: number; + data?: TData | string; + } + export interface MockResponseOptions { + headers?: IncomingHttpHeaders; + trailers?: Record; + } + + export interface MockResponseCallbackOptions { + path: string; + origin: string; + method: string; + body?: BodyInit | Dispatcher.DispatchOptions['body']; + headers: Headers | Record; + maxRedirections: number; + } + + export type MockResponseDataHandler = ( + opts: MockResponseCallbackOptions + ) => TData | Buffer | string; + + export type MockReplyOptionsCallback = ( + opts: MockResponseCallbackOptions + ) => { statusCode: number, data?: TData | Buffer | string, responseOptions?: MockResponseOptions } +} + +interface Interceptable extends Dispatcher { + /** Intercepts any matching requests that use the same origin as this mock client. */ + intercept(options: MockInterceptor.Options): MockInterceptor; +} diff --git a/node_modules/undici/types/mock-pool.d.ts b/node_modules/undici/types/mock-pool.d.ts new file mode 100644 index 0000000..39e709a --- /dev/null +++ b/node_modules/undici/types/mock-pool.d.ts @@ -0,0 +1,25 @@ +import Pool from './pool' +import MockAgent from './mock-agent' +import { Interceptable, MockInterceptor } from './mock-interceptor' +import Dispatcher from './dispatcher' + +export default MockPool + +/** MockPool extends the Pool API and allows one to mock requests. */ +declare class MockPool extends Pool implements Interceptable { + constructor(origin: string, options: MockPool.Options); + /** Intercepts any matching requests that use the same origin as this mock pool. */ + intercept(options: MockInterceptor.Options): MockInterceptor; + /** Dispatches a mocked request. */ + dispatch(options: Dispatcher.DispatchOptions, handlers: Dispatcher.DispatchHandlers): boolean; + /** Closes the mock pool and gracefully waits for enqueued requests to complete. */ + close(): Promise; +} + +declare namespace MockPool { + /** MockPool options. */ + export interface Options extends Pool.Options { + /** The agent to associate this MockPool with. */ + agent: MockAgent; + } +} diff --git a/node_modules/undici/types/patch.d.ts b/node_modules/undici/types/patch.d.ts new file mode 100644 index 0000000..3871acf --- /dev/null +++ b/node_modules/undici/types/patch.d.ts @@ -0,0 +1,71 @@ +/// + +// See https://github.com/nodejs/undici/issues/1740 + +export type DOMException = typeof globalThis extends { DOMException: infer T } + ? T + : any + +export type EventTarget = typeof globalThis extends { EventTarget: infer T } + ? T + : { + addEventListener( + type: string, + listener: any, + options?: any, + ): void + dispatchEvent(event: Event): boolean + removeEventListener( + type: string, + listener: any, + options?: any | boolean, + ): void + } + +export type Event = typeof globalThis extends { Event: infer T } + ? T + : { + readonly bubbles: boolean + cancelBubble: () => void + readonly cancelable: boolean + readonly composed: boolean + composedPath(): [EventTarget?] 
+ readonly currentTarget: EventTarget | null + readonly defaultPrevented: boolean + readonly eventPhase: 0 | 2 + readonly isTrusted: boolean + preventDefault(): void + returnValue: boolean + readonly srcElement: EventTarget | null + stopImmediatePropagation(): void + stopPropagation(): void + readonly target: EventTarget | null + readonly timeStamp: number + readonly type: string + } + +export interface EventInit { + bubbles?: boolean + cancelable?: boolean + composed?: boolean +} + +export interface EventListenerOptions { + capture?: boolean +} + +export interface AddEventListenerOptions extends EventListenerOptions { + once?: boolean + passive?: boolean + signal?: AbortSignal +} + +export type EventListenerOrEventListenerObject = EventListener | EventListenerObject + +export interface EventListenerObject { + handleEvent (object: Event): void +} + +export interface EventListener { + (evt: Event): void +} diff --git a/node_modules/undici/types/pool-stats.d.ts b/node_modules/undici/types/pool-stats.d.ts new file mode 100644 index 0000000..8b6d2bf --- /dev/null +++ b/node_modules/undici/types/pool-stats.d.ts @@ -0,0 +1,19 @@ +import Pool from "./pool" + +export default PoolStats + +declare class PoolStats { + constructor(pool: Pool); + /** Number of open socket connections in this pool. */ + connected: number; + /** Number of open socket connections in this pool that do not have an active request. */ + free: number; + /** Number of pending requests across all clients in this pool. */ + pending: number; + /** Number of queued requests across all clients in this pool. */ + queued: number; + /** Number of currently active requests across all clients in this pool. */ + running: number; + /** Number of active, pending, or queued requests across all clients in this pool. */ + size: number; +} diff --git a/node_modules/undici/types/pool.d.ts b/node_modules/undici/types/pool.d.ts new file mode 100644 index 0000000..7747d48 --- /dev/null +++ b/node_modules/undici/types/pool.d.ts @@ -0,0 +1,28 @@ +import Client from './client' +import TPoolStats from './pool-stats' +import { URL } from 'url' +import Dispatcher from "./dispatcher"; + +export default Pool + +declare class Pool extends Dispatcher { + constructor(url: string | URL, options?: Pool.Options) + /** `true` after `pool.close()` has been called. */ + closed: boolean; + /** `true` after `pool.destroyed()` has been called or `pool.close()` has been called and the pool shutdown has completed. */ + destroyed: boolean; + /** Aggregate stats for a Pool. */ + readonly stats: TPoolStats; +} + +declare namespace Pool { + export type PoolStats = TPoolStats; + export interface Options extends Client.Options { + /** Default: `(origin, opts) => new Client(origin, opts)`. */ + factory?(origin: URL, opts: object): Dispatcher; + /** The max number of clients to create. `null` if no limit. Default `null`. 
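For context, a small sketch of the Pool declared here, assuming the package root re-exports Pool and using a placeholder localhost origin:

```ts
import { Pool } from 'undici'

async function checkHealth () {
  const pool = new Pool('http://localhost:3000', { connections: 10 })
  const { statusCode, body } = await pool.request({ path: '/health', method: 'GET' })
  console.log(statusCode, await body.text())
  console.log(pool.stats.connected, pool.stats.pending)   // PoolStats snapshot
  await pool.close()
}
```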
*/ + connections?: number | null; + + interceptors?: { Pool?: readonly Dispatcher.DispatchInterceptor[] } & Client.Options["interceptors"] + } +} diff --git a/node_modules/undici/types/proxy-agent.d.ts b/node_modules/undici/types/proxy-agent.d.ts new file mode 100644 index 0000000..96b2638 --- /dev/null +++ b/node_modules/undici/types/proxy-agent.d.ts @@ -0,0 +1,30 @@ +import Agent from './agent' +import buildConnector from './connector'; +import Client from './client' +import Dispatcher from './dispatcher' +import { IncomingHttpHeaders } from './header' +import Pool from './pool' + +export default ProxyAgent + +declare class ProxyAgent extends Dispatcher { + constructor(options: ProxyAgent.Options | string) + + dispatch(options: Agent.DispatchOptions, handler: Dispatcher.DispatchHandlers): boolean; + close(): Promise; +} + +declare namespace ProxyAgent { + export interface Options extends Agent.Options { + uri: string; + /** + * @deprecated use opts.token + */ + auth?: string; + token?: string; + headers?: IncomingHttpHeaders; + requestTls?: buildConnector.BuildOptions; + proxyTls?: buildConnector.BuildOptions; + clientFactory?(origin: URL, opts: object): Dispatcher; + } +} diff --git a/node_modules/undici/types/readable.d.ts b/node_modules/undici/types/readable.d.ts new file mode 100644 index 0000000..4549a8c --- /dev/null +++ b/node_modules/undici/types/readable.d.ts @@ -0,0 +1,61 @@ +import { Readable } from "stream"; +import { Blob } from 'buffer' + +export default BodyReadable + +declare class BodyReadable extends Readable { + constructor( + resume?: (this: Readable, size: number) => void | null, + abort?: () => void | null, + contentType?: string + ) + + /** Consumes and returns the body as a string + * https://fetch.spec.whatwg.org/#dom-body-text + */ + text(): Promise + + /** Consumes and returns the body as a JavaScript Object + * https://fetch.spec.whatwg.org/#dom-body-json + */ + json(): Promise + + /** Consumes and returns the body as a Blob + * https://fetch.spec.whatwg.org/#dom-body-blob + */ + blob(): Promise + + /** Consumes and returns the body as an ArrayBuffer + * https://fetch.spec.whatwg.org/#dom-body-arraybuffer + */ + arrayBuffer(): Promise + + /** Not implemented + * + * https://fetch.spec.whatwg.org/#dom-body-formdata + */ + formData(): Promise + + /** Returns true if the body is not null and the body has been consumed + * + * Otherwise, returns false + * + * https://fetch.spec.whatwg.org/#dom-body-bodyused + */ + readonly bodyUsed: boolean + + /** Throws on node 16.6.0 + * + * If body is null, it should return null as the body + * + * If body is not null, should return the body as a ReadableStream + * + * https://fetch.spec.whatwg.org/#dom-body-body + */ + readonly body: never | undefined + + /** Dumps the response body by reading `limit` number of bytes. 
+ * @param opts.limit Number of bytes to read (optional) - Default: 262144 + */ + dump(opts?: { limit: number }): Promise +} diff --git a/node_modules/undici/types/retry-handler.d.ts b/node_modules/undici/types/retry-handler.d.ts new file mode 100644 index 0000000..0528eb4 --- /dev/null +++ b/node_modules/undici/types/retry-handler.d.ts @@ -0,0 +1,116 @@ +import Dispatcher from "./dispatcher"; + +export default RetryHandler; + +declare class RetryHandler implements Dispatcher.DispatchHandlers { + constructor( + options: Dispatcher.DispatchOptions & { + retryOptions?: RetryHandler.RetryOptions; + }, + retryHandlers: RetryHandler.RetryHandlers + ); +} + +declare namespace RetryHandler { + export type RetryState = { counter: number; currentTimeout: number }; + + export type RetryContext = { + state: RetryState; + opts: Dispatcher.DispatchOptions & { + retryOptions?: RetryHandler.RetryOptions; + }; + } + + export type OnRetryCallback = (result?: Error | null) => void; + + export type RetryCallback = ( + err: Error, + context: { + state: RetryState; + opts: Dispatcher.DispatchOptions & { + retryOptions?: RetryHandler.RetryOptions; + }; + }, + callback: OnRetryCallback + ) => number | null; + + export interface RetryOptions { + /** + * Callback to be invoked on every retry iteration. + * It receives the error, current state of the retry object and the options object + * passed when instantiating the retry handler. + * + * @type {RetryCallback} + * @memberof RetryOptions + */ + retry?: RetryCallback; + /** + * Maximum number of retries to allow. + * + * @type {number} + * @memberof RetryOptions + * @default 5 + */ + maxRetries?: number; + /** + * Max number of milliseconds allow between retries + * + * @type {number} + * @memberof RetryOptions + * @default 30000 + */ + maxTimeout?: number; + /** + * Initial number of milliseconds to wait before retrying for the first time. + * + * @type {number} + * @memberof RetryOptions + * @default 500 + */ + minTimeout?: number; + /** + * Factior to multiply the timeout factor between retries. + * + * @type {number} + * @memberof RetryOptions + * @default 2 + */ + timeoutFactor?: number; + /** + * It enables to automatically infer timeout between retries based on the `Retry-After` header. + * + * @type {boolean} + * @memberof RetryOptions + * @default true + */ + retryAfter?: boolean; + /** + * HTTP methods to retry. + * + * @type {Dispatcher.HttpMethod[]} + * @memberof RetryOptions + * @default ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + */ + methods?: Dispatcher.HttpMethod[]; + /** + * Error codes to be retried. e.g. `ECONNRESET`, `ENOTFOUND`, `ETIMEDOUT`, `ECONNREFUSED`, etc. + * + * @type {string[]} + * @default ['ECONNRESET','ECONNREFUSED','ENOTFOUND','ENETDOWN','ENETUNREACH','EHOSTDOWN','EHOSTUNREACH','EPIPE'] + */ + errorCodes?: string[]; + /** + * HTTP status codes to be retried. 
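A hedged wiring example for these retry options, following the RetryHandlers shape declared above; the origin, path and handler bodies are placeholders:

```ts
import { Agent, RetryHandler } from 'undici'

const agent = new Agent()
const opts = {
  origin: 'http://localhost:3000',
  path: '/flaky',
  method: 'GET' as const,
  retryOptions: { maxRetries: 3, minTimeout: 500, statusCodes: [500, 503] }
}

// The handler receives the final (post-retry) outcome; dispatch is the underlying dispatcher's method.
agent.dispatch(
  opts,
  new RetryHandler(opts, {
    dispatch: agent.dispatch.bind(agent),
    handler: {
      onHeaders (statusCode) { console.log('status', statusCode); return true },
      onData () { return true },
      onComplete () { console.log('done') },
      onError (err) { console.error('gave up after retries', err) }
    }
  })
)
```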
+ * + * @type {number[]} + * @memberof RetryOptions + * @default [500, 502, 503, 504, 429], + */ + statusCodes?: number[]; + } + + export interface RetryHandlers { + dispatch: Dispatcher["dispatch"]; + handler: Dispatcher.DispatchHandlers; + } +} diff --git a/node_modules/undici/types/webidl.d.ts b/node_modules/undici/types/webidl.d.ts new file mode 100644 index 0000000..40cfe06 --- /dev/null +++ b/node_modules/undici/types/webidl.d.ts @@ -0,0 +1,220 @@ +// These types are not exported, and are only used internally + +/** + * Take in an unknown value and return one that is of type T + */ +type Converter = (object: unknown) => T + +type SequenceConverter = (object: unknown) => T[] + +type RecordConverter = (object: unknown) => Record + +interface ConvertToIntOpts { + clamp?: boolean + enforceRange?: boolean +} + +interface WebidlErrors { + exception (opts: { header: string, message: string }): TypeError + /** + * @description Throw an error when conversion from one type to another has failed + */ + conversionFailed (opts: { + prefix: string + argument: string + types: string[] + }): TypeError + /** + * @description Throw an error when an invalid argument is provided + */ + invalidArgument (opts: { + prefix: string + value: string + type: string + }): TypeError +} + +interface WebidlUtil { + /** + * @see https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values + */ + Type (object: unknown): + | 'Undefined' + | 'Boolean' + | 'String' + | 'Symbol' + | 'Number' + | 'BigInt' + | 'Null' + | 'Object' + + /** + * @see https://webidl.spec.whatwg.org/#abstract-opdef-converttoint + */ + ConvertToInt ( + V: unknown, + bitLength: number, + signedness: 'signed' | 'unsigned', + opts?: ConvertToIntOpts + ): number + + /** + * @see https://webidl.spec.whatwg.org/#abstract-opdef-converttoint + */ + IntegerPart (N: number): number +} + +interface WebidlConverters { + /** + * @see https://webidl.spec.whatwg.org/#es-DOMString + */ + DOMString (V: unknown, opts?: { + legacyNullToEmptyString: boolean + }): string + + /** + * @see https://webidl.spec.whatwg.org/#es-ByteString + */ + ByteString (V: unknown): string + + /** + * @see https://webidl.spec.whatwg.org/#es-USVString + */ + USVString (V: unknown): string + + /** + * @see https://webidl.spec.whatwg.org/#es-boolean + */ + boolean (V: unknown): boolean + + /** + * @see https://webidl.spec.whatwg.org/#es-any + */ + any (V: Value): Value + + /** + * @see https://webidl.spec.whatwg.org/#es-long-long + */ + ['long long'] (V: unknown): number + + /** + * @see https://webidl.spec.whatwg.org/#es-unsigned-long-long + */ + ['unsigned long long'] (V: unknown): number + + /** + * @see https://webidl.spec.whatwg.org/#es-unsigned-long + */ + ['unsigned long'] (V: unknown): number + + /** + * @see https://webidl.spec.whatwg.org/#es-unsigned-short + */ + ['unsigned short'] (V: unknown, opts?: ConvertToIntOpts): number + + /** + * @see https://webidl.spec.whatwg.org/#idl-ArrayBuffer + */ + ArrayBuffer (V: unknown): ArrayBufferLike + ArrayBuffer (V: unknown, opts: { allowShared: false }): ArrayBuffer + + /** + * @see https://webidl.spec.whatwg.org/#es-buffer-source-types + */ + TypedArray ( + V: unknown, + TypedArray: NodeJS.TypedArray | ArrayBufferLike + ): NodeJS.TypedArray | ArrayBufferLike + TypedArray ( + V: unknown, + TypedArray: NodeJS.TypedArray | ArrayBufferLike, + opts?: { allowShared: false } + ): NodeJS.TypedArray | ArrayBuffer + + /** + * @see https://webidl.spec.whatwg.org/#es-buffer-source-types + */ + DataView (V: unknown, opts?: { allowShared: boolean 
}): DataView + + /** + * @see https://webidl.spec.whatwg.org/#BufferSource + */ + BufferSource ( + V: unknown, + opts?: { allowShared: boolean } + ): NodeJS.TypedArray | ArrayBufferLike | DataView + + ['sequence']: SequenceConverter + + ['sequence>']: SequenceConverter + + ['record']: RecordConverter + + [Key: string]: (...args: any[]) => unknown +} + +export interface Webidl { + errors: WebidlErrors + util: WebidlUtil + converters: WebidlConverters + + /** + * @description Performs a brand-check on {@param V} to ensure it is a + * {@param cls} object. + */ + brandCheck (V: unknown, cls: Interface, opts?: { strict?: boolean }): asserts V is Interface + + /** + * @see https://webidl.spec.whatwg.org/#es-sequence + * @description Convert a value, V, to a WebIDL sequence type. + */ + sequenceConverter (C: Converter): SequenceConverter + + illegalConstructor (): never + + /** + * @see https://webidl.spec.whatwg.org/#es-to-record + * @description Convert a value, V, to a WebIDL record type. + */ + recordConverter ( + keyConverter: Converter, + valueConverter: Converter + ): RecordConverter + + /** + * Similar to {@link Webidl.brandCheck} but allows skipping the check if third party + * interfaces are allowed. + */ + interfaceConverter (cls: Interface): ( + V: unknown, + opts?: { strict: boolean } + ) => asserts V is typeof cls + + // TODO(@KhafraDev): a type could likely be implemented that can infer the return type + // from the converters given? + /** + * Converts a value, V, to a WebIDL dictionary types. Allows limiting which keys are + * allowed, values allowed, optional and required keys. Auto converts the value to + * a type given a converter. + */ + dictionaryConverter (converters: { + key: string, + defaultValue?: unknown, + required?: boolean, + converter: (...args: unknown[]) => unknown, + allowedValues?: unknown[] + }[]): (V: unknown) => Record + + /** + * @see https://webidl.spec.whatwg.org/#idl-nullable-type + * @description allows a type, V, to be null + */ + nullableConverter ( + converter: Converter + ): (V: unknown) => ReturnType | null + + argumentLengthCheck (args: { length: number }, min: number, context: { + header: string + message?: string + }): void +} diff --git a/node_modules/undici/types/websocket.d.ts b/node_modules/undici/types/websocket.d.ts new file mode 100644 index 0000000..15a357d --- /dev/null +++ b/node_modules/undici/types/websocket.d.ts @@ -0,0 +1,131 @@ +/// + +import type { Blob } from 'buffer' +import type { MessagePort } from 'worker_threads' +import { + EventTarget, + Event, + EventInit, + EventListenerOptions, + AddEventListenerOptions, + EventListenerOrEventListenerObject +} from './patch' +import Dispatcher from './dispatcher' +import { HeadersInit } from './fetch' + +export type BinaryType = 'blob' | 'arraybuffer' + +interface WebSocketEventMap { + close: CloseEvent + error: Event + message: MessageEvent + open: Event +} + +interface WebSocket extends EventTarget { + binaryType: BinaryType + + readonly bufferedAmount: number + readonly extensions: string + + onclose: ((this: WebSocket, ev: WebSocketEventMap['close']) => any) | null + onerror: ((this: WebSocket, ev: WebSocketEventMap['error']) => any) | null + onmessage: ((this: WebSocket, ev: WebSocketEventMap['message']) => any) | null + onopen: ((this: WebSocket, ev: WebSocketEventMap['open']) => any) | null + + readonly protocol: string + readonly readyState: number + readonly url: string + + close(code?: number, reason?: string): void + send(data: string | ArrayBufferLike | Blob | 
ArrayBufferView): void + + readonly CLOSED: number + readonly CLOSING: number + readonly CONNECTING: number + readonly OPEN: number + + addEventListener( + type: K, + listener: (this: WebSocket, ev: WebSocketEventMap[K]) => any, + options?: boolean | AddEventListenerOptions + ): void + addEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | AddEventListenerOptions + ): void + removeEventListener( + type: K, + listener: (this: WebSocket, ev: WebSocketEventMap[K]) => any, + options?: boolean | EventListenerOptions + ): void + removeEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | EventListenerOptions + ): void +} + +export declare const WebSocket: { + prototype: WebSocket + new (url: string | URL, protocols?: string | string[] | WebSocketInit): WebSocket + readonly CLOSED: number + readonly CLOSING: number + readonly CONNECTING: number + readonly OPEN: number +} + +interface CloseEventInit extends EventInit { + code?: number + reason?: string + wasClean?: boolean +} + +interface CloseEvent extends Event { + readonly code: number + readonly reason: string + readonly wasClean: boolean +} + +export declare const CloseEvent: { + prototype: CloseEvent + new (type: string, eventInitDict?: CloseEventInit): CloseEvent +} + +interface MessageEventInit extends EventInit { + data?: T + lastEventId?: string + origin?: string + ports?: (typeof MessagePort)[] + source?: typeof MessagePort | null +} + +interface MessageEvent extends Event { + readonly data: T + readonly lastEventId: string + readonly origin: string + readonly ports: ReadonlyArray + readonly source: typeof MessagePort | null + initMessageEvent( + type: string, + bubbles?: boolean, + cancelable?: boolean, + data?: any, + origin?: string, + lastEventId?: string, + source?: typeof MessagePort | null, + ports?: (typeof MessagePort)[] + ): void; +} + +export declare const MessageEvent: { + prototype: MessageEvent + new(type: string, eventInitDict?: MessageEventInit): MessageEvent +} + +interface WebSocketInit { + protocols?: string | string[], + dispatcher?: Dispatcher, + headers?: HeadersInit +} diff --git a/node_modules/webidl-conversions/LICENSE.md b/node_modules/webidl-conversions/LICENSE.md new file mode 100644 index 0000000..d4a994f --- /dev/null +++ b/node_modules/webidl-conversions/LICENSE.md @@ -0,0 +1,12 @@ +# The BSD 2-Clause License + +Copyright (c) 2014, Domenic Denicola +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/webidl-conversions/README.md b/node_modules/webidl-conversions/README.md new file mode 100644 index 0000000..3657890 --- /dev/null +++ b/node_modules/webidl-conversions/README.md @@ -0,0 +1,53 @@ +# WebIDL Type Conversions on JavaScript Values + +This package implements, in JavaScript, the algorithms to convert a given JavaScript value according to a given [WebIDL](http://heycam.github.io/webidl/) [type](http://heycam.github.io/webidl/#idl-types). + +The goal is that you should be able to write code like + +```js +const conversions = require("webidl-conversions"); + +function doStuff(x, y) { + x = conversions["boolean"](x); + y = conversions["unsigned long"](y); + // actual algorithm code here +} +``` + +and your function `doStuff` will behave the same as a WebIDL operation declared as + +```webidl +void doStuff(boolean x, unsigned long y); +``` + +## API + +This package's main module's default export is an object with a variety of methods, each corresponding to a different WebIDL type. Each method, when invoked on a JavaScript value, will give back the new JavaScript value that results after passing through the WebIDL conversion rules. (See below for more details on what that means.) Alternately, the method could throw an error, if the WebIDL algorithm is specified to do so: for example `conversions["float"](NaN)` [will throw a `TypeError`](http://heycam.github.io/webidl/#es-float). + +## Status + +All of the numeric types are implemented (float being implemented as double) and some others are as well - check the source for all of them. This list will grow over time in service of the [HTML as Custom Elements](https://github.com/dglazkov/html-as-custom-elements) project, but in the meantime, pull requests welcome! + +I'm not sure yet what the strategy will be for modifiers, e.g. [`[Clamp]`](http://heycam.github.io/webidl/#Clamp). Maybe something like `conversions["unsigned long"](x, { clamp: true })`? We'll see. + +We might also want to extend the API to give better error messages, e.g. "Argument 1 of HTMLMediaElement.fastSeek is not a finite floating-point value" instead of "Argument is not a finite floating-point value." This would require passing in more information to the conversion functions than we currently do. + +## Background + +What's actually going on here, conceptually, is pretty weird. Let's try to explain. + +WebIDL, as part of its madness-inducing design, has its own type system. When people write algorithms in web platform specs, they usually operate on WebIDL values, i.e. instances of WebIDL types. For example, if they were specifying the algorithm for our `doStuff` operation above, they would treat `x` as a WebIDL value of [WebIDL type `boolean`](http://heycam.github.io/webidl/#idl-boolean). Crucially, they would _not_ treat `x` as a JavaScript variable whose value is either the JavaScript `true` or `false`. They're instead working in a different type system altogether, with its own rules. 
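+
+Concretely, a minimal sketch of how the API described above behaves (the values follow from the conversion rules in `lib/index.js`):
+
+```js
+const conversions = require("webidl-conversions");
+
+conversions["boolean"]("");          // false (any JS value is coerced with !!)
+conversions["unsigned long"](true);  // 1 (a JS true in an unsigned long context becomes 1)
+conversions["float"](NaN);           // throws a TypeError (not a finite floating-point value)
+```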
+ +Separately from its type system, WebIDL defines a ["binding"](http://heycam.github.io/webidl/#ecmascript-binding) of the type system into JavaScript. This contains rules like: when you pass a JavaScript value to the JavaScript method that manifests a given WebIDL operation, how does that get converted into a WebIDL value? For example, a JavaScript `true` passed in the position of a WebIDL `boolean` argument becomes a WebIDL `true`. But, a JavaScript `true` passed in the position of a [WebIDL `unsigned long`](http://heycam.github.io/webidl/#idl-unsigned-long) becomes a WebIDL `1`. And so on. + +Finally, we have the actual implementation code. This is usually C++, although these days [some smart people are using Rust](https://github.com/servo/servo). The implementation, of course, has its own type system. So when they implement the WebIDL algorithms, they don't actually use WebIDL values, since those aren't "real" outside of specs. Instead, implementations apply the WebIDL binding rules in such a way as to convert incoming JavaScript values into C++ values. For example, if code in the browser called `doStuff(true, true)`, then the implementation code would eventually receive a C++ `bool` containing `true` and a C++ `uint32_t` containing `1`. + +The upside of all this is that implementations can abstract all the conversion logic away, letting WebIDL handle it, and focus on implementing the relevant methods in C++ with values of the correct type already provided. That is payoff of WebIDL, in a nutshell. + +And getting to that payoff is the goal of _this_ project—but for JavaScript implementations, instead of C++ ones. That is, this library is designed to make it easier for JavaScript developers to write functions that behave like a given WebIDL operation. So conceptually, the conversion pipeline, which in its general form is JavaScript values ↦ WebIDL values ↦ implementation-language values, in this case becomes JavaScript values ↦ WebIDL values ↦ JavaScript values. And that intermediate step is where all the logic is performed: a JavaScript `true` becomes a WebIDL `1` in an unsigned long context, which then becomes a JavaScript `1`. + +## Don't Use This + +Seriously, why would you ever use this? You really shouldn't. WebIDL is … not great, and you shouldn't be emulating its semantics. If you're looking for a generic argument-processing library, you should find one with better rules than those from WebIDL. In general, your JavaScript should not be trying to become more like WebIDL; if anything, we should fix WebIDL to make it more like JavaScript. + +The _only_ people who should use this are those trying to create faithful implementations (or polyfills) of web platform interfaces defined in WebIDL. diff --git a/node_modules/webidl-conversions/lib/index.js b/node_modules/webidl-conversions/lib/index.js new file mode 100644 index 0000000..c5153a3 --- /dev/null +++ b/node_modules/webidl-conversions/lib/index.js @@ -0,0 +1,189 @@ +"use strict"; + +var conversions = {}; +module.exports = conversions; + +function sign(x) { + return x < 0 ? -1 : 1; +} + +function evenRound(x) { + // Round x to the nearest integer, choosing the even integer if it lies halfway between two. + if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) + return Math.floor(x); + } else { + return Math.round(x); + } +} + +function createNumberConversion(bitLength, typeOpts) { + if (!typeOpts.unsigned) { + --bitLength; + } + const lowerBound = typeOpts.unsigned ? 
0 : -Math.pow(2, bitLength); + const upperBound = Math.pow(2, bitLength) - 1; + + const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); + const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); + + return function(V, opts) { + if (!opts) opts = {}; + + let x = +V; + + if (opts.enforceRange) { + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite number"); + } + + x = sign(x) * Math.floor(Math.abs(x)); + if (x < lowerBound || x > upperBound) { + throw new TypeError("Argument is not in byte range"); + } + + return x; + } + + if (!isNaN(x) && opts.clamp) { + x = evenRound(x); + + if (x < lowerBound) x = lowerBound; + if (x > upperBound) x = upperBound; + return x; + } + + if (!Number.isFinite(x) || x === 0) { + return 0; + } + + x = sign(x) * Math.floor(Math.abs(x)); + x = x % moduloVal; + + if (!typeOpts.unsigned && x >= moduloBound) { + return x - moduloVal; + } else if (typeOpts.unsigned) { + if (x < 0) { + x += moduloVal; + } else if (x === -0) { // don't return negative zero + return 0; + } + } + + return x; + } +} + +conversions["void"] = function () { + return undefined; +}; + +conversions["boolean"] = function (val) { + return !!val; +}; + +conversions["byte"] = createNumberConversion(8, { unsigned: false }); +conversions["octet"] = createNumberConversion(8, { unsigned: true }); + +conversions["short"] = createNumberConversion(16, { unsigned: false }); +conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); + +conversions["long"] = createNumberConversion(32, { unsigned: false }); +conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); + +conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); +conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); + +conversions["double"] = function (V) { + const x = +V; + + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite floating-point value"); + } + + return x; +}; + +conversions["unrestricted double"] = function (V) { + const x = +V; + + if (isNaN(x)) { + throw new TypeError("Argument is NaN"); + } + + return x; +}; + +// not quite valid, but good enough for JS +conversions["float"] = conversions["double"]; +conversions["unrestricted float"] = conversions["unrestricted double"]; + +conversions["DOMString"] = function (V, opts) { + if (!opts) opts = {}; + + if (opts.treatNullAsEmptyString && V === null) { + return ""; + } + + return String(V); +}; + +conversions["ByteString"] = function (V, opts) { + const x = String(V); + let c = undefined; + for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { + if (c > 255) { + throw new TypeError("Argument is not a valid bytestring"); + } + } + + return x; +}; + +conversions["USVString"] = function (V) { + const S = String(V); + const n = S.length; + const U = []; + for (let i = 0; i < n; ++i) { + const c = S.charCodeAt(i); + if (c < 0xD800 || c > 0xDFFF) { + U.push(String.fromCodePoint(c)); + } else if (0xDC00 <= c && c <= 0xDFFF) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + if (i === n - 1) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + const d = S.charCodeAt(i + 1); + if (0xDC00 <= d && d <= 0xDFFF) { + const a = c & 0x3FF; + const b = d & 0x3FF; + U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); + ++i; + } else { + U.push(String.fromCodePoint(0xFFFD)); + } + } + } + } + 
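+  // lone surrogates were replaced with U+FFFD above; valid surrogate pairs were recombined into single code points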
+ return U.join(''); +}; + +conversions["Date"] = function (V, opts) { + if (!(V instanceof Date)) { + throw new TypeError("Argument is not a Date object"); + } + if (isNaN(V)) { + return undefined; + } + + return V; +}; + +conversions["RegExp"] = function (V, opts) { + if (!(V instanceof RegExp)) { + V = new RegExp(V); + } + + return V; +}; diff --git a/node_modules/webidl-conversions/package.json b/node_modules/webidl-conversions/package.json new file mode 100644 index 0000000..c31bc07 --- /dev/null +++ b/node_modules/webidl-conversions/package.json @@ -0,0 +1,23 @@ +{ + "name": "webidl-conversions", + "version": "3.0.1", + "description": "Implements the WebIDL algorithms for converting to and from JavaScript values", + "main": "lib/index.js", + "scripts": { + "test": "mocha test/*.js" + }, + "repository": "jsdom/webidl-conversions", + "keywords": [ + "webidl", + "web", + "types" + ], + "files": [ + "lib/" + ], + "author": "Domenic Denicola (https://domenic.me/)", + "license": "BSD-2-Clause", + "devDependencies": { + "mocha": "^1.21.4" + } +} diff --git a/node_modules/whatwg-url/LICENSE.txt b/node_modules/whatwg-url/LICENSE.txt new file mode 100644 index 0000000..54dfac3 --- /dev/null +++ b/node_modules/whatwg-url/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015–2016 Sebastian Mayr + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/whatwg-url/README.md b/node_modules/whatwg-url/README.md new file mode 100644 index 0000000..4347a7f --- /dev/null +++ b/node_modules/whatwg-url/README.md @@ -0,0 +1,67 @@ +# whatwg-url + +whatwg-url is a full implementation of the WHATWG [URL Standard](https://url.spec.whatwg.org/). It can be used standalone, but it also exposes a lot of the internal algorithms that are useful for integrating a URL parser into a project like [jsdom](https://github.com/tmpvar/jsdom). + +## Current Status + +whatwg-url is currently up to date with the URL spec up to commit [a62223](https://github.com/whatwg/url/commit/a622235308342c9adc7fc2fd1659ff059f7d5e2a). + +## API + +### The `URL` Constructor + +The main API is the [`URL`](https://url.spec.whatwg.org/#url) export, which follows the spec's behavior in all ways (including e.g. `USVString` conversion). Most consumers of this library will want to use this. + +### Low-level URL Standard API + +The following methods are exported for use by places like jsdom that need to implement things like [`HTMLHyperlinkElementUtils`](https://html.spec.whatwg.org/#htmlhyperlinkelementutils). 
They operate on or return an "internal URL" or ["URL record"](https://url.spec.whatwg.org/#concept-url) type. + +- [URL parser](https://url.spec.whatwg.org/#concept-url-parser): `parseURL(input, { baseURL, encodingOverride })` +- [Basic URL parser](https://url.spec.whatwg.org/#concept-basic-url-parser): `basicURLParse(input, { baseURL, encodingOverride, url, stateOverride })` +- [URL serializer](https://url.spec.whatwg.org/#concept-url-serializer): `serializeURL(urlRecord, excludeFragment)` +- [Host serializer](https://url.spec.whatwg.org/#concept-host-serializer): `serializeHost(hostFromURLRecord)` +- [Serialize an integer](https://url.spec.whatwg.org/#serialize-an-integer): `serializeInteger(number)` +- [Origin](https://url.spec.whatwg.org/#concept-url-origin) [serializer](https://html.spec.whatwg.org/multipage/browsers.html#serialization-of-an-origin): `serializeURLOrigin(urlRecord)` +- [Set the username](https://url.spec.whatwg.org/#set-the-username): `setTheUsername(urlRecord, usernameString)` +- [Set the password](https://url.spec.whatwg.org/#set-the-password): `setThePassword(urlRecord, passwordString)` +- [Cannot have a username/password/port](https://url.spec.whatwg.org/#cannot-have-a-username-password-port): `cannotHaveAUsernamePasswordPort(urlRecord)` + +The `stateOverride` parameter is one of the following strings: + +- [`"scheme start"`](https://url.spec.whatwg.org/#scheme-start-state) +- [`"scheme"`](https://url.spec.whatwg.org/#scheme-state) +- [`"no scheme"`](https://url.spec.whatwg.org/#no-scheme-state) +- [`"special relative or authority"`](https://url.spec.whatwg.org/#special-relative-or-authority-state) +- [`"path or authority"`](https://url.spec.whatwg.org/#path-or-authority-state) +- [`"relative"`](https://url.spec.whatwg.org/#relative-state) +- [`"relative slash"`](https://url.spec.whatwg.org/#relative-slash-state) +- [`"special authority slashes"`](https://url.spec.whatwg.org/#special-authority-slashes-state) +- [`"special authority ignore slashes"`](https://url.spec.whatwg.org/#special-authority-ignore-slashes-state) +- [`"authority"`](https://url.spec.whatwg.org/#authority-state) +- [`"host"`](https://url.spec.whatwg.org/#host-state) +- [`"hostname"`](https://url.spec.whatwg.org/#hostname-state) +- [`"port"`](https://url.spec.whatwg.org/#port-state) +- [`"file"`](https://url.spec.whatwg.org/#file-state) +- [`"file slash"`](https://url.spec.whatwg.org/#file-slash-state) +- [`"file host"`](https://url.spec.whatwg.org/#file-host-state) +- [`"path start"`](https://url.spec.whatwg.org/#path-start-state) +- [`"path"`](https://url.spec.whatwg.org/#path-state) +- [`"cannot-be-a-base-URL path"`](https://url.spec.whatwg.org/#cannot-be-a-base-url-path-state) +- [`"query"`](https://url.spec.whatwg.org/#query-state) +- [`"fragment"`](https://url.spec.whatwg.org/#fragment-state) + +The URL record type has the following API: + +- [`scheme`](https://url.spec.whatwg.org/#concept-url-scheme) +- [`username`](https://url.spec.whatwg.org/#concept-url-username) +- [`password`](https://url.spec.whatwg.org/#concept-url-password) +- [`host`](https://url.spec.whatwg.org/#concept-url-host) +- [`port`](https://url.spec.whatwg.org/#concept-url-port) +- [`path`](https://url.spec.whatwg.org/#concept-url-path) (as an array) +- [`query`](https://url.spec.whatwg.org/#concept-url-query) +- [`fragment`](https://url.spec.whatwg.org/#concept-url-fragment) +- [`cannotBeABaseURL`](https://url.spec.whatwg.org/#url-cannot-be-a-base-url-flag) (as a boolean) + +These properties should be treated with care, 
as in general changing them will cause the URL record to be in an inconsistent state until the appropriate invocation of `basicURLParse` is used to fix it up. You can see examples of this in the URL Standard, where there are many step sequences like "4. Set context object’s url’s fragment to the empty string. 5. Basic URL parse _input_ with context object’s url as _url_ and fragment state as _state override_." In between those two steps, a URL record is in an unusable state. + +The return value of "failure" in the spec is represented by the string `"failure"`. That is, functions like `parseURL` and `basicURLParse` can return _either_ a URL record _or_ the string `"failure"`. diff --git a/node_modules/whatwg-url/lib/URL-impl.js b/node_modules/whatwg-url/lib/URL-impl.js new file mode 100644 index 0000000..dc7452c --- /dev/null +++ b/node_modules/whatwg-url/lib/URL-impl.js @@ -0,0 +1,200 @@ +"use strict"; +const usm = require("./url-state-machine"); + +exports.implementation = class URLImpl { + constructor(constructorArgs) { + const url = constructorArgs[0]; + const base = constructorArgs[1]; + + let parsedBase = null; + if (base !== undefined) { + parsedBase = usm.basicURLParse(base); + if (parsedBase === "failure") { + throw new TypeError("Invalid base URL"); + } + } + + const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + + // TODO: query stuff + } + + get href() { + return usm.serializeURL(this._url); + } + + set href(v) { + const parsedURL = usm.basicURLParse(v); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + } + + get origin() { + return usm.serializeURLOrigin(this._url); + } + + get protocol() { + return this._url.scheme + ":"; + } + + set protocol(v) { + usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); + } + + get username() { + return this._url.username; + } + + set username(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setTheUsername(this._url, v); + } + + get password() { + return this._url.password; + } + + set password(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setThePassword(this._url, v); + } + + get host() { + const url = this._url; + + if (url.host === null) { + return ""; + } + + if (url.port === null) { + return usm.serializeHost(url.host); + } + + return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); + } + + set host(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); + } + + get hostname() { + if (this._url.host === null) { + return ""; + } + + return usm.serializeHost(this._url.host); + } + + set hostname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); + } + + get port() { + if (this._url.port === null) { + return ""; + } + + return usm.serializeInteger(this._url.port); + } + + set port(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + if (v === "") { + this._url.port = null; + } else { + usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); + } + } + + get pathname() { + if (this._url.cannotBeABaseURL) { + return this._url.path[0]; + } + + if (this._url.path.length === 0) { + return ""; + } + + return "/" + this._url.path.join("/"); + } + + set pathname(v) { + if 
(this._url.cannotBeABaseURL) { + return; + } + + this._url.path = []; + usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); + } + + get search() { + if (this._url.query === null || this._url.query === "") { + return ""; + } + + return "?" + this._url.query; + } + + set search(v) { + // TODO: query stuff + + const url = this._url; + + if (v === "") { + url.query = null; + return; + } + + const input = v[0] === "?" ? v.substring(1) : v; + url.query = ""; + usm.basicURLParse(input, { url, stateOverride: "query" }); + } + + get hash() { + if (this._url.fragment === null || this._url.fragment === "") { + return ""; + } + + return "#" + this._url.fragment; + } + + set hash(v) { + if (v === "") { + this._url.fragment = null; + return; + } + + const input = v[0] === "#" ? v.substring(1) : v; + this._url.fragment = ""; + usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); + } + + toJSON() { + return this.href; + } +}; diff --git a/node_modules/whatwg-url/lib/URL.js b/node_modules/whatwg-url/lib/URL.js new file mode 100644 index 0000000..78c7207 --- /dev/null +++ b/node_modules/whatwg-url/lib/URL.js @@ -0,0 +1,196 @@ +"use strict"; + +const conversions = require("webidl-conversions"); +const utils = require("./utils.js"); +const Impl = require(".//URL-impl.js"); + +const impl = utils.implSymbol; + +function URL(url) { + if (!this || this[impl] || !(this instanceof URL)) { + throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); + } + if (arguments.length < 1) { + throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); + } + const args = []; + for (let i = 0; i < arguments.length && i < 2; ++i) { + args[i] = arguments[i]; + } + args[0] = conversions["USVString"](args[0]); + if (args[1] !== undefined) { + args[1] = conversions["USVString"](args[1]); + } + + module.exports.setup(this, args); +} + +URL.prototype.toJSON = function toJSON() { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + const args = []; + for (let i = 0; i < arguments.length && i < 0; ++i) { + args[i] = arguments[i]; + } + return this[impl].toJSON.apply(this[impl], args); +}; +Object.defineProperty(URL.prototype, "href", { + get() { + return this[impl].href; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].href = V; + }, + enumerable: true, + configurable: true +}); + +URL.prototype.toString = function () { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + return this.href; +}; + +Object.defineProperty(URL.prototype, "origin", { + get() { + return this[impl].origin; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "protocol", { + get() { + return this[impl].protocol; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].protocol = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "username", { + get() { + return this[impl].username; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].username = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "password", { + get() { + return this[impl].password; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].password = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "host", { + get() { + return 
this[impl].host; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].host = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hostname", { + get() { + return this[impl].hostname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hostname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "port", { + get() { + return this[impl].port; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].port = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "pathname", { + get() { + return this[impl].pathname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].pathname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "search", { + get() { + return this[impl].search; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].search = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hash", { + get() { + return this[impl].hash; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hash = V; + }, + enumerable: true, + configurable: true +}); + + +module.exports = { + is(obj) { + return !!obj && obj[impl] instanceof Impl.implementation; + }, + create(constructorArgs, privateData) { + let obj = Object.create(URL.prototype); + this.setup(obj, constructorArgs, privateData); + return obj; + }, + setup(obj, constructorArgs, privateData) { + if (!privateData) privateData = {}; + privateData.wrapper = obj; + + obj[impl] = new Impl.implementation(constructorArgs, privateData); + obj[impl][utils.wrapperSymbol] = obj; + }, + interface: URL, + expose: { + Window: { URL: URL }, + Worker: { URL: URL } + } +}; + diff --git a/node_modules/whatwg-url/lib/public-api.js b/node_modules/whatwg-url/lib/public-api.js new file mode 100644 index 0000000..932dcad --- /dev/null +++ b/node_modules/whatwg-url/lib/public-api.js @@ -0,0 +1,11 @@ +"use strict"; + +exports.URL = require("./URL").interface; +exports.serializeURL = require("./url-state-machine").serializeURL; +exports.serializeURLOrigin = require("./url-state-machine").serializeURLOrigin; +exports.basicURLParse = require("./url-state-machine").basicURLParse; +exports.setTheUsername = require("./url-state-machine").setTheUsername; +exports.setThePassword = require("./url-state-machine").setThePassword; +exports.serializeHost = require("./url-state-machine").serializeHost; +exports.serializeInteger = require("./url-state-machine").serializeInteger; +exports.parseURL = require("./url-state-machine").parseURL; diff --git a/node_modules/whatwg-url/lib/url-state-machine.js b/node_modules/whatwg-url/lib/url-state-machine.js new file mode 100644 index 0000000..c25dbc2 --- /dev/null +++ b/node_modules/whatwg-url/lib/url-state-machine.js @@ -0,0 +1,1297 @@ +"use strict"; +const punycode = require("punycode"); +const tr46 = require("tr46"); + +const specialSchemes = { + ftp: 21, + file: null, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; + +const failure = Symbol("failure"); + +function countSymbols(str) { + return punycode.ucs2.decode(str).length; +} + +function at(input, idx) { + const c = input[idx]; + return isNaN(c) ? 
undefined : String.fromCodePoint(c); +} + +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} + +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} + +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} + +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} + +function isSingleDot(buffer) { + return buffer === "." || buffer.toLowerCase() === "%2e"; +} + +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; +} + +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} + +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} + +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} + +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} + +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} + +function defaultPort(scheme) { + return specialSchemes[scheme]; +} + +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } + + return "%" + hex; +} + +function utf8PercentEncode(c) { + const buf = new Buffer(c); + + let str = ""; + + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); + } + + return str; +} + +function utf8PercentDecode(str) { + const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } + } + return new Buffer(output).toString(); +} + +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} + +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} + +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} + +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); + + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } + + return cStr; +} + +function parseIPv4Number(input) { + let R = 10; + + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) === "0") { + input = input.substring(1); + R = 8; + } + + if (input === "") { + return 0; + } + + const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } + + return parseInt(input, R); +} + +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } + + if (parts.length > 4) { + return input; + } + + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } + + numbers.push(n); + } + + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } + + let ipv4 = numbers.pop(); + let counter = 0; + + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } + + return ipv4; +} + +function serializeIPv4(address) { + let output = ""; + let n = address; + + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." + output; + } + n = Math.floor(n / 256); + } + + return output; +} + +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; + + input = punycode.ucs2.decode(input); + + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } + + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } + + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } + + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } + + let value = 0; + let length = 0; + + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } + + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } + + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } + + if (!isASCIIDigit(input[pointer])) { + return failure; + } + + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + } + + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; + + ++numbersSeen; + + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; + } + } + + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; + } + } else if (input[pointer] !== undefined) { + return failure; + } + + address[pieceIndex] = value; + ++pieceIndex; + } + + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; + } + } else if (compress === null && pieceIndex !== 8) { + return failure; + } + + return address; +} + +function serializeIPv6(address) { + let output = ""; + const seqResult = findLongestZeroSequence(address); + 
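+  // seqResult.idx is the start of the longest run (longer than one piece) of zero pieces, or null if there is none; that run is collapsed to "::" in the loop below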
const compress = seqResult.idx; + let ignore0 = false; + + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } + + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? "::" : ":"; + output += separator; + ignore0 = true; + continue; + } + + output += address[pieceIndex].toString(16); + + if (pieceIndex !== 7) { + output += ":"; + } + } + + return output; +} + +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; + } + + return parseIPv6(input.substring(1, input.length - 1)); + } + + if (!isSpecialArg) { + return parseOpaqueHost(input); + } + + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } + + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } + + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } + + return asciiDomain; +} + +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; + } + + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); + } + return output; +} + +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; + + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + return { + idx: maxIdx, + len: maxLen + }; +} + +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } + + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } + + return host; +} + +function trimControlChars(url) { + return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} + +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} + +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } + + path.pop(); +} + +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; +} + +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +} + +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); +} + +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; + + if (!this.url) { + this.url = { + scheme: "", + username: "", + password: "", + host: null, + port: null, + path: [], + query: null, + fragment: null, + + cannotBeABaseURL: false + }; + 
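+    // Fresh parse with no existing url record: strip leading/trailing C0 controls and spaces, flagging a parse error if anything was removed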
+ const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } + + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + + this.state = stateOverride || "scheme start"; + + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; + } + } +} + +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } + + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } + + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === "file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + 
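+    // anything other than "//" here is a parse error; fall back to the relative state and reparse this code point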
this.parseError = true; + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative"] = function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); + + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; + } + + return true; +}; + +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; + + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); + + if (codePoint === 58 && !this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); + if 
(this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; + } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + return failure; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; + } + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; + } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } + + this.state = "path"; + --this.pointer; + } + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = this.base.host; + } + } + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; + } + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; + + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } + + return true; +}; + +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + 
(!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } + + if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += percentEncodeChar(c, isPathPercentEncode); + } + + return true; +}; + +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; + } + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); + } + } + + return true; +}; + +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; + } + + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); + } else { + this.url.query += String.fromCodePoint(buffer[i]); + } + } + + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. 
+ if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } + + return true; +}; + +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; + + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } + + output += serializeHost(url.host); + + if (url.port !== null) { + output += ":" + url.port; + } + } else if (url.host === null && url.scheme === "file") { + output += "//"; + } + + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } + } + + if (url.query !== null) { + output += "?" + url.query; + } + + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; + } + + return output; +} + +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); + + if (tuple.port !== null) { + result += ":" + tuple.port; + } + + return result; +} + +module.exports.serializeURL = serializeURL; + +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; + +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } + + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; + } + + return usm.url; +}; + +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.serializeHost = serializeHost; + +module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; + +module.exports.serializeInteger = function (integer) { + return String(integer); +}; + +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; + } + + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; diff --git a/node_modules/whatwg-url/lib/utils.js b/node_modules/whatwg-url/lib/utils.js new file mode 100644 index 0000000..a562009 --- /dev/null +++ b/node_modules/whatwg-url/lib/utils.js @@ -0,0 +1,20 @@ +"use strict"; + +module.exports.mixin = function 
mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); + } +}; + +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); + +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; + +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; + diff --git a/node_modules/whatwg-url/package.json b/node_modules/whatwg-url/package.json new file mode 100644 index 0000000..fce35ae --- /dev/null +++ b/node_modules/whatwg-url/package.json @@ -0,0 +1,32 @@ +{ + "name": "whatwg-url", + "version": "5.0.0", + "description": "An implementation of the WHATWG URL Standard's URL API and parsing machinery", + "main": "lib/public-api.js", + "files": [ + "lib/" + ], + "author": "Sebastian Mayr ", + "license": "MIT", + "repository": "jsdom/whatwg-url", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + }, + "devDependencies": { + "eslint": "^2.6.0", + "istanbul": "~0.4.3", + "mocha": "^2.2.4", + "recast": "~0.10.29", + "request": "^2.55.0", + "webidl2js": "^3.0.2" + }, + "scripts": { + "build": "node scripts/transform.js && node scripts/convert-idl.js", + "coverage": "istanbul cover node_modules/mocha/bin/_mocha", + "lint": "eslint .", + "prepublish": "npm run build", + "pretest": "node scripts/get-latest-platform-tests.js && npm run build", + "test": "mocha" + } +} diff --git a/node_modules/xml2js/LICENSE b/node_modules/xml2js/LICENSE new file mode 100644 index 0000000..e3b4222 --- /dev/null +++ b/node_modules/xml2js/LICENSE @@ -0,0 +1,19 @@ +Copyright 2010, 2011, 2012, 2013. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/xml2js/README.md b/node_modules/xml2js/README.md new file mode 100644 index 0000000..67f2104 --- /dev/null +++ b/node_modules/xml2js/README.md @@ -0,0 +1,507 @@ +node-xml2js +=========== + +Ever had the urge to parse XML? And wanted to access the data in some sane, +easy way? Don't want to compile a C parser, for whatever reason? Then xml2js is +what you're looking for! + +Description +=========== + +Simple XML to JavaScript object converter. It supports bi-directional conversion. +Uses [sax-js](https://github.com/isaacs/sax-js/) and +[xmlbuilder-js](https://github.com/oozcitak/xmlbuilder-js/). 
+ +Note: If you're looking for a full DOM parser, you probably want +[JSDom](https://github.com/tmpvar/jsdom). + +Installation +============ + +Simplest way to install `xml2js` is to use [npm](http://npmjs.org), just `npm +install xml2js` which will download xml2js and all dependencies. + +xml2js is also available via [Bower](http://bower.io/), just `bower install +xml2js` which will download xml2js and all dependencies. + +Usage +===== + +No extensive tutorials required because you are a smart developer! The task of +parsing XML should be an easy one, so let's make it so! Here's some examples. + +Shoot-and-forget usage +---------------------- + +You want to parse XML as simple and easy as possible? It's dangerous to go +alone, take this: + +```javascript +var parseString = require('xml2js').parseString; +var xml = "Hello xml2js!" +parseString(xml, function (err, result) { + console.dir(result); +}); +``` + +Can't get easier than this, right? This works starting with `xml2js` 0.2.3. +With CoffeeScript it looks like this: + +```coffeescript +{parseString} = require 'xml2js' +xml = "Hello xml2js!" +parseString xml, (err, result) -> + console.dir result +``` + +If you need some special options, fear not, `xml2js` supports a number of +options (see below), you can specify these as second argument: + +```javascript +parseString(xml, {trim: true}, function (err, result) { +}); +``` + +Simple as pie usage +------------------- + +That's right, if you have been using xml-simple or a home-grown +wrapper, this was added in 0.1.11 just for you: + +```javascript +var fs = require('fs'), + xml2js = require('xml2js'); + +var parser = new xml2js.Parser(); +fs.readFile(__dirname + '/foo.xml', function(err, data) { + parser.parseString(data, function (err, result) { + console.dir(result); + console.log('Done'); + }); +}); +``` + +Look ma, no event listeners! + +You can also use `xml2js` from +[CoffeeScript](https://github.com/jashkenas/coffeescript), further reducing +the clutter: + +```coffeescript +fs = require 'fs', +xml2js = require 'xml2js' + +parser = new xml2js.Parser() +fs.readFile __dirname + '/foo.xml', (err, data) -> + parser.parseString data, (err, result) -> + console.dir result + console.log 'Done.' +``` + +But what happens if you forget the `new` keyword to create a new `Parser`? In +the middle of a nightly coding session, it might get lost, after all. Worry +not, we got you covered! Starting with 0.2.8 you can also leave it out, in +which case `xml2js` will helpfully add it for you, no bad surprises and +inexplicable bugs! + +Promise usage +------------- + +```javascript +var xml2js = require('xml2js'); +var xml = ''; + +// With parser +var parser = new xml2js.Parser(/* options */); +parser.parseStringPromise(xml).then(function (result) { + console.dir(result); + console.log('Done'); +}) +.catch(function (err) { + // Failed +}); + +// Without parser +xml2js.parseStringPromise(xml /*, options */).then(function (result) { + console.dir(result); + console.log('Done'); +}) +.catch(function (err) { + // Failed +}); +``` + +Parsing multiple files +---------------------- + +If you want to parse multiple files, you have multiple possibilities: + + * You can create one `xml2js.Parser` per file. That's the recommended one + and is promised to always *just work*. + * You can call `reset()` on your parser object. + * You can hope everything goes well anyway. This behaviour is not + guaranteed work always, if ever. Use option #1 if possible. Thanks! + +So you wanna some JSON? 
+----------------------- + +Just wrap the `result` object in a call to `JSON.stringify` like this +`JSON.stringify(result)`. You get a string containing the JSON representation +of the parsed object that you can feed to JSON-hungry consumers. + +Displaying results +------------------ + +You might wonder why, using `console.dir` or `console.log` the output at some +level is only `[Object]`. Don't worry, this is not because `xml2js` got lazy. +That's because Node uses `util.inspect` to convert the object into strings and +that function stops after `depth=2` which is a bit low for most XML. + +To display the whole deal, you can use `console.log(util.inspect(result, false, +null))`, which displays the whole result. + +So much for that, but what if you use +[eyes](https://github.com/cloudhead/eyes.js) for nice colored output and it +truncates the output with `…`? Don't fear, there's also a solution for that, +you just need to increase the `maxLength` limit by creating a custom inspector +`var inspect = require('eyes').inspector({maxLength: false})` and then you can +easily `inspect(result)`. + +XML builder usage +----------------- + +Since 0.4.0, objects can be also be used to build XML: + +```javascript +var xml2js = require('xml2js'); + +var obj = {name: "Super", Surname: "Man", age: 23}; + +var builder = new xml2js.Builder(); +var xml = builder.buildObject(obj); +``` +will result in: + +```xml + + + Super + Man + 23 + +``` + +At the moment, a one to one bi-directional conversion is guaranteed only for +default configuration, except for `attrkey`, `charkey` and `explicitArray` options +you can redefine to your taste. Writing CDATA is supported via setting the `cdata` +option to `true`. + +To specify attributes: +```javascript +var xml2js = require('xml2js'); + +var obj = {root: {$: {id: "my id"}, _: "my inner text"}}; + +var builder = new xml2js.Builder(); +var xml = builder.buildObject(obj); +``` +will result in: +```xml + +my inner text +``` + +### Adding xmlns attributes + +You can generate XML that declares XML namespace prefix / URI pairs with xmlns attributes. 
+ +Example declaring a default namespace on the root element: + +```javascript +let obj = { + Foo: { + $: { + "xmlns": "http://foo.com" + } + } +}; +``` +Result of `buildObject(obj)`: +```xml + +``` +Example declaring non-default namespaces on non-root elements: +```javascript +let obj = { + 'foo:Foo': { + $: { + 'xmlns:foo': 'http://foo.com' + }, + 'bar:Bar': { + $: { + 'xmlns:bar': 'http://bar.com' + } + } + } +} +``` +Result of `buildObject(obj)`: +```xml + + + +``` + + +Processing attribute, tag names and values +------------------------------------------ + +Since 0.4.1 you can optionally provide the parser with attribute name and tag name processors as well as element value processors (Since 0.4.14, you can also optionally provide the parser with attribute value processors): + +```javascript + +function nameToUpperCase(name){ + return name.toUpperCase(); +} + +//transform all attribute and tag names and values to uppercase +parseString(xml, { + tagNameProcessors: [nameToUpperCase], + attrNameProcessors: [nameToUpperCase], + valueProcessors: [nameToUpperCase], + attrValueProcessors: [nameToUpperCase]}, + function (err, result) { + // processed data +}); +``` + +The `tagNameProcessors` and `attrNameProcessors` options +accept an `Array` of functions with the following signature: + +```javascript +function (name){ + //do something with `name` + return name +} +``` + +The `attrValueProcessors` and `valueProcessors` options +accept an `Array` of functions with the following signature: + +```javascript +function (value, name) { + //`name` will be the node name or attribute name + //do something with `value`, (optionally) dependent on the node/attr name + return value +} +``` + +Some processors are provided out-of-the-box and can be found in `lib/processors.js`: + +- `normalize`: transforms the name to lowercase. +(Automatically used when `options.normalize` is set to `true`) + +- `firstCharLowerCase`: transforms the first character to lower case. +E.g. 'MyTagName' becomes 'myTagName' + +- `stripPrefix`: strips the xml namespace prefix. E.g `` will become 'Bar'. +(N.B.: the `xmlns` prefix is NOT stripped.) + +- `parseNumbers`: parses integer-like strings as integers and float-like strings as floats +E.g. "0" becomes 0 and "15.56" becomes 15.56 + +- `parseBooleans`: parses boolean-like strings to booleans +E.g. "true" becomes true and "False" becomes false + +Options +======= + +Apart from the default settings, there are a number of options that can be +specified for the parser. Options are specified by ``new Parser({optionName: +value})``. Possible options are: + + * `attrkey` (default: `$`): Prefix that is used to access the attributes. + Version 0.1 default was `@`. + * `charkey` (default: `_`): Prefix that is used to access the character + content. Version 0.1 default was `#`. + * `explicitCharkey` (default: `false`) Determines whether or not to use + a `charkey` prefix for elements with no attributes. + * `trim` (default: `false`): Trim the whitespace at the beginning and end of + text nodes. + * `normalizeTags` (default: `false`): Normalize all tag names to lowercase. + * `normalize` (default: `false`): Trim whitespaces inside text nodes. + * `explicitRoot` (default: `true`): Set this if you want to get the root + node in the resulting object. + * `emptyTag` (default: `''`): what will the value of empty nodes be. In case + you want to use an empty object as a default value, it is better to provide a factory + function `() => ({})` instead. 
Without this function a plain object would + become a shared reference across all occurrences with unwanted behavior. + * `explicitArray` (default: `true`): Always put child nodes in an array if + true; otherwise an array is created only if there is more than one. + * `ignoreAttrs` (default: `false`): Ignore all XML attributes and only create + text nodes. + * `mergeAttrs` (default: `false`): Merge attributes and child elements as + properties of the parent, instead of keying attributes off a child + attribute object. This option is ignored if `ignoreAttrs` is `true`. + * `validator` (default `null`): You can specify a callable that validates + the resulting structure somehow, however you want. See unit tests + for an example. + * `xmlns` (default `false`): Give each element a field usually called '$ns' + (the first character is the same as attrkey) that contains its local name + and namespace URI. + * `explicitChildren` (default `false`): Put child elements to separate + property. Doesn't work with `mergeAttrs = true`. If element has no children + then "children" won't be created. Added in 0.2.5. + * `childkey` (default `$$`): Prefix that is used to access child elements if + `explicitChildren` is set to `true`. Added in 0.2.5. + * `preserveChildrenOrder` (default `false`): Modifies the behavior of + `explicitChildren` so that the value of the "children" property becomes an + ordered array. When this is `true`, every node will also get a `#name` field + whose value will correspond to the XML nodeName, so that you may iterate + the "children" array and still be able to determine node names. The named + (and potentially unordered) properties are also retained in this + configuration at the same level as the ordered "children" array. Added in + 0.4.9. + * `charsAsChildren` (default `false`): Determines whether chars should be + considered children if `explicitChildren` is on. Added in 0.2.5. + * `includeWhiteChars` (default `false`): Determines whether whitespace-only + text nodes should be included. Added in 0.4.17. + * `async` (default `false`): Should the callbacks be async? This *might* be + an incompatible change if your code depends on sync execution of callbacks. + Future versions of `xml2js` might change this default, so the recommendation + is to not depend on sync execution anyway. Added in 0.2.6. + * `strict` (default `true`): Set sax-js to strict or non-strict parsing mode. + Defaults to `true` which is *highly* recommended, since parsing HTML which + is not well-formed XML might yield just about anything. Added in 0.2.7. + * `attrNameProcessors` (default: `null`): Allows the addition of attribute + name processing functions. Accepts an `Array` of functions with following + signature: + ```javascript + function (name){ + //do something with `name` + return name + } + ``` + Added in 0.4.14 + * `attrValueProcessors` (default: `null`): Allows the addition of attribute + value processing functions. Accepts an `Array` of functions with following + signature: + ```javascript + function (value, name){ + //do something with `name` + return name + } + ``` + Added in 0.4.1 + * `tagNameProcessors` (default: `null`): Allows the addition of tag name + processing functions. Accepts an `Array` of functions with following + signature: + ```javascript + function (name){ + //do something with `name` + return name + } + ``` + Added in 0.4.1 + * `valueProcessors` (default: `null`): Allows the addition of element value + processing functions. 
Accepts an `Array` of functions with following + signature: + ```javascript + function (value, name){ + //do something with `name` + return name + } + ``` + Added in 0.4.6 + +Options for the `Builder` class +------------------------------- +These options are specified by ``new Builder({optionName: value})``. +Possible options are: + + * `attrkey` (default: `$`): Prefix that is used to access the attributes. + Version 0.1 default was `@`. + * `charkey` (default: `_`): Prefix that is used to access the character + content. Version 0.1 default was `#`. + * `rootName` (default `root` or the root key name): root element name to be used in case + `explicitRoot` is `false` or to override the root element name. + * `renderOpts` (default `{ 'pretty': true, 'indent': ' ', 'newline': '\n' }`): + Rendering options for xmlbuilder-js. + * pretty: prettify generated XML + * indent: whitespace for indentation (only when pretty) + * newline: newline char (only when pretty) + * `xmldec` (default `{ 'version': '1.0', 'encoding': 'UTF-8', 'standalone': true }`: + XML declaration attributes. + * `xmldec.version` A version number string, e.g. 1.0 + * `xmldec.encoding` Encoding declaration, e.g. UTF-8 + * `xmldec.standalone` standalone document declaration: true or false + * `doctype` (default `null`): optional DTD. Eg. `{'ext': 'hello.dtd'}` + * `headless` (default: `false`): omit the XML header. Added in 0.4.3. + * `allowSurrogateChars` (default: `false`): allows using characters from the Unicode + surrogate blocks. + * `cdata` (default: `false`): wrap text nodes in `` instead of + escaping when necessary. Does not add `` if it is not required. + Added in 0.4.5. + +`renderOpts`, `xmldec`,`doctype` and `headless` pass through to +[xmlbuilder-js](https://github.com/oozcitak/xmlbuilder-js). + +Updating to new version +======================= + +Version 0.2 changed the default parsing settings, but version 0.1.14 introduced +the default settings for version 0.2, so these settings can be tried before the +migration. + +```javascript +var xml2js = require('xml2js'); +var parser = new xml2js.Parser(xml2js.defaults["0.2"]); +``` + +To get the 0.1 defaults in version 0.2 you can just use +`xml2js.defaults["0.1"]` in the same place. This provides you with enough time +to migrate to the saner way of parsing in `xml2js` 0.2. We try to make the +migration as simple and gentle as possible, but some breakage cannot be +avoided. + +So, what exactly did change and why? In 0.2 we changed some defaults to parse +the XML in a more universal and sane way. So we disabled `normalize` and `trim` +so `xml2js` does not cut out any text content. You can reenable this at will of +course. A more important change is that we return the root tag in the resulting +JavaScript structure via the `explicitRoot` setting, so you need to access the +first element. This is useful for anybody who wants to know what the root node +is and preserves more information. The last major change was to enable +`explicitArray`, so everytime it is possible that one might embed more than one +sub-tag into a tag, xml2js >= 0.2 returns an array even if the array just +includes one element. This is useful when dealing with APIs that return +variable amounts of subtags. 
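+
+As a rough illustration of that difference, here is a minimal sketch that parses the same document with both presets side by side. It assumes only the `xml2js.defaults` presets and the `parseString(xml, options, cb)` signature shown above; the output shapes in the comments are approximate and depend on any other options you set:
+
+```javascript
+var xml2js = require('xml2js');
+var xml = '<root><item>one</item></root>';
+
+// 0.2 defaults: explicitRoot and explicitArray are on
+xml2js.parseString(xml, xml2js.defaults['0.2'], function (err, result) {
+  console.dir(result); // roughly { root: { item: [ 'one' ] } }
+});
+
+// 0.1 defaults: no explicit root, single children stay plain values
+xml2js.parseString(xml, xml2js.defaults['0.1'], function (err, result) {
+  console.dir(result); // roughly { item: 'one' }
+});
+```
+
+In other words, the 0.2 preset keeps the `root` wrapper and puts the single `item` into an array, while the 0.1 preset drops both.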
+ +Running tests, development +========================== + +[![Build Status](https://travis-ci.org/Leonidas-from-XIV/node-xml2js.svg?branch=master)](https://travis-ci.org/Leonidas-from-XIV/node-xml2js) +[![Coverage Status](https://coveralls.io/repos/Leonidas-from-XIV/node-xml2js/badge.svg?branch=)](https://coveralls.io/r/Leonidas-from-XIV/node-xml2js?branch=master) +[![Dependency Status](https://david-dm.org/Leonidas-from-XIV/node-xml2js.svg)](https://david-dm.org/Leonidas-from-XIV/node-xml2js) + +The development requirements are handled by npm, you just need to install them. +We also have a number of unit tests, they can be run using `npm test` directly +from the project root. This runs zap to discover all the tests and execute +them. + +If you like to contribute, keep in mind that `xml2js` is written in +CoffeeScript, so don't develop on the JavaScript files that are checked into +the repository for convenience reasons. Also, please write some unit test to +check your behaviour and if it is some user-facing thing, add some +documentation to this README, so people will know it exists. Thanks in advance! + +Getting support +=============== + +Please, if you have a problem with the library, first make sure you read this +README. If you read this far, thanks, you're good. Then, please make sure your +problem really is with `xml2js`. It is? Okay, then I'll look at it. Send me a +mail and we can talk. Please don't open issues, as I don't think that is the +proper forum for support problems. Some problems might as well really be bugs +in `xml2js`, if so I'll let you know to open an issue instead :) + +But if you know you really found a bug, feel free to open an issue instead. diff --git a/node_modules/xml2js/lib/bom.js b/node_modules/xml2js/lib/bom.js new file mode 100644 index 0000000..7b8fb27 --- /dev/null +++ b/node_modules/xml2js/lib/bom.js @@ -0,0 +1,12 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + exports.stripBOM = function(str) { + if (str[0] === '\uFEFF') { + return str.substring(1); + } else { + return str; + } + }; + +}).call(this); diff --git a/node_modules/xml2js/lib/builder.js b/node_modules/xml2js/lib/builder.js new file mode 100644 index 0000000..58f3638 --- /dev/null +++ b/node_modules/xml2js/lib/builder.js @@ -0,0 +1,127 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var builder, defaults, escapeCDATA, requiresCDATA, wrapCDATA, + hasProp = {}.hasOwnProperty; + + builder = require('xmlbuilder'); + + defaults = require('./defaults').defaults; + + requiresCDATA = function(entry) { + return typeof entry === "string" && (entry.indexOf('&') >= 0 || entry.indexOf('>') >= 0 || entry.indexOf('<') >= 0); + }; + + wrapCDATA = function(entry) { + return ""; + }; + + escapeCDATA = function(entry) { + return entry.replace(']]>', ']]]]>'); + }; + + exports.Builder = (function() { + function Builder(opts) { + var key, ref, value; + this.options = {}; + ref = defaults["0.2"]; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this.options[key] = value; + } + for (key in opts) { + if (!hasProp.call(opts, key)) continue; + value = opts[key]; + this.options[key] = value; + } + } + + Builder.prototype.buildObject = function(rootObj) { + var attrkey, charkey, render, rootElement, rootName; + attrkey = this.options.attrkey; + charkey = this.options.charkey; + if ((Object.keys(rootObj).length === 1) && (this.options.rootName === defaults['0.2'].rootName)) { + rootName = Object.keys(rootObj)[0]; + rootObj = 
rootObj[rootName]; + } else { + rootName = this.options.rootName; + } + render = (function(_this) { + return function(element, obj) { + var attr, child, entry, index, key, value; + if (typeof obj !== 'object') { + if (_this.options.cdata && requiresCDATA(obj)) { + element.raw(wrapCDATA(obj)); + } else { + element.txt(obj); + } + } else if (Array.isArray(obj)) { + for (index in obj) { + if (!hasProp.call(obj, index)) continue; + child = obj[index]; + for (key in child) { + entry = child[key]; + element = render(element.ele(key), entry).up(); + } + } + } else { + for (key in obj) { + if (!hasProp.call(obj, key)) continue; + child = obj[key]; + if (key === attrkey) { + if (typeof child === "object") { + for (attr in child) { + value = child[attr]; + element = element.att(attr, value); + } + } + } else if (key === charkey) { + if (_this.options.cdata && requiresCDATA(child)) { + element = element.raw(wrapCDATA(child)); + } else { + element = element.txt(child); + } + } else if (Array.isArray(child)) { + for (index in child) { + if (!hasProp.call(child, index)) continue; + entry = child[index]; + if (typeof entry === 'string') { + if (_this.options.cdata && requiresCDATA(entry)) { + element = element.ele(key).raw(wrapCDATA(entry)).up(); + } else { + element = element.ele(key, entry).up(); + } + } else { + element = render(element.ele(key), entry).up(); + } + } + } else if (typeof child === "object") { + element = render(element.ele(key), child).up(); + } else { + if (typeof child === 'string' && _this.options.cdata && requiresCDATA(child)) { + element = element.ele(key).raw(wrapCDATA(child)).up(); + } else { + if (child == null) { + child = ''; + } + element = element.ele(key, child.toString()).up(); + } + } + } + } + return element; + }; + })(this); + rootElement = builder.create(rootName, this.options.xmldec, this.options.doctype, { + headless: this.options.headless, + allowSurrogateChars: this.options.allowSurrogateChars + }); + return render(rootElement, rootObj).end(this.options.renderOpts); + }; + + return Builder; + + })(); + +}).call(this); diff --git a/node_modules/xml2js/lib/defaults.js b/node_modules/xml2js/lib/defaults.js new file mode 100644 index 0000000..0a21da0 --- /dev/null +++ b/node_modules/xml2js/lib/defaults.js @@ -0,0 +1,72 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + exports.defaults = { + "0.1": { + explicitCharkey: false, + trim: true, + normalize: true, + normalizeTags: false, + attrkey: "@", + charkey: "#", + explicitArray: false, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: false, + validator: null, + xmlns: false, + explicitChildren: false, + childkey: '@@', + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: null, + attrValueProcessors: null, + tagNameProcessors: null, + valueProcessors: null, + emptyTag: '' + }, + "0.2": { + explicitCharkey: false, + trim: false, + normalize: false, + normalizeTags: false, + attrkey: "$", + charkey: "_", + explicitArray: true, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: true, + validator: null, + xmlns: false, + explicitChildren: false, + preserveChildrenOrder: false, + childkey: '$$', + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: null, + attrValueProcessors: null, + tagNameProcessors: null, + valueProcessors: null, + rootName: 'root', + xmldec: { + 'version': '1.0', + 'encoding': 'UTF-8', + 'standalone': true + }, + doctype: null, + renderOpts: { + 'pretty': true, + 
'indent': ' ', + 'newline': '\n' + }, + headless: false, + chunkSize: 10000, + emptyTag: '', + cdata: false + } + }; + +}).call(this); diff --git a/node_modules/xml2js/lib/parser.js b/node_modules/xml2js/lib/parser.js new file mode 100644 index 0000000..192382d --- /dev/null +++ b/node_modules/xml2js/lib/parser.js @@ -0,0 +1,385 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var bom, defaults, events, isEmpty, processItem, processors, sax, setImmediate, + bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + sax = require('sax'); + + events = require('events'); + + bom = require('./bom'); + + processors = require('./processors'); + + setImmediate = require('timers').setImmediate; + + defaults = require('./defaults').defaults; + + isEmpty = function(thing) { + return typeof thing === "object" && (thing != null) && Object.keys(thing).length === 0; + }; + + processItem = function(processors, item, key) { + var i, len, process; + for (i = 0, len = processors.length; i < len; i++) { + process = processors[i]; + item = process(item, key); + } + return item; + }; + + exports.Parser = (function(superClass) { + extend(Parser, superClass); + + function Parser(opts) { + this.parseStringPromise = bind(this.parseStringPromise, this); + this.parseString = bind(this.parseString, this); + this.reset = bind(this.reset, this); + this.assignOrPush = bind(this.assignOrPush, this); + this.processAsync = bind(this.processAsync, this); + var key, ref, value; + if (!(this instanceof exports.Parser)) { + return new exports.Parser(opts); + } + this.options = {}; + ref = defaults["0.2"]; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this.options[key] = value; + } + for (key in opts) { + if (!hasProp.call(opts, key)) continue; + value = opts[key]; + this.options[key] = value; + } + if (this.options.xmlns) { + this.options.xmlnskey = this.options.attrkey + "ns"; + } + if (this.options.normalizeTags) { + if (!this.options.tagNameProcessors) { + this.options.tagNameProcessors = []; + } + this.options.tagNameProcessors.unshift(processors.normalize); + } + this.reset(); + } + + Parser.prototype.processAsync = function() { + var chunk, err; + try { + if (this.remaining.length <= this.options.chunkSize) { + chunk = this.remaining; + this.remaining = ''; + this.saxParser = this.saxParser.write(chunk); + return this.saxParser.close(); + } else { + chunk = this.remaining.substr(0, this.options.chunkSize); + this.remaining = this.remaining.substr(this.options.chunkSize, this.remaining.length); + this.saxParser = this.saxParser.write(chunk); + return setImmediate(this.processAsync); + } + } catch (error1) { + err = error1; + if (!this.saxParser.errThrown) { + this.saxParser.errThrown = true; + return this.emit(err); + } + } + }; + + Parser.prototype.assignOrPush = function(obj, key, newValue) { + if (!(key in obj)) { + if (!this.options.explicitArray) { + return obj[key] = newValue; + } else { + return obj[key] = [newValue]; + } + } else { + if (!(obj[key] instanceof Array)) { + obj[key] = [obj[key]]; + } + return obj[key].push(newValue); + } + }; + + Parser.prototype.reset = function() { + var attrkey, charkey, ontext, 
stack; + this.removeAllListeners(); + this.saxParser = sax.parser(this.options.strict, { + trim: false, + normalize: false, + xmlns: this.options.xmlns + }); + this.saxParser.errThrown = false; + this.saxParser.onerror = (function(_this) { + return function(error) { + _this.saxParser.resume(); + if (!_this.saxParser.errThrown) { + _this.saxParser.errThrown = true; + return _this.emit("error", error); + } + }; + })(this); + this.saxParser.onend = (function(_this) { + return function() { + if (!_this.saxParser.ended) { + _this.saxParser.ended = true; + return _this.emit("end", _this.resultObject); + } + }; + })(this); + this.saxParser.ended = false; + this.EXPLICIT_CHARKEY = this.options.explicitCharkey; + this.resultObject = null; + stack = []; + attrkey = this.options.attrkey; + charkey = this.options.charkey; + this.saxParser.onopentag = (function(_this) { + return function(node) { + var key, newValue, obj, processedKey, ref; + obj = Object.create(null); + obj[charkey] = ""; + if (!_this.options.ignoreAttrs) { + ref = node.attributes; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + if (!(attrkey in obj) && !_this.options.mergeAttrs) { + obj[attrkey] = Object.create(null); + } + newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key]; + processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key; + if (_this.options.mergeAttrs) { + _this.assignOrPush(obj, processedKey, newValue); + } else { + obj[attrkey][processedKey] = newValue; + } + } + } + obj["#name"] = _this.options.tagNameProcessors ? processItem(_this.options.tagNameProcessors, node.name) : node.name; + if (_this.options.xmlns) { + obj[_this.options.xmlnskey] = { + uri: node.uri, + local: node.local + }; + } + return stack.push(obj); + }; + })(this); + this.saxParser.onclosetag = (function(_this) { + return function() { + var cdata, emptyStr, key, node, nodeName, obj, objClone, old, s, xpath; + obj = stack.pop(); + nodeName = obj["#name"]; + if (!_this.options.explicitChildren || !_this.options.preserveChildrenOrder) { + delete obj["#name"]; + } + if (obj.cdata === true) { + cdata = obj.cdata; + delete obj.cdata; + } + s = stack[stack.length - 1]; + if (obj[charkey].match(/^\s*$/) && !cdata) { + emptyStr = obj[charkey]; + delete obj[charkey]; + } else { + if (_this.options.trim) { + obj[charkey] = obj[charkey].trim(); + } + if (_this.options.normalize) { + obj[charkey] = obj[charkey].replace(/\s{2,}/g, " ").trim(); + } + obj[charkey] = _this.options.valueProcessors ? processItem(_this.options.valueProcessors, obj[charkey], nodeName) : obj[charkey]; + if (Object.keys(obj).length === 1 && charkey in obj && !_this.EXPLICIT_CHARKEY) { + obj = obj[charkey]; + } + } + if (isEmpty(obj)) { + if (typeof _this.options.emptyTag === 'function') { + obj = _this.options.emptyTag(); + } else { + obj = _this.options.emptyTag !== '' ? 
_this.options.emptyTag : emptyStr; + } + } + if (_this.options.validator != null) { + xpath = "/" + ((function() { + var i, len, results; + results = []; + for (i = 0, len = stack.length; i < len; i++) { + node = stack[i]; + results.push(node["#name"]); + } + return results; + })()).concat(nodeName).join("/"); + (function() { + var err; + try { + return obj = _this.options.validator(xpath, s && s[nodeName], obj); + } catch (error1) { + err = error1; + return _this.emit("error", err); + } + })(); + } + if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') { + if (!_this.options.preserveChildrenOrder) { + node = Object.create(null); + if (_this.options.attrkey in obj) { + node[_this.options.attrkey] = obj[_this.options.attrkey]; + delete obj[_this.options.attrkey]; + } + if (!_this.options.charsAsChildren && _this.options.charkey in obj) { + node[_this.options.charkey] = obj[_this.options.charkey]; + delete obj[_this.options.charkey]; + } + if (Object.getOwnPropertyNames(obj).length > 0) { + node[_this.options.childkey] = obj; + } + obj = node; + } else if (s) { + s[_this.options.childkey] = s[_this.options.childkey] || []; + objClone = Object.create(null); + for (key in obj) { + if (!hasProp.call(obj, key)) continue; + objClone[key] = obj[key]; + } + s[_this.options.childkey].push(objClone); + delete obj["#name"]; + if (Object.keys(obj).length === 1 && charkey in obj && !_this.EXPLICIT_CHARKEY) { + obj = obj[charkey]; + } + } + } + if (stack.length > 0) { + return _this.assignOrPush(s, nodeName, obj); + } else { + if (_this.options.explicitRoot) { + old = obj; + obj = Object.create(null); + obj[nodeName] = old; + } + _this.resultObject = obj; + _this.saxParser.ended = true; + return _this.emit("end", _this.resultObject); + } + }; + })(this); + ontext = (function(_this) { + return function(text) { + var charChild, s; + s = stack[stack.length - 1]; + if (s) { + s[charkey] += text; + if (_this.options.explicitChildren && _this.options.preserveChildrenOrder && _this.options.charsAsChildren && (_this.options.includeWhiteChars || text.replace(/\\n/g, '').trim() !== '')) { + s[_this.options.childkey] = s[_this.options.childkey] || []; + charChild = { + '#name': '__text__' + }; + charChild[charkey] = text; + if (_this.options.normalize) { + charChild[charkey] = charChild[charkey].replace(/\s{2,}/g, " ").trim(); + } + s[_this.options.childkey].push(charChild); + } + return s; + } + }; + })(this); + this.saxParser.ontext = ontext; + return this.saxParser.oncdata = (function(_this) { + return function(text) { + var s; + s = ontext(text); + if (s) { + return s.cdata = true; + } + }; + })(this); + }; + + Parser.prototype.parseString = function(str, cb) { + var err; + if ((cb != null) && typeof cb === "function") { + this.on("end", function(result) { + this.reset(); + return cb(null, result); + }); + this.on("error", function(err) { + this.reset(); + return cb(err); + }); + } + try { + str = str.toString(); + if (str.trim() === '') { + this.emit("end", null); + return true; + } + str = bom.stripBOM(str); + if (this.options.async) { + this.remaining = str; + setImmediate(this.processAsync); + return this.saxParser; + } + return this.saxParser.write(str).close(); + } catch (error1) { + err = error1; + if (!(this.saxParser.errThrown || this.saxParser.ended)) { + this.emit('error', err); + return this.saxParser.errThrown = true; + } else if (this.saxParser.ended) { + throw err; + } + } + }; + + Parser.prototype.parseStringPromise = function(str) { + return new 
Promise((function(_this) { + return function(resolve, reject) { + return _this.parseString(str, function(err, value) { + if (err) { + return reject(err); + } else { + return resolve(value); + } + }); + }; + })(this)); + }; + + return Parser; + + })(events); + + exports.parseString = function(str, a, b) { + var cb, options, parser; + if (b != null) { + if (typeof b === 'function') { + cb = b; + } + if (typeof a === 'object') { + options = a; + } + } else { + if (typeof a === 'function') { + cb = a; + } + options = {}; + } + parser = new exports.Parser(options); + return parser.parseString(str, cb); + }; + + exports.parseStringPromise = function(str, a) { + var options, parser; + if (typeof a === 'object') { + options = a; + } + parser = new exports.Parser(options); + return parser.parseStringPromise(str); + }; + +}).call(this); diff --git a/node_modules/xml2js/lib/processors.js b/node_modules/xml2js/lib/processors.js new file mode 100644 index 0000000..89aa85f --- /dev/null +++ b/node_modules/xml2js/lib/processors.js @@ -0,0 +1,34 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var prefixMatch; + + prefixMatch = new RegExp(/(?!xmlns)^.*:/); + + exports.normalize = function(str) { + return str.toLowerCase(); + }; + + exports.firstCharLowerCase = function(str) { + return str.charAt(0).toLowerCase() + str.slice(1); + }; + + exports.stripPrefix = function(str) { + return str.replace(prefixMatch, ''); + }; + + exports.parseNumbers = function(str) { + if (!isNaN(str)) { + str = str % 1 === 0 ? parseInt(str, 10) : parseFloat(str); + } + return str; + }; + + exports.parseBooleans = function(str) { + if (/^(?:true|false)$/i.test(str)) { + str = str.toLowerCase() === 'true'; + } + return str; + }; + +}).call(this); diff --git a/node_modules/xml2js/lib/xml2js.js b/node_modules/xml2js/lib/xml2js.js new file mode 100644 index 0000000..24b6e69 --- /dev/null +++ b/node_modules/xml2js/lib/xml2js.js @@ -0,0 +1,39 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var builder, defaults, parser, processors, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + defaults = require('./defaults'); + + builder = require('./builder'); + + parser = require('./parser'); + + processors = require('./processors'); + + exports.defaults = defaults.defaults; + + exports.processors = processors; + + exports.ValidationError = (function(superClass) { + extend(ValidationError, superClass); + + function ValidationError(message) { + this.message = message; + } + + return ValidationError; + + })(Error); + + exports.Builder = builder.Builder; + + exports.Parser = parser.Parser; + + exports.parseString = parser.parseString; + + exports.parseStringPromise = parser.parseStringPromise; + +}).call(this); diff --git a/node_modules/xml2js/package.json b/node_modules/xml2js/package.json new file mode 100644 index 0000000..bc8eb66 --- /dev/null +++ b/node_modules/xml2js/package.json @@ -0,0 +1,93 @@ +{ + "name": "xml2js", + "description": "Simple XML to JavaScript object converter.", + "keywords": [ + "xml", + "json" + ], + "homepage": "https://github.com/Leonidas-from-XIV/node-xml2js", + "version": "0.5.0", + "author": "Marek Kubica (https://xivilization.net)", + "contributors": [ + "maqr (https://github.com/maqr)", + "Ben Weaver 
(http://benweaver.com/)", + "Jae Kwon (https://github.com/jaekwon)", + "Jim Robert", + "Ștefan Rusu (http://www.saltwaterc.eu/)", + "Carter Cole (http://cartercole.com/)", + "Kurt Raschke (http://www.kurtraschke.com/)", + "Contra (https://github.com/Contra)", + "Marcelo Diniz (https://github.com/mdiniz)", + "Michael Hart (https://github.com/mhart)", + "Zachary Scott (http://zacharyscott.net/)", + "Raoul Millais (https://github.com/raoulmillais)", + "Salsita Software (http://www.salsitasoft.com/)", + "Mike Schilling (http://www.emotive.com/)", + "Jackson Tian (http://weibo.com/shyvo)", + "Mikhail Zyatin (https://github.com/Sitin)", + "Chris Tavares (https://github.com/christav)", + "Frank Xu (http://f2e.us/)", + "Guido D'Albore (http://www.bitstorm.it/)", + "Jack Senechal (http://jacksenechal.com/)", + "Matthias Hölzl (https://github.com/hoelzl)", + "Camille Reynders (http://www.creynders.be/)", + "Taylor Gautier (https://github.com/tsgautier)", + "Todd Bryan (https://github.com/toddrbryan)", + "Leore Avidar (http://leoreavidar.com/)", + "Dave Aitken (http://www.actionshrimp.com/)", + "Shaney Orrowe ", + "Candle ", + "Jess Telford (http://jes.st)", + "Tom Hughes < (http://compton.nu/)", + "Piotr Rochala (http://rocha.la/)", + "Michael Avila (https://github.com/michaelavila)", + "Ryan Gahl (https://github.com/ryedin)", + "Eric Laberge (https://github.com/elaberge)", + "Benjamin E. Coe (https://twitter.com/benjamincoe)", + "Stephen Cresswell (https://github.com/cressie176)", + "Pascal Ehlert (http://www.hacksrus.net/)", + "Tom Spencer (http://fiznool.com/)", + "Tristian Flanagan (https://github.com/tflanagan)", + "Tim Johns (https://github.com/TimJohns)", + "Bogdan Chadkin (https://github.com/TrySound)", + "David Wood (http://codesleuth.co.uk/)", + "Nicolas Maquet (https://github.com/nmaquet)", + "Lovell Fuller (http://lovell.info/)", + "d3adc0d3 (https://github.com/d3adc0d3)", + "James Crosby (https://github.com/autopulated)" + ], + "main": "./lib/xml2js", + "files": [ + "lib" + ], + "directories": { + "lib": "./lib" + }, + "scripts": { + "build": "cake build", + "test": "zap", + "coverage": "nyc npm test && nyc report", + "coveralls": "nyc npm test && nyc report --reporter=text-lcov | coveralls", + "doc": "cake doc" + }, + "repository": { + "type": "git", + "url": "https://github.com/Leonidas-from-XIV/node-xml2js.git" + }, + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "devDependencies": { + "coffee-script": ">=1.10.0", + "coveralls": "^3.0.1", + "diff": ">=1.0.8", + "docco": ">=0.6.2", + "nyc": ">=2.2.1", + "zap": ">=0.2.9" + }, + "engines": { + "node": ">=4.0.0" + }, + "license": "MIT" +} diff --git a/node_modules/xmlbuilder/CHANGELOG.md b/node_modules/xmlbuilder/CHANGELOG.md new file mode 100644 index 0000000..610f412 --- /dev/null +++ b/node_modules/xmlbuilder/CHANGELOG.md @@ -0,0 +1,470 @@ +# Change Log + +All notable changes to this project are documented in this file. This project adheres to [Semantic Versioning](http://semver.org/#semantic-versioning-200). + +## [11.0.0] - 2019-02-18 +- Calling `end()` with arguments no longer overwrites writer options. See [#120](https://github.com/oozcitak/xmlbuilder-js/issues/120). +- Added writer state and customizable space and endline functions to help customize writer behavior. Also added `openNode` and `closeNode` functions to writer. See [#193](https://github.com/oozcitak/xmlbuilder-js/issues/193). +- Fixed a bug where writer functions would not be called for nodes with a single child node in pretty print mode. 
See [#195](https://github.com/oozcitak/xmlbuilder-js/issues/195). +- Renamed `elEscape` to `textEscape` in `XMLStringifier`. +- Fixed a bug where empty arrays would produce child nodes. See [#190](https://github.com/oozcitak/xmlbuilder-js/issues/190). +- Removed the `skipNullAttributes` option. `null` attributes are now skipped by default. Added the `keepNullAttributes` option in case someone needs the old behavior. +- Removed the `skipNullNodes` option. `null` nodes are now skipped by default. Added the `keepNullNodes` option in case someone needs the old behavior. +- `undefined` values are now skipped when converting JS objects. +- Renamed stringify functions. See [#194](https://github.com/oozcitak/xmlbuilder-js/issues/194): + * `eleName` -> `name` + * `attName` -> `name` + * `eleText` -> `text` +- Fixed argument order for `attribute` function in the writer. See [#196](https://github.com/oozcitak/xmlbuilder-js/issues/196). +- Added `openAttribute` and `closeAttribute` functions to writer. See [#196](https://github.com/oozcitak/xmlbuilder-js/issues/196). +- Added node types to node objects. Node types and writer states are exported by the module with the `nodeType` and `writerState` properties. +- Fixed a bug where array items would not be correctly converted. See [#159](https://github.com/oozcitak/xmlbuilder-js/issues/159). +- Fixed a bug where mixed-content inside JS objects with `#text` decorator would not be correctly converted. See [#171](https://github.com/oozcitak/xmlbuilder-js/issues/171). +- Fixed a bug where JS objects would not be expanded in callback mode. See [#173](https://github.com/oozcitak/xmlbuilder-js/issues/173). +- Fixed a bug where character validation would not obey document's XML version. Added separate validation for XML 1.0 and XML 1.1 documents. See [#169](https://github.com/oozcitak/xmlbuilder-js/issues/169). +- Fixed a bug where names would not be validated according to the spec. See [#49](https://github.com/oozcitak/xmlbuilder-js/issues/49). +- Renamed `text` property to `value` in comment and cdata nodes to unify the API. +- Removed `doctype` function to prevent name clash with DOM implementation. Use the `dtd` function instead. +- Removed dummy nodes from the XML tree (Those were created while chain-building the tree). +- Renamed `attributes`property to `attribs` to prevent name clash with DOM property with the same name. +- Implemented the DOM standard (read-only) to support XPath lookups. XML namespaces are not currently supported. See [#122](https://github.com/oozcitak/xmlbuilder-js/issues/122). + +## [10.1.1] - 2018-10-24 +- Fixed an edge case where a null node at root level would be printed although `skipNullNodes` was set. See [#187](https://github.com/oozcitak/xmlbuilder-js/issues/187). + +## [10.1.0] - 2018-10-10 +- Added the `skipNullNodes` option to skip nodes with null values. See [#158](https://github.com/oozcitak/xmlbuilder-js/issues/158). + +## [10.0.0] - 2018-04-26 +- Added current indentation level as a parameter to the onData function when in callback mode. See [#125](https://github.com/oozcitak/xmlbuilder-js/issues/125). +- Added name of the current node and parent node to error messages where possible. See [#152](https://github.com/oozcitak/xmlbuilder-js/issues/152). This has the potential to break code depending on the content of error messages. +- Fixed an issue where objects created with Object.create(null) created an error. See [#176](https://github.com/oozcitak/xmlbuilder-js/issues/176). +- Added test builds for node.js v8 and v10. 
+ +## [9.0.7] - 2018-02-09 +- Simplified regex used for validating encoding. + +## [9.0.4] - 2017-08-16 +- `spacebeforeslash` writer option accepts `true` as well as space char(s). + +## [9.0.3] - 2017-08-15 +- `spacebeforeslash` writer option can now be used with XML fragments. + +## [9.0.2] - 2017-08-15 +- Added the `spacebeforeslash` writer option to add a space character before closing tags of empty elements. See +[#157](https://github.com/oozcitak/xmlbuilder-js/issues/157). + +## [9.0.1] - 2017-06-19 +- Fixed character validity checks to work with node.js 4.0 and 5.0. See +[#161](https://github.com/oozcitak/xmlbuilder-js/issues/161). + +## [9.0.0] - 2017-05-05 +- Removed case conversion options. +- Removed support for node.js 4.0 and 5.0. Minimum required version is now 6.0. +- Fixed valid char filter to use XML 1.1 instead of 1.0. See +[#147](https://github.com/oozcitak/xmlbuilder-js/issues/147). +- Added options for negative indentation and suppressing pretty printing of text +nodes. See +[#145](https://github.com/oozcitak/xmlbuilder-js/issues/145). + +## [8.2.2] - 2016-04-08 +- Falsy values can now be used as a text node in callback mode. + +## [8.2.1] - 2016-04-07 +- Falsy values can now be used as a text node. See +[#117](https://github.com/oozcitak/xmlbuilder-js/issues/117). + +## [8.2.0] - 2016-04-01 +- Removed lodash dependency to keep the library small and simple. See +[#114](https://github.com/oozcitak/xmlbuilder-js/issues/114), +[#53](https://github.com/oozcitak/xmlbuilder-js/issues/53), +and [#43](https://github.com/oozcitak/xmlbuilder-js/issues/43). +- Added title case to name conversion options. + +## [8.1.0] - 2016-03-29 +- Added the callback option to the `begin` export function. When used with a +callback function, the XML document will be generated in chunks and each chunk +will be passed to the supplied function. In this mode, `begin` uses a different +code path and the builder should use much less memory since the entire XML tree +is not kept. There are a few drawbacks though. For example, traversing the document +tree or adding attributes to a node after it is written is not possible. It is +also not possible to remove nodes or attributes. + +``` js +var result = ''; + +builder.begin(function(chunk) { result += chunk; }) + .dec() + .ele('root') + .ele('xmlbuilder').up() + .end(); +``` + +- Replaced native `Object.assign` with `lodash.assign` to support old JS engines. See [#111](https://github.com/oozcitak/xmlbuilder-js/issues/111). + +## [8.0.0] - 2016-03-25 +- Added the `begin` export function. See the wiki for details. +- Added the ability to add comments and processing instructions before and after the root element. Added `commentBefore`, `commentAfter`, `instructionBefore` and `instructionAfter` functions for this purpose. +- Dropped support for old node.js releases. Minimum required node.js version is now 4.0. + +## [7.0.0] - 2016-03-21 +- Processing instructions are now created as regular nodes. This is a major breaking change if you are using processing instructions. Previously processing instructions were inserted before their parent node. After this change processing instructions are appended to the children of the parent node. Note that it is not currently possible to insert processing instructions before or after the root element. +```js +root.ele('node').ins('pi'); +// pre-v7 + +// v7 + +``` + +## [6.0.0] - 2016-03-20 +- Added custom XML writers. 
The default string conversion functions are now collected under the `XMLStringWriter` class which can be accessed by the `stringWriter(options)` function exported by the module. An `XMLStreamWriter` is also added which outputs the XML document to a writable stream. A stream writer can be created by calling the `streamWriter(stream, options)` function exported by the module. Both classes are heavily customizable and the details are added to the wiki. It is also possible to write an XML writer from scratch and use it when calling `end()` on the XML document.
+
+## [5.0.1] - 2016-03-08
+- Moved require statements for text case conversion to the top of files to reduce lazy requires.
+
+## [5.0.0] - 2016-03-05
+- Added text case option for element names and attribute names. Valid cases are `lower`, `upper`, `camel`, `kebab` and `snake`.
+- Attribute and element values are escaped according to the [Canonical XML 1.0 specification](http://www.w3.org/TR/2000/WD-xml-c14n-20000119.html#charescaping). See [#54](https://github.com/oozcitak/xmlbuilder-js/issues/54) and [#86](https://github.com/oozcitak/xmlbuilder-js/issues/86).
+- Added the `allowEmpty` option to `end()`. When this option is set, empty elements are not self-closed.
+- Added support for [nested CDATA](https://en.wikipedia.org/wiki/CDATA#Nesting). The triad `]]>` in CDATA is now automatically replaced with `]]]]><![CDATA[>`.
+
+## [4.2.1] - 2016-01-15
+- Updated lodash dependency to 4.0.0.
+
+## [4.2.0] - 2015-12-16
+- Added the `noDoubleEncoding` option to `create()` to control whether existing html entities are encoded.
+
+## [4.1.0] - 2015-11-11
+- Added the `separateArrayItems` option to `create()` to control how arrays are handled when converting from objects. e.g.
+
+```js
+root.ele({ number: [ "one", "two" ]});
+// with separateArrayItems: true
+<number>
+  <one/>
+  <two/>
+</number>
+// with separateArrayItems: false
+<number>one</number>
+<number>two</number>
+```
+
+## [4.0.0] - 2015-11-01
+- Removed the `#list` decorator. Array items are now created as child nodes by default.
+- Fixed a bug where the XML encoding string was checked partially.
+
+## [3.1.0] - 2015-09-19
+- `#list` decorator ignores empty arrays.
+
+## [3.0.0] - 2015-09-10
+- Allow `\r`, `\n` and `\t` in attribute values without escaping. See [#86](https://github.com/oozcitak/xmlbuilder-js/issues/86).
+
+## [2.6.5] - 2015-09-09
+- Use native `isArray` instead of lodash.
+- Indentation of processing instructions is set to the parent element's.
+
+## [2.6.4] - 2015-05-27
+- Updated lodash dependency to 3.5.0.
+
+## [2.6.3] - 2015-05-27
+- Bumped version because previous release was not published on npm.
+
+## [2.6.2] - 2015-03-10
+- Updated lodash dependency to 3.5.0.
+
+## [2.6.1] - 2015-02-20
+- Updated lodash dependency to 3.3.0.
+
+## [2.6.0] - 2015-02-20
+- Fixed a bug where the `XMLNode` constructor overwrote the super class parent.
+- Removed document property from cloned nodes.
+- Switched to mocha.js for testing.
+
+## [2.5.2] - 2015-02-16
+- Updated lodash dependency to 3.2.0.
+
+## [2.5.1] - 2015-02-09
+- Updated lodash dependency to 3.1.0.
+- Support all node >= 0.8.
+
+## [2.5.0] - 2015-00-03
+- Updated lodash dependency to 3.0.0.
+
+## [2.4.6] - 2015-01-26
+- Show more information from attribute creation with null values.
+- Added iojs as an engine.
+- Self close elements with empty text.
+
+## [2.4.5] - 2014-11-15
+- Fixed prepublish script to run on windows.
+- Fixed bug in XMLStringifier where an undefined value was used while reporting an invalid encoding value.
+- Moved require statements to the top of files to reduce lazy requires. See [#62](https://github.com/oozcitak/xmlbuilder-js/issues/62). + +## [2.4.4] - 2014-09-08 +- Added the `offset` option to `toString()` for use in XML fragments. + +## [2.4.3] - 2014-08-13 +- Corrected license in package description. + +## [2.4.2] - 2014-08-13 +- Dropped performance test and memwatch dependency. + +## [2.4.1] - 2014-08-12 +- Fixed a bug where empty indent string was omitted when pretty printing. See [#59](https://github.com/oozcitak/xmlbuilder-js/issues/59). + +## [2.4.0] - 2014-08-04 +- Correct cases of pubID and sysID. +- Use single lodash instead of separate npm modules. See [#53](https://github.com/oozcitak/xmlbuilder-js/issues/53). +- Escape according to Canonical XML 1.0. See [#54](https://github.com/oozcitak/xmlbuilder-js/issues/54). + +## [2.3.0] - 2014-07-17 +- Convert objects to JS primitives while sanitizing user input. +- Object builder preserves items with null values. See [#44](https://github.com/oozcitak/xmlbuilder-js/issues/44). +- Use modularized lodash functions to cut down dependencies. +- Process empty objects when converting from objects so that we don't throw on empty child objects. + +## [2.2.1] - 2014-04-04 +- Bumped version because previous release was not published on npm. + +## [2.2.0] - 2014-04-04 +- Switch to lodash from underscore. +- Removed legacy `ext` option from `create()`. +- Drop old node versions: 0.4, 0.5, 0.6. 0.8 is the minimum requirement from now on. + +## [2.1.0] - 2013-12-30 +- Removed duplicate null checks from constructors. +- Fixed node count in performance test. +- Added option for skipping null attribute values. See [#26](https://github.com/oozcitak/xmlbuilder-js/issues/26). +- Allow multiple values in `att()` and `ins()`. +- Added ability to run individual performance tests. +- Added flag for ignoring decorator strings. + +## [2.0.1] - 2013-12-24 +- Removed performance tests from npm package. + +## [2.0.0] - 2013-12-24 +- Combined loops for speed up. +- Added support for the DTD and XML declaration. +- `clone` includes attributes. +- Added performance tests. +- Evaluate attribute value if function. +- Evaluate instruction value if function. + +## [1.1.2] - 2013-12-11 +- Changed processing instruction decorator to `?`. + +## [1.1.1] - 2013-12-11 +- Added processing instructions to JS object conversion. + +## [1.1.0] - 2013-12-10 +- Added license to package. +- `create()` and `element()` accept JS object to fully build the document. +- Added `nod()` and `n()` aliases for `node()`. +- Renamed `convertAttChar` decorator to `convertAttKey`. +- Ignore empty decorator strings when converting JS objects. + +## [1.0.2] - 2013-11-27 +- Removed temp file which was accidentally included in the package. + +## [1.0.1] - 2013-11-27 +- Custom stringify functions affect current instance only. + +## [1.0.0] - 2013-11-27 +- Added processing instructions. +- Added stringify functions to sanitize and convert input values. +- Added option for headless XML documents. +- Added vows tests. +- Removed Makefile. Using npm publish scripts instead. +- Removed the `begin()` function. `create()` begins the document by creating the root node. + +## [0.4.3] - 2013-11-08 +- Added option to include surrogate pairs in XML content. See [#29](https://github.com/oozcitak/xmlbuilder-js/issues/29). +- Fixed empty value string representation in pretty mode. +- Added pre and postpublish scripts to package.json. +- Filtered out prototype properties when appending attributes. 
See [#31](https://github.com/oozcitak/xmlbuilder-js/issues/31). + +## [0.4.2] - 2012-09-14 +- Removed README.md from `.npmignore`. + +## [0.4.1] - 2012-08-31 +- Removed `begin()` calls in favor of `XMLBuilder` constructor. +- Added the `end()` function. `end()` is a convenience over `doc().toString()`. + +## [0.4.0] - 2012-08-31 +- Added arguments to `XMLBuilder` constructor to allow the name of the root element and XML prolog to be defined in one line. +- Soft deprecated `begin()`. + +## [0.3.11] - 2012-08-13 +- Package keywords are fixed to be an array of values. + +## [0.3.10] - 2012-08-13 +- Brought back npm package contents which were lost due to incorrect configuration of `package.json` in previous releases. + +## [0.3.3] - 2012-07-27 +- Implemented `importXMLBuilder()`. + +## [0.3.2] - 2012-07-20 +- Fixed a duplicated escaping problem on `element()`. +- Fixed a problem with text node creation from empty string. +- Calling `root()` on the document element returns the root element. +- `XMLBuilder` no longer extends `XMLFragment`. + +## [0.3.1] - 2011-11-28 +- Added guards for document element so that nodes cannot be inserted at document level. + +## [0.3.0] - 2011-11-28 +- Added `doc()` to return the document element. + +## [0.2.2] - 2011-11-28 +- Prevent code relying on `up()`'s older behavior to break. + +## [0.2.1] - 2011-11-28 +- Added the `root()` function. + +## [0.2.0] - 2011-11-21 +- Added Travis-CI integration. +- Added coffee-script dependency. +- Added insert, traversal and delete functions. + +## [0.1.7] - 2011-10-25 +- No changes. Accidental release. + +## [0.1.6] - 2011-10-25 +- Corrected `package.json` bugs link to `url` from `web`. + +## [0.1.5] - 2011-08-08 +- Added missing npm package contents. + +## [0.1.4] - 2011-07-29 +- Text-only nodes are no longer indented. +- Added documentation for multiple instances. + +## [0.1.3] - 2011-07-27 +- Exported the `create()` function to return a new instance. This allows multiple builder instances to be constructed. +- Fixed `u()` function so that it now correctly calls `up()`. +- Fixed typo in `element()` so that `attributes` and `text` can be passed interchangeably. + +## [0.1.2] - 2011-06-03 +- `ele()` accepts element text. +- `attributes()` now overrides existing attributes if passed the same attribute name. + +## [0.1.1] - 2011-05-19 +- Added the raw output option. +- Removed most validity checks. + +## [0.1.0] - 2011-04-27 +- `text()` and `cdata()` now return parent element. +- Attribute values are escaped. + +## [0.0.7] - 2011-04-23 +- Coerced text values to string. + +## [0.0.6] - 2011-02-23 +- Added support for XML comments. +- Text nodes are checked against CharData. + +## [0.0.5] - 2010-12-31 +- Corrected the name of the main npm module in `package.json`. + +## [0.0.4] - 2010-12-28 +- Added `.npmignore`. + +## [0.0.3] - 2010-12-27 +- root element is now constructed in `begin()`. +- moved prolog to `begin()`. +- Added the ability to have CDATA in element text. +- Removed unused prolog aliases. +- Removed `builder()` function from main module. +- Added the name of the main npm module in `package.json`. + +## [0.0.2] - 2010-11-03 +- `element()` expands nested arrays. +- Added pretty printing. +- Added the `up()`, `build()` and `prolog()` functions. +- Added readme. 
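Two of the conveniences noted in the entries above still hold in current releases and are easy to sketch. Illustrative only, written against the modern API rather than the 0.x/2.x versions that introduced them: `att()` accepting several attributes at once, and `end()` as shorthand for `doc().toString()`.

``` js
var builder = require('xmlbuilder');

// att() accepts an object of name/value pairs (the "multiple values" of 2.1.0):
var root = builder.create('data', { headless: true });
root.att({ x: 3, y: 9 });

// end() is a convenience over doc().toString() (0.4.1):
console.log(root.end());            // <data x="3" y="9"/>
console.log(root.doc().toString()); // same output
```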
+ +## 0.0.1 - 2010-11-02 +- Initial release + +[11.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v10.1.1...v11.0.0 +[10.1.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v10.1.0...v10.1.1 +[10.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v10.0.0...v10.1.0 +[10.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.7...v10.0.0 +[9.0.7]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.4...v9.0.7 +[9.0.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.3...v9.0.4 +[9.0.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.2...v9.0.3 +[9.0.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.1...v9.0.2 +[9.0.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.0...v9.0.1 +[9.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.2.2...v9.0.0 +[8.2.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.2.1...v8.2.2 +[8.2.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.2.0...v8.2.1 +[8.2.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.1.0...v8.2.0 +[8.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.0.0...v8.1.0 +[8.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v7.0.0...v8.0.0 +[7.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v6.0.0...v7.0.0 +[6.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v5.0.1...v6.0.0 +[5.0.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v5.0.0...v5.0.1 +[5.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v4.2.1...v5.0.0 +[4.2.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v4.2.0...v4.2.1 +[4.2.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v4.1.0...v4.2.0 +[4.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v4.0.0...v4.1.0 +[4.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v3.1.0...v4.0.0 +[3.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v3.0.0...v3.1.0 +[3.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.5...v3.0.0 +[2.6.5]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.4...v2.6.5 +[2.6.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.3...v2.6.4 +[2.6.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.2...v2.6.3 +[2.6.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.1...v2.6.2 +[2.6.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.0...v2.6.1 +[2.6.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.5.2...v2.6.0 +[2.5.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.5.1...v2.5.2 +[2.5.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.5.0...v2.5.1 +[2.5.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.6...v2.5.0 +[2.4.6]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.5...v2.4.6 +[2.4.5]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.4...v2.4.5 +[2.4.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.3...v2.4.4 +[2.4.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.2...v2.4.3 +[2.4.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.1...v2.4.2 +[2.4.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.0...v2.4.1 +[2.4.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.3.0...v2.4.0 +[2.3.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.2.1...v2.3.0 +[2.2.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.2.0...v2.2.1 +[2.2.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.1.0...v2.2.0 +[2.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.0.1...v2.1.0 +[2.0.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.0.0...v2.0.1 
+[2.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.1.2...v2.0.0 +[1.1.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.1.1...v1.1.2 +[1.1.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.1.0...v1.1.1 +[1.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.0.2...v1.1.0 +[1.0.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.0.1...v1.0.2 +[1.0.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.0.0...v1.0.1 +[1.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.4.3...v1.0.0 +[0.4.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.4.2...v0.4.3 +[0.4.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.4.1...v0.4.2 +[0.4.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.4.0...v0.4.1 +[0.4.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.11...v0.4.0 +[0.3.11]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.10...v0.3.11 +[0.3.10]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.3...v0.3.10 +[0.3.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.2...v0.3.3 +[0.3.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.1...v0.3.2 +[0.3.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.0...v0.3.1 +[0.3.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.2.2...v0.3.0 +[0.2.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.2.1...v0.2.2 +[0.2.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.2.0...v0.2.1 +[0.2.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.7...v0.2.0 +[0.1.7]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.6...v0.1.7 +[0.1.6]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.5...v0.1.6 +[0.1.5]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.4...v0.1.5 +[0.1.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.3...v0.1.4 +[0.1.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.2...v0.1.3 +[0.1.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.1...v0.1.2 +[0.1.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.0...v0.1.1 +[0.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.7...v0.1.0 +[0.0.7]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.6...v0.0.7 +[0.0.6]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.5...v0.0.6 +[0.0.5]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.4...v0.0.5 +[0.0.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.3...v0.0.4 +[0.0.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.2...v0.0.3 +[0.0.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.1...v0.0.2 + diff --git a/node_modules/xmlbuilder/LICENSE b/node_modules/xmlbuilder/LICENSE new file mode 100644 index 0000000..9fb9700 --- /dev/null +++ b/node_modules/xmlbuilder/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 Ozgur Ozcitak + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/xmlbuilder/README.md b/node_modules/xmlbuilder/README.md
new file mode 100644
index 0000000..1a96edb
--- /dev/null
+++ b/node_modules/xmlbuilder/README.md
@@ -0,0 +1,86 @@
+# xmlbuilder-js
+
+An XML builder for [node.js](https://nodejs.org/) similar to
+[java-xmlbuilder](https://github.com/jmurty/java-xmlbuilder).
+
+[![License](http://img.shields.io/npm/l/xmlbuilder.svg?style=flat-square)](http://opensource.org/licenses/MIT)
+[![NPM Version](http://img.shields.io/npm/v/xmlbuilder.svg?style=flat-square)](https://npmjs.com/package/xmlbuilder)
+[![NPM Downloads](https://img.shields.io/npm/dm/xmlbuilder.svg?style=flat-square)](https://npmjs.com/package/xmlbuilder)
+
+[![Travis Build Status](http://img.shields.io/travis/oozcitak/xmlbuilder-js.svg?style=flat-square)](http://travis-ci.org/oozcitak/xmlbuilder-js)
+[![AppVeyor Build status](https://ci.appveyor.com/api/projects/status/bf7odb20hj77isry?svg=true)](https://ci.appveyor.com/project/oozcitak/xmlbuilder-js)
+[![Dev Dependency Status](http://img.shields.io/david/dev/oozcitak/xmlbuilder-js.svg?style=flat-square)](https://david-dm.org/oozcitak/xmlbuilder-js)
+[![Code Coverage](https://img.shields.io/coveralls/oozcitak/xmlbuilder-js.svg?style=flat-square)](https://coveralls.io/github/oozcitak/xmlbuilder-js)
+
+### Installation:
+
+``` sh
+npm install xmlbuilder
+```
+
+### Usage:
+
+``` js
+var builder = require('xmlbuilder');
+var xml = builder.create('root')
+  .ele('xmlbuilder')
+    .ele('repo', {'type': 'git'}, 'git://github.com/oozcitak/xmlbuilder-js.git')
+  .end({ pretty: true});
+
+console.log(xml);
+```
+
+will result in:
+
+``` xml
+<?xml version="1.0"?>
+<root>
+  <xmlbuilder>
+    <repo type="git">git://github.com/oozcitak/xmlbuilder-js.git</repo>
+  </xmlbuilder>
+</root>
+```
+
+It is also possible to convert objects into nodes:
+
+``` js
+builder.create({
+  root: {
+    xmlbuilder: {
+      repo: {
+        '@type': 'git', // attributes start with @
+        '#text': 'git://github.com/oozcitak/xmlbuilder-js.git' // text node
+      }
+    }
+  }
+});
+```
+
+If you need to do some processing:
+
+``` js
+var root = builder.create('squares');
+root.com('f(x) = x^2');
+for(var i = 1; i <= 5; i++)
+{
+  var item = root.ele('data');
+  item.att('x', i);
+  item.att('y', i * i);
+}
+```
+
+This will result in:
+
+``` xml
+<?xml version="1.0"?>
+<squares>
+  <!-- f(x) = x^2 -->
+  <data x="1" y="1"/>
+  <data x="2" y="4"/>
+  <data x="3" y="9"/>
+  <data x="4" y="16"/>
+  <data x="5" y="25"/>
+</squares>
+```
+
+See the [wiki](https://github.com/oozcitak/xmlbuilder-js/wiki) for details and [examples](https://github.com/oozcitak/xmlbuilder-js/wiki/Examples) for more complex examples.
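Not part of the vendored README: a minimal sketch of the stream-writer usage described in the 6.0.0 changelog entry above, assuming `streamWriter(stream, options)` and `end(writer)` behave as documented there (the output file name is arbitrary).

``` js
var fs = require('fs');
var builder = require('xmlbuilder');

// Send output to a writable stream instead of building a string in memory.
var stream = fs.createWriteStream('squares.xml');
var writer = builder.streamWriter(stream, { pretty: true });

var root = builder.create('squares');
root.com('f(x) = x^2');
for (var i = 1; i <= 5; i++) {
  root.ele('data', { x: i, y: i * i });
}

root.end(writer); // the document is written to the stream
stream.end();
```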
diff --git a/node_modules/xmlbuilder/appveyor.yml b/node_modules/xmlbuilder/appveyor.yml new file mode 100644 index 0000000..9604b78 --- /dev/null +++ b/node_modules/xmlbuilder/appveyor.yml @@ -0,0 +1,20 @@ +environment: + matrix: + - nodejs_version: "4" + - nodejs_version: "5" + - nodejs_version: "6" + - nodejs_version: "8" + - nodejs_version: "10" + - nodejs_version: "" # latest + +install: + - ps: "Install-Product node $env:nodejs_version" + - "npm install" + +test_script: + - "node --version" + - "npm --version" + - "npm test" + +build: off + diff --git a/node_modules/xmlbuilder/lib/Derivation.js b/node_modules/xmlbuilder/lib/Derivation.js new file mode 100644 index 0000000..2abfd08 --- /dev/null +++ b/node_modules/xmlbuilder/lib/Derivation.js @@ -0,0 +1,10 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Restriction: 1, + Extension: 2, + Union: 4, + List: 8 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/DocumentPosition.js b/node_modules/xmlbuilder/lib/DocumentPosition.js new file mode 100644 index 0000000..1cbd21c --- /dev/null +++ b/node_modules/xmlbuilder/lib/DocumentPosition.js @@ -0,0 +1,12 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Disconnected: 1, + Preceding: 2, + Following: 4, + Contains: 8, + ContainedBy: 16, + ImplementationSpecific: 32 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/NodeType.js b/node_modules/xmlbuilder/lib/NodeType.js new file mode 100644 index 0000000..4c200e3 --- /dev/null +++ b/node_modules/xmlbuilder/lib/NodeType.js @@ -0,0 +1,23 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Element: 1, + Attribute: 2, + Text: 3, + CData: 4, + EntityReference: 5, + EntityDeclaration: 6, + ProcessingInstruction: 7, + Comment: 8, + Document: 9, + DocType: 10, + DocumentFragment: 11, + NotationDeclaration: 12, + Declaration: 201, + Raw: 202, + AttributeDeclaration: 203, + ElementDeclaration: 204, + Dummy: 205 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/OperationType.js b/node_modules/xmlbuilder/lib/OperationType.js new file mode 100644 index 0000000..29428f6 --- /dev/null +++ b/node_modules/xmlbuilder/lib/OperationType.js @@ -0,0 +1,11 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Clones: 1, + Imported: 2, + Deleted: 3, + Renamed: 4, + Adopted: 5 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/Utility.js b/node_modules/xmlbuilder/lib/Utility.js new file mode 100644 index 0000000..1d42cfd --- /dev/null +++ b/node_modules/xmlbuilder/lib/Utility.js @@ -0,0 +1,83 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var assign, getValue, isArray, isEmpty, isFunction, isObject, isPlainObject, + slice = [].slice, + hasProp = {}.hasOwnProperty; + + assign = function() { + var i, key, len, source, sources, target; + target = arguments[0], sources = 2 <= arguments.length ? 
slice.call(arguments, 1) : []; + if (isFunction(Object.assign)) { + Object.assign.apply(null, arguments); + } else { + for (i = 0, len = sources.length; i < len; i++) { + source = sources[i]; + if (source != null) { + for (key in source) { + if (!hasProp.call(source, key)) continue; + target[key] = source[key]; + } + } + } + } + return target; + }; + + isFunction = function(val) { + return !!val && Object.prototype.toString.call(val) === '[object Function]'; + }; + + isObject = function(val) { + var ref; + return !!val && ((ref = typeof val) === 'function' || ref === 'object'); + }; + + isArray = function(val) { + if (isFunction(Array.isArray)) { + return Array.isArray(val); + } else { + return Object.prototype.toString.call(val) === '[object Array]'; + } + }; + + isEmpty = function(val) { + var key; + if (isArray(val)) { + return !val.length; + } else { + for (key in val) { + if (!hasProp.call(val, key)) continue; + return false; + } + return true; + } + }; + + isPlainObject = function(val) { + var ctor, proto; + return isObject(val) && (proto = Object.getPrototypeOf(val)) && (ctor = proto.constructor) && (typeof ctor === 'function') && (ctor instanceof ctor) && (Function.prototype.toString.call(ctor) === Function.prototype.toString.call(Object)); + }; + + getValue = function(obj) { + if (isFunction(obj.valueOf)) { + return obj.valueOf(); + } else { + return obj; + } + }; + + module.exports.assign = assign; + + module.exports.isFunction = isFunction; + + module.exports.isObject = isObject; + + module.exports.isArray = isArray; + + module.exports.isEmpty = isEmpty; + + module.exports.isPlainObject = isPlainObject; + + module.exports.getValue = getValue; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/WriterState.js b/node_modules/xmlbuilder/lib/WriterState.js new file mode 100644 index 0000000..0923eec --- /dev/null +++ b/node_modules/xmlbuilder/lib/WriterState.js @@ -0,0 +1,10 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + None: 0, + OpenTag: 1, + InsideTag: 2, + CloseTag: 3 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLAttribute.js b/node_modules/xmlbuilder/lib/XMLAttribute.js new file mode 100644 index 0000000..c208566 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLAttribute.js @@ -0,0 +1,108 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLAttribute, XMLNode; + + NodeType = require('./NodeType'); + + XMLNode = require('./XMLNode'); + + module.exports = XMLAttribute = (function() { + function XMLAttribute(parent, name, value) { + this.parent = parent; + if (this.parent) { + this.options = this.parent.options; + this.stringify = this.parent.stringify; + } + if (name == null) { + throw new Error("Missing attribute name. 
" + this.debugInfo(name)); + } + this.name = this.stringify.name(name); + this.value = this.stringify.attValue(value); + this.type = NodeType.Attribute; + this.isId = false; + this.schemaTypeInfo = null; + } + + Object.defineProperty(XMLAttribute.prototype, 'nodeType', { + get: function() { + return this.type; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'ownerElement', { + get: function() { + return this.parent; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'textContent', { + get: function() { + return this.value; + }, + set: function(value) { + return this.value = value || ''; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'namespaceURI', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'prefix', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'localName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'specified', { + get: function() { + return true; + } + }); + + XMLAttribute.prototype.clone = function() { + return Object.create(this); + }; + + XMLAttribute.prototype.toString = function(options) { + return this.options.writer.attribute(this, this.options.writer.filterOptions(options)); + }; + + XMLAttribute.prototype.debugInfo = function(name) { + name = name || this.name; + if (name == null) { + return "parent: <" + this.parent.name + ">"; + } else { + return "attribute: {" + name + "}, parent: <" + this.parent.name + ">"; + } + }; + + XMLAttribute.prototype.isEqualNode = function(node) { + if (node.namespaceURI !== this.namespaceURI) { + return false; + } + if (node.prefix !== this.prefix) { + return false; + } + if (node.localName !== this.localName) { + return false; + } + if (node.value !== this.value) { + return false; + } + return true; + }; + + return XMLAttribute; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLCData.js b/node_modules/xmlbuilder/lib/XMLCData.js new file mode 100644 index 0000000..c732ec5 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLCData.js @@ -0,0 +1,36 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLCData, XMLCharacterData, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLCharacterData = require('./XMLCharacterData'); + + module.exports = XMLCData = (function(superClass) { + extend(XMLCData, superClass); + + function XMLCData(parent, text) { + XMLCData.__super__.constructor.call(this, parent); + if (text == null) { + throw new Error("Missing CDATA text. 
" + this.debugInfo()); + } + this.name = "#cdata-section"; + this.type = NodeType.CData; + this.value = this.stringify.cdata(text); + } + + XMLCData.prototype.clone = function() { + return Object.create(this); + }; + + XMLCData.prototype.toString = function(options) { + return this.options.writer.cdata(this, this.options.writer.filterOptions(options)); + }; + + return XMLCData; + + })(XMLCharacterData); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLCharacterData.js b/node_modules/xmlbuilder/lib/XMLCharacterData.js new file mode 100644 index 0000000..c007a18 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLCharacterData.js @@ -0,0 +1,79 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLCharacterData, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + module.exports = XMLCharacterData = (function(superClass) { + extend(XMLCharacterData, superClass); + + function XMLCharacterData(parent) { + XMLCharacterData.__super__.constructor.call(this, parent); + this.value = ''; + } + + Object.defineProperty(XMLCharacterData.prototype, 'data', { + get: function() { + return this.value; + }, + set: function(value) { + return this.value = value || ''; + } + }); + + Object.defineProperty(XMLCharacterData.prototype, 'length', { + get: function() { + return this.value.length; + } + }); + + Object.defineProperty(XMLCharacterData.prototype, 'textContent', { + get: function() { + return this.value; + }, + set: function(value) { + return this.value = value || ''; + } + }); + + XMLCharacterData.prototype.clone = function() { + return Object.create(this); + }; + + XMLCharacterData.prototype.substringData = function(offset, count) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.appendData = function(arg) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.insertData = function(offset, arg) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.deleteData = function(offset, count) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.replaceData = function(offset, count, arg) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLCharacterData.prototype.isEqualNode = function(node) { + if (!XMLCharacterData.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.data !== this.data) { + return false; + } + return true; + }; + + return XMLCharacterData; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLComment.js b/node_modules/xmlbuilder/lib/XMLComment.js new file mode 100644 index 0000000..8287216 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLComment.js @@ -0,0 +1,36 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLCharacterData, XMLComment, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLCharacterData = require('./XMLCharacterData'); + + module.exports = XMLComment = (function(superClass) { + extend(XMLComment, superClass); + + function XMLComment(parent, text) { + XMLComment.__super__.constructor.call(this, parent); + if (text == null) { + throw new Error("Missing comment text. " + this.debugInfo()); + } + this.name = "#comment"; + this.type = NodeType.Comment; + this.value = this.stringify.comment(text); + } + + XMLComment.prototype.clone = function() { + return Object.create(this); + }; + + XMLComment.prototype.toString = function(options) { + return this.options.writer.comment(this, this.options.writer.filterOptions(options)); + }; + + return XMLComment; + + })(XMLCharacterData); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDOMConfiguration.js b/node_modules/xmlbuilder/lib/XMLDOMConfiguration.js new file mode 100644 index 0000000..b331b86 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDOMConfiguration.js @@ -0,0 +1,64 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMConfiguration, XMLDOMErrorHandler, XMLDOMStringList; + + XMLDOMErrorHandler = require('./XMLDOMErrorHandler'); + + XMLDOMStringList = require('./XMLDOMStringList'); + + module.exports = XMLDOMConfiguration = (function() { + function XMLDOMConfiguration() { + var clonedSelf; + this.defaultParams = { + "canonical-form": false, + "cdata-sections": false, + "comments": false, + "datatype-normalization": false, + "element-content-whitespace": true, + "entities": true, + "error-handler": new XMLDOMErrorHandler(), + "infoset": true, + "validate-if-schema": false, + "namespaces": true, + "namespace-declarations": true, + "normalize-characters": false, + "schema-location": '', + "schema-type": '', + "split-cdata-sections": true, + "validate": false, + "well-formed": true + }; + this.params = clonedSelf = Object.create(this.defaultParams); + } + + Object.defineProperty(XMLDOMConfiguration.prototype, 'parameterNames', { + get: function() { + return new XMLDOMStringList(Object.keys(this.defaultParams)); + } + }); + + XMLDOMConfiguration.prototype.getParameter = function(name) { + if (this.params.hasOwnProperty(name)) { + return this.params[name]; + } else { + return null; + } + }; + + XMLDOMConfiguration.prototype.canSetParameter = function(name, value) { + return true; + }; + + XMLDOMConfiguration.prototype.setParameter = function(name, value) { + if (value != null) { + return this.params[name] = value; + } else { + return delete this.params[name]; + } + }; + + return 
XMLDOMConfiguration; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js b/node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js new file mode 100644 index 0000000..4a0446c --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js @@ -0,0 +1,16 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMErrorHandler; + + module.exports = XMLDOMErrorHandler = (function() { + function XMLDOMErrorHandler() {} + + XMLDOMErrorHandler.prototype.handleError = function(error) { + throw new Error(error); + }; + + return XMLDOMErrorHandler; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDOMImplementation.js b/node_modules/xmlbuilder/lib/XMLDOMImplementation.js new file mode 100644 index 0000000..4f9f9db --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDOMImplementation.js @@ -0,0 +1,32 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMImplementation; + + module.exports = XMLDOMImplementation = (function() { + function XMLDOMImplementation() {} + + XMLDOMImplementation.prototype.hasFeature = function(feature, version) { + return true; + }; + + XMLDOMImplementation.prototype.createDocumentType = function(qualifiedName, publicId, systemId) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.createDocument = function(namespaceURI, qualifiedName, doctype) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.createHTMLDocument = function(title) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.getFeature = function(feature, version) { + throw new Error("This DOM method is not implemented."); + }; + + return XMLDOMImplementation; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDOMStringList.js b/node_modules/xmlbuilder/lib/XMLDOMStringList.js new file mode 100644 index 0000000..ba558c5 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDOMStringList.js @@ -0,0 +1,28 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMStringList; + + module.exports = XMLDOMStringList = (function() { + function XMLDOMStringList(arr) { + this.arr = arr || []; + } + + Object.defineProperty(XMLDOMStringList.prototype, 'length', { + get: function() { + return this.arr.length; + } + }); + + XMLDOMStringList.prototype.item = function(index) { + return this.arr[index] || null; + }; + + XMLDOMStringList.prototype.contains = function(str) { + return this.arr.indexOf(str) !== -1; + }; + + return XMLDOMStringList; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDTDAttList.js b/node_modules/xmlbuilder/lib/XMLDTDAttList.js new file mode 100644 index 0000000..aca9dbd --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDTDAttList.js @@ -0,0 +1,55 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDAttList, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDTDAttList = (function(superClass) { + extend(XMLDTDAttList, superClass); + + function XMLDTDAttList(parent, elementName, attributeName, attributeType, defaultValueType, defaultValue) { + 
XMLDTDAttList.__super__.constructor.call(this, parent); + if (elementName == null) { + throw new Error("Missing DTD element name. " + this.debugInfo()); + } + if (attributeName == null) { + throw new Error("Missing DTD attribute name. " + this.debugInfo(elementName)); + } + if (!attributeType) { + throw new Error("Missing DTD attribute type. " + this.debugInfo(elementName)); + } + if (!defaultValueType) { + throw new Error("Missing DTD attribute default. " + this.debugInfo(elementName)); + } + if (defaultValueType.indexOf('#') !== 0) { + defaultValueType = '#' + defaultValueType; + } + if (!defaultValueType.match(/^(#REQUIRED|#IMPLIED|#FIXED|#DEFAULT)$/)) { + throw new Error("Invalid default value type; expected: #REQUIRED, #IMPLIED, #FIXED or #DEFAULT. " + this.debugInfo(elementName)); + } + if (defaultValue && !defaultValueType.match(/^(#FIXED|#DEFAULT)$/)) { + throw new Error("Default value only applies to #FIXED or #DEFAULT. " + this.debugInfo(elementName)); + } + this.elementName = this.stringify.name(elementName); + this.type = NodeType.AttributeDeclaration; + this.attributeName = this.stringify.name(attributeName); + this.attributeType = this.stringify.dtdAttType(attributeType); + if (defaultValue) { + this.defaultValue = this.stringify.dtdAttDefault(defaultValue); + } + this.defaultValueType = defaultValueType; + } + + XMLDTDAttList.prototype.toString = function(options) { + return this.options.writer.dtdAttList(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDAttList; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDTDElement.js b/node_modules/xmlbuilder/lib/XMLDTDElement.js new file mode 100644 index 0000000..f8f1ae7 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDTDElement.js @@ -0,0 +1,38 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDElement, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDTDElement = (function(superClass) { + extend(XMLDTDElement, superClass); + + function XMLDTDElement(parent, name, value) { + XMLDTDElement.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing DTD element name. 
" + this.debugInfo()); + } + if (!value) { + value = '(#PCDATA)'; + } + if (Array.isArray(value)) { + value = '(' + value.join(',') + ')'; + } + this.name = this.stringify.name(name); + this.type = NodeType.ElementDeclaration; + this.value = this.stringify.dtdElementValue(value); + } + + XMLDTDElement.prototype.toString = function(options) { + return this.options.writer.dtdElement(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDElement; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDTDEntity.js b/node_modules/xmlbuilder/lib/XMLDTDEntity.js new file mode 100644 index 0000000..0a940d6 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDTDEntity.js @@ -0,0 +1,97 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDEntity, XMLNode, isObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isObject = require('./Utility').isObject; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDTDEntity = (function(superClass) { + extend(XMLDTDEntity, superClass); + + function XMLDTDEntity(parent, pe, name, value) { + XMLDTDEntity.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing DTD entity name. " + this.debugInfo(name)); + } + if (value == null) { + throw new Error("Missing DTD entity value. " + this.debugInfo(name)); + } + this.pe = !!pe; + this.name = this.stringify.name(name); + this.type = NodeType.EntityDeclaration; + if (!isObject(value)) { + this.value = this.stringify.dtdEntityValue(value); + this.internal = true; + } else { + if (!value.pubID && !value.sysID) { + throw new Error("Public and/or system identifiers are required for an external entity. " + this.debugInfo(name)); + } + if (value.pubID && !value.sysID) { + throw new Error("System identifier is required for a public external entity. " + this.debugInfo(name)); + } + this.internal = false; + if (value.pubID != null) { + this.pubID = this.stringify.dtdPubID(value.pubID); + } + if (value.sysID != null) { + this.sysID = this.stringify.dtdSysID(value.sysID); + } + if (value.nData != null) { + this.nData = this.stringify.dtdNData(value.nData); + } + if (this.pe && this.nData) { + throw new Error("Notation declaration is not allowed in a parameter entity. 
" + this.debugInfo(name)); + } + } + } + + Object.defineProperty(XMLDTDEntity.prototype, 'publicId', { + get: function() { + return this.pubID; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'systemId', { + get: function() { + return this.sysID; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'notationName', { + get: function() { + return this.nData || null; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'inputEncoding', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'xmlEncoding', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'xmlVersion', { + get: function() { + return null; + } + }); + + XMLDTDEntity.prototype.toString = function(options) { + return this.options.writer.dtdEntity(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDEntity; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDTDNotation.js b/node_modules/xmlbuilder/lib/XMLDTDNotation.js new file mode 100644 index 0000000..57a119d --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDTDNotation.js @@ -0,0 +1,52 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDNotation, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDTDNotation = (function(superClass) { + extend(XMLDTDNotation, superClass); + + function XMLDTDNotation(parent, name, value) { + XMLDTDNotation.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing DTD notation name. " + this.debugInfo(name)); + } + if (!value.pubID && !value.sysID) { + throw new Error("Public or system identifiers are required for an external entity. 
" + this.debugInfo(name)); + } + this.name = this.stringify.name(name); + this.type = NodeType.NotationDeclaration; + if (value.pubID != null) { + this.pubID = this.stringify.dtdPubID(value.pubID); + } + if (value.sysID != null) { + this.sysID = this.stringify.dtdSysID(value.sysID); + } + } + + Object.defineProperty(XMLDTDNotation.prototype, 'publicId', { + get: function() { + return this.pubID; + } + }); + + Object.defineProperty(XMLDTDNotation.prototype, 'systemId', { + get: function() { + return this.sysID; + } + }); + + XMLDTDNotation.prototype.toString = function(options) { + return this.options.writer.dtdNotation(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDNotation; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDeclaration.js b/node_modules/xmlbuilder/lib/XMLDeclaration.js new file mode 100644 index 0000000..d4f7f44 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDeclaration.js @@ -0,0 +1,43 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDeclaration, XMLNode, isObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isObject = require('./Utility').isObject; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDeclaration = (function(superClass) { + extend(XMLDeclaration, superClass); + + function XMLDeclaration(parent, version, encoding, standalone) { + var ref; + XMLDeclaration.__super__.constructor.call(this, parent); + if (isObject(version)) { + ref = version, version = ref.version, encoding = ref.encoding, standalone = ref.standalone; + } + if (!version) { + version = '1.0'; + } + this.type = NodeType.Declaration; + this.version = this.stringify.xmlVersion(version); + if (encoding != null) { + this.encoding = this.stringify.xmlEncoding(encoding); + } + if (standalone != null) { + this.standalone = this.stringify.xmlStandalone(standalone); + } + } + + XMLDeclaration.prototype.toString = function(options) { + return this.options.writer.declaration(this, this.options.writer.filterOptions(options)); + }; + + return XMLDeclaration; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDocType.js b/node_modules/xmlbuilder/lib/XMLDocType.js new file mode 100644 index 0000000..ef043f4 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDocType.js @@ -0,0 +1,186 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDAttList, XMLDTDElement, XMLDTDEntity, XMLDTDNotation, XMLDocType, XMLNamedNodeMap, XMLNode, isObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isObject = require('./Utility').isObject; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + XMLDTDAttList = require('./XMLDTDAttList'); + + XMLDTDEntity = require('./XMLDTDEntity'); + + XMLDTDElement = require('./XMLDTDElement'); + + XMLDTDNotation = require('./XMLDTDNotation'); + + XMLNamedNodeMap = require('./XMLNamedNodeMap'); + + module.exports = XMLDocType = (function(superClass) { 
+ extend(XMLDocType, superClass); + + function XMLDocType(parent, pubID, sysID) { + var child, i, len, ref, ref1, ref2; + XMLDocType.__super__.constructor.call(this, parent); + this.type = NodeType.DocType; + if (parent.children) { + ref = parent.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if (child.type === NodeType.Element) { + this.name = child.name; + break; + } + } + } + this.documentObject = parent; + if (isObject(pubID)) { + ref1 = pubID, pubID = ref1.pubID, sysID = ref1.sysID; + } + if (sysID == null) { + ref2 = [pubID, sysID], sysID = ref2[0], pubID = ref2[1]; + } + if (pubID != null) { + this.pubID = this.stringify.dtdPubID(pubID); + } + if (sysID != null) { + this.sysID = this.stringify.dtdSysID(sysID); + } + } + + Object.defineProperty(XMLDocType.prototype, 'entities', { + get: function() { + var child, i, len, nodes, ref; + nodes = {}; + ref = this.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if ((child.type === NodeType.EntityDeclaration) && !child.pe) { + nodes[child.name] = child; + } + } + return new XMLNamedNodeMap(nodes); + } + }); + + Object.defineProperty(XMLDocType.prototype, 'notations', { + get: function() { + var child, i, len, nodes, ref; + nodes = {}; + ref = this.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if (child.type === NodeType.NotationDeclaration) { + nodes[child.name] = child; + } + } + return new XMLNamedNodeMap(nodes); + } + }); + + Object.defineProperty(XMLDocType.prototype, 'publicId', { + get: function() { + return this.pubID; + } + }); + + Object.defineProperty(XMLDocType.prototype, 'systemId', { + get: function() { + return this.sysID; + } + }); + + Object.defineProperty(XMLDocType.prototype, 'internalSubset', { + get: function() { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + } + }); + + XMLDocType.prototype.element = function(name, value) { + var child; + child = new XMLDTDElement(this, name, value); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.attList = function(elementName, attributeName, attributeType, defaultValueType, defaultValue) { + var child; + child = new XMLDTDAttList(this, elementName, attributeName, attributeType, defaultValueType, defaultValue); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.entity = function(name, value) { + var child; + child = new XMLDTDEntity(this, false, name, value); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.pEntity = function(name, value) { + var child; + child = new XMLDTDEntity(this, true, name, value); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.notation = function(name, value) { + var child; + child = new XMLDTDNotation(this, name, value); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.toString = function(options) { + return this.options.writer.docType(this, this.options.writer.filterOptions(options)); + }; + + XMLDocType.prototype.ele = function(name, value) { + return this.element(name, value); + }; + + XMLDocType.prototype.att = function(elementName, attributeName, attributeType, defaultValueType, defaultValue) { + return this.attList(elementName, attributeName, attributeType, defaultValueType, defaultValue); + }; + + XMLDocType.prototype.ent = function(name, value) { + return this.entity(name, value); + }; + + XMLDocType.prototype.pent = function(name, value) { + return this.pEntity(name, value); + }; + + XMLDocType.prototype.not = function(name, value) { + return this.notation(name, value); + }; + + XMLDocType.prototype.up = function() { + return this.root() || this.documentObject; + }; + + XMLDocType.prototype.isEqualNode = function(node) { + if (!XMLDocType.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.name !== this.name) { + return false; + } + if (node.publicId !== this.publicId) { + return false; + } + if (node.systemId !== this.systemId) { + return false; + } + return true; + }; + + return XMLDocType; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDocument.js b/node_modules/xmlbuilder/lib/XMLDocument.js new file mode 100644 index 0000000..88df56c --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDocument.js @@ -0,0 +1,242 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDOMConfiguration, XMLDOMImplementation, XMLDocument, XMLNode, XMLStringWriter, XMLStringifier, isPlainObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isPlainObject = require('./Utility').isPlainObject; + + XMLDOMImplementation = require('./XMLDOMImplementation'); + + XMLDOMConfiguration = require('./XMLDOMConfiguration'); + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + XMLStringifier = require('./XMLStringifier'); + + XMLStringWriter = require('./XMLStringWriter'); + + module.exports = XMLDocument = (function(superClass) { + extend(XMLDocument, superClass); + + function XMLDocument(options) { + XMLDocument.__super__.constructor.call(this, null); + this.name = 
"#document"; + this.type = NodeType.Document; + this.documentURI = null; + this.domConfig = new XMLDOMConfiguration(); + options || (options = {}); + if (!options.writer) { + options.writer = new XMLStringWriter(); + } + this.options = options; + this.stringify = new XMLStringifier(options); + } + + Object.defineProperty(XMLDocument.prototype, 'implementation', { + value: new XMLDOMImplementation() + }); + + Object.defineProperty(XMLDocument.prototype, 'doctype', { + get: function() { + var child, i, len, ref; + ref = this.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if (child.type === NodeType.DocType) { + return child; + } + } + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'documentElement', { + get: function() { + return this.rootObject || null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'inputEncoding', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'strictErrorChecking', { + get: function() { + return false; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlEncoding', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].encoding; + } else { + return null; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlStandalone', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].standalone === 'yes'; + } else { + return false; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlVersion', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].version; + } else { + return "1.0"; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'URL', { + get: function() { + return this.documentURI; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'origin', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'compatMode', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'characterSet', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'contentType', { + get: function() { + return null; + } + }); + + XMLDocument.prototype.end = function(writer) { + var writerOptions; + writerOptions = {}; + if (!writer) { + writer = this.options.writer; + } else if (isPlainObject(writer)) { + writerOptions = writer; + writer = this.options.writer; + } + return writer.document(this, writer.filterOptions(writerOptions)); + }; + + XMLDocument.prototype.toString = function(options) { + return this.options.writer.document(this, this.options.writer.filterOptions(options)); + }; + + XMLDocument.prototype.createElement = function(tagName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createDocumentFragment = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createTextNode = function(data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createComment = function(data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createCDATASection = function(data) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLDocument.prototype.createProcessingInstruction = function(target, data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createAttribute = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createEntityReference = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByTagName = function(tagname) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.importNode = function(importedNode, deep) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createElementNS = function(namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createAttributeNS = function(namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementById = function(elementId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.adoptNode = function(source) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.normalizeDocument = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.renameNode = function(node, namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByClassName = function(classNames) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createEvent = function(eventInterface) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createRange = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createNodeIterator = function(root, whatToShow, filter) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createTreeWalker = function(root, whatToShow, filter) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + return XMLDocument; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDocumentCB.js b/node_modules/xmlbuilder/lib/XMLDocumentCB.js new file mode 100644 index 0000000..ca1aa1c --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDocumentCB.js @@ -0,0 +1,528 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, WriterState, XMLAttribute, XMLCData, XMLComment, XMLDTDAttList, XMLDTDElement, XMLDTDEntity, XMLDTDNotation, XMLDeclaration, XMLDocType, XMLDocument, XMLDocumentCB, XMLElement, XMLProcessingInstruction, XMLRaw, XMLStringWriter, XMLStringifier, XMLText, getValue, isFunction, isObject, isPlainObject, ref, + hasProp = {}.hasOwnProperty; + + ref = require('./Utility'), isObject = ref.isObject, isFunction = ref.isFunction, isPlainObject = ref.isPlainObject, getValue = ref.getValue; + + NodeType = require('./NodeType'); + + XMLDocument = require('./XMLDocument'); + + XMLElement = require('./XMLElement'); + + XMLCData = require('./XMLCData'); + + XMLComment = require('./XMLComment'); + + XMLRaw = require('./XMLRaw'); + + XMLText = require('./XMLText'); + + XMLProcessingInstruction = require('./XMLProcessingInstruction'); + + XMLDeclaration = require('./XMLDeclaration'); + + XMLDocType = require('./XMLDocType'); + + XMLDTDAttList = require('./XMLDTDAttList'); + + XMLDTDEntity = require('./XMLDTDEntity'); + + XMLDTDElement = require('./XMLDTDElement'); + + XMLDTDNotation = require('./XMLDTDNotation'); + + XMLAttribute = require('./XMLAttribute'); + + XMLStringifier = require('./XMLStringifier'); + + XMLStringWriter = require('./XMLStringWriter'); + + WriterState = require('./WriterState'); + + module.exports = XMLDocumentCB = (function() { + function XMLDocumentCB(options, onData, onEnd) { + var writerOptions; + this.name = "?xml"; + this.type = NodeType.Document; + options || (options = {}); + writerOptions = {}; + if (!options.writer) { + options.writer = new XMLStringWriter(); + } else if (isPlainObject(options.writer)) { + writerOptions = options.writer; + options.writer = new XMLStringWriter(); + } + this.options = options; + this.writer = options.writer; + this.writerOptions = this.writer.filterOptions(writerOptions); + this.stringify = new XMLStringifier(options); + this.onDataCallback = onData || function() {}; + this.onEndCallback = onEnd || function() {}; + this.currentNode = null; + this.currentLevel = -1; + this.openTags = {}; + this.documentStarted = false; + this.documentCompleted = false; + this.root = null; + } + + XMLDocumentCB.prototype.createChildNode = function(node) { + var att, attName, attributes, child, i, len, ref1, ref2; + switch (node.type) { + case NodeType.CData: + this.cdata(node.value); + break; + case NodeType.Comment: + this.comment(node.value); + break; + case NodeType.Element: + attributes = {}; + ref1 = node.attribs; + for (attName in ref1) { + if (!hasProp.call(ref1, attName)) continue; + att = ref1[attName]; + attributes[attName] = att.value; + } + this.node(node.name, attributes); + break; + case NodeType.Dummy: + this.dummy(); + break; + case NodeType.Raw: + this.raw(node.value); + break; + case NodeType.Text: + this.text(node.value); + break; + case NodeType.ProcessingInstruction: + this.instruction(node.target, node.value); + break; + default: + throw new Error("This XML node type is not supported in a JS object: " + node.constructor.name); + } + ref2 = node.children; + for (i = 0, len = ref2.length; i < len; i++) { + child = ref2[i]; + this.createChildNode(child); + if 
(child.type === NodeType.Element) { + this.up(); + } + } + return this; + }; + + XMLDocumentCB.prototype.dummy = function() { + return this; + }; + + XMLDocumentCB.prototype.node = function(name, attributes, text) { + var ref1; + if (name == null) { + throw new Error("Missing node name."); + } + if (this.root && this.currentLevel === -1) { + throw new Error("Document can only have one root node. " + this.debugInfo(name)); + } + this.openCurrent(); + name = getValue(name); + if (attributes == null) { + attributes = {}; + } + attributes = getValue(attributes); + if (!isObject(attributes)) { + ref1 = [attributes, text], text = ref1[0], attributes = ref1[1]; + } + this.currentNode = new XMLElement(this, name, attributes); + this.currentNode.children = false; + this.currentLevel++; + this.openTags[this.currentLevel] = this.currentNode; + if (text != null) { + this.text(text); + } + return this; + }; + + XMLDocumentCB.prototype.element = function(name, attributes, text) { + var child, i, len, oldValidationFlag, ref1, root; + if (this.currentNode && this.currentNode.type === NodeType.DocType) { + this.dtdElement.apply(this, arguments); + } else { + if (Array.isArray(name) || isObject(name) || isFunction(name)) { + oldValidationFlag = this.options.noValidation; + this.options.noValidation = true; + root = new XMLDocument(this.options).element('TEMP_ROOT'); + root.element(name); + this.options.noValidation = oldValidationFlag; + ref1 = root.children; + for (i = 0, len = ref1.length; i < len; i++) { + child = ref1[i]; + this.createChildNode(child); + if (child.type === NodeType.Element) { + this.up(); + } + } + } else { + this.node(name, attributes, text); + } + } + return this; + }; + + XMLDocumentCB.prototype.attribute = function(name, value) { + var attName, attValue; + if (!this.currentNode || this.currentNode.children) { + throw new Error("att() can only be used immediately after an ele() call in callback mode. 
" + this.debugInfo(name)); + } + if (name != null) { + name = getValue(name); + } + if (isObject(name)) { + for (attName in name) { + if (!hasProp.call(name, attName)) continue; + attValue = name[attName]; + this.attribute(attName, attValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + if (this.options.keepNullAttributes && (value == null)) { + this.currentNode.attribs[name] = new XMLAttribute(this, name, ""); + } else if (value != null) { + this.currentNode.attribs[name] = new XMLAttribute(this, name, value); + } + } + return this; + }; + + XMLDocumentCB.prototype.text = function(value) { + var node; + this.openCurrent(); + node = new XMLText(this, value); + this.onData(this.writer.text(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.cdata = function(value) { + var node; + this.openCurrent(); + node = new XMLCData(this, value); + this.onData(this.writer.cdata(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.comment = function(value) { + var node; + this.openCurrent(); + node = new XMLComment(this, value); + this.onData(this.writer.comment(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.raw = function(value) { + var node; + this.openCurrent(); + node = new XMLRaw(this, value); + this.onData(this.writer.raw(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.instruction = function(target, value) { + var i, insTarget, insValue, len, node; + this.openCurrent(); + if (target != null) { + target = getValue(target); + } + if (value != null) { + value = getValue(value); + } + if (Array.isArray(target)) { + for (i = 0, len = target.length; i < len; i++) { + insTarget = target[i]; + this.instruction(insTarget); + } + } else if (isObject(target)) { + for (insTarget in target) { + if (!hasProp.call(target, insTarget)) continue; + insValue = target[insTarget]; + this.instruction(insTarget, insValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + node = new XMLProcessingInstruction(this, target, value); + this.onData(this.writer.processingInstruction(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + } + return this; + }; + + XMLDocumentCB.prototype.declaration = function(version, encoding, standalone) { + var node; + this.openCurrent(); + if (this.documentStarted) { + throw new Error("declaration() must be the first node."); + } + node = new XMLDeclaration(this, version, encoding, standalone); + this.onData(this.writer.declaration(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.doctype = function(root, pubID, sysID) { + this.openCurrent(); + if (root == null) { + throw new Error("Missing root node name."); + } + if (this.root) { + throw new Error("dtd() must come before the root node."); + } + this.currentNode = new XMLDocType(this, pubID, sysID); + this.currentNode.rootNodeName = root; + this.currentNode.children = false; + this.currentLevel++; + this.openTags[this.currentLevel] = this.currentNode; + return this; + }; + + XMLDocumentCB.prototype.dtdElement = function(name, value) { + var node; + this.openCurrent(); + node = new XMLDTDElement(this, name, value); + this.onData(this.writer.dtdElement(node, this.writerOptions, this.currentLevel + 1), 
this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.attList = function(elementName, attributeName, attributeType, defaultValueType, defaultValue) { + var node; + this.openCurrent(); + node = new XMLDTDAttList(this, elementName, attributeName, attributeType, defaultValueType, defaultValue); + this.onData(this.writer.dtdAttList(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.entity = function(name, value) { + var node; + this.openCurrent(); + node = new XMLDTDEntity(this, false, name, value); + this.onData(this.writer.dtdEntity(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.pEntity = function(name, value) { + var node; + this.openCurrent(); + node = new XMLDTDEntity(this, true, name, value); + this.onData(this.writer.dtdEntity(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.notation = function(name, value) { + var node; + this.openCurrent(); + node = new XMLDTDNotation(this, name, value); + this.onData(this.writer.dtdNotation(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.up = function() { + if (this.currentLevel < 0) { + throw new Error("The document node has no parent."); + } + if (this.currentNode) { + if (this.currentNode.children) { + this.closeNode(this.currentNode); + } else { + this.openNode(this.currentNode); + } + this.currentNode = null; + } else { + this.closeNode(this.openTags[this.currentLevel]); + } + delete this.openTags[this.currentLevel]; + this.currentLevel--; + return this; + }; + + XMLDocumentCB.prototype.end = function() { + while (this.currentLevel >= 0) { + this.up(); + } + return this.onEnd(); + }; + + XMLDocumentCB.prototype.openCurrent = function() { + if (this.currentNode) { + this.currentNode.children = true; + return this.openNode(this.currentNode); + } + }; + + XMLDocumentCB.prototype.openNode = function(node) { + var att, chunk, name, ref1; + if (!node.isOpen) { + if (!this.root && this.currentLevel === 0 && node.type === NodeType.Element) { + this.root = node; + } + chunk = ''; + if (node.type === NodeType.Element) { + this.writerOptions.state = WriterState.OpenTag; + chunk = this.writer.indent(node, this.writerOptions, this.currentLevel) + '<' + node.name; + ref1 = node.attribs; + for (name in ref1) { + if (!hasProp.call(ref1, name)) continue; + att = ref1[name]; + chunk += this.writer.attribute(att, this.writerOptions, this.currentLevel); + } + chunk += (node.children ? 
'>' : '/>') + this.writer.endline(node, this.writerOptions, this.currentLevel); + this.writerOptions.state = WriterState.InsideTag; + } else { + this.writerOptions.state = WriterState.OpenTag; + chunk = this.writer.indent(node, this.writerOptions, this.currentLevel) + '<!DOCTYPE ' + node.rootNodeName; + if (node.pubID && node.sysID) { + chunk += ' PUBLIC "' + node.pubID + '" "' + node.sysID + '"'; + } else if (node.pubID) { + chunk += ' PUBLIC "' + node.pubID + '"'; + } else if (node.sysID) { + chunk += ' SYSTEM "' + node.sysID + '"'; + } + if (node.children) { + chunk += ' ['; + this.writerOptions.state = WriterState.InsideTag; + } else { + this.writerOptions.state = WriterState.CloseTag; + chunk += '>'; + } + chunk += this.writer.endline(node, this.writerOptions, this.currentLevel); + } + this.onData(chunk, this.currentLevel); + return node.isOpen = true; + } + }; + + XMLDocumentCB.prototype.closeNode = function(node) { + var chunk; + if (!node.isClosed) { + chunk = ''; + this.writerOptions.state = WriterState.CloseTag; + if (node.type === NodeType.Element) { + chunk = this.writer.indent(node, this.writerOptions, this.currentLevel) + '</' + node.name + '>' + this.writer.endline(node, this.writerOptions, this.currentLevel); + } else { + chunk = this.writer.indent(node, this.writerOptions, this.currentLevel) + ']>' + this.writer.endline(node, this.writerOptions, this.currentLevel); + } + this.writerOptions.state = WriterState.None; + this.onData(chunk, this.currentLevel); + return node.isClosed = true; + } + }; + + XMLDocumentCB.prototype.onData = function(chunk, level) { + this.documentStarted = true; + return this.onDataCallback(chunk, level + 1); + }; + + XMLDocumentCB.prototype.onEnd = function() { + this.documentCompleted = true; + return this.onEndCallback(); + }; + + XMLDocumentCB.prototype.debugInfo = function(name) { + if (name == null) { + return ""; + } else { + return "node: <" + name + ">"; + } + }; + + XMLDocumentCB.prototype.ele = function() { + return this.element.apply(this, arguments); + }; + + XMLDocumentCB.prototype.nod = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLDocumentCB.prototype.txt = function(value) { + return this.text(value); + }; + + XMLDocumentCB.prototype.dat = function(value) { + return this.cdata(value); + }; + + XMLDocumentCB.prototype.com = function(value) { + return this.comment(value); + }; + + XMLDocumentCB.prototype.ins = function(target, value) { + return this.instruction(target, value); + }; + + XMLDocumentCB.prototype.dec = function(version, encoding, standalone) { + return this.declaration(version, encoding, standalone); + }; + + XMLDocumentCB.prototype.dtd = function(root, pubID, sysID) { + return this.doctype(root, pubID, sysID); + }; + + XMLDocumentCB.prototype.e = function(name, attributes, text) { + return this.element(name, attributes, text); + }; + + XMLDocumentCB.prototype.n = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLDocumentCB.prototype.t = function(value) { + return this.text(value); + }; + + XMLDocumentCB.prototype.d = function(value) { + return this.cdata(value); + }; + + XMLDocumentCB.prototype.c = function(value) { + return this.comment(value); + }; + + XMLDocumentCB.prototype.r = function(value) { + return this.raw(value); + }; + + XMLDocumentCB.prototype.i = function(target, value) { + return this.instruction(target, value); + }; + + XMLDocumentCB.prototype.att = function() { + if (this.currentNode && this.currentNode.type === NodeType.DocType) { + return this.attList.apply(this, arguments); + } else { + return this.attribute.apply(this, arguments); + } + }; + + XMLDocumentCB.prototype.a = function() { + if (this.currentNode && this.currentNode.type === NodeType.DocType) { + return this.attList.apply(this, arguments); + } else { + return this.attribute.apply(this, arguments); + } + }; + + XMLDocumentCB.prototype.ent = function(name, value) { + return this.entity(name, value); + }; + + 
XMLDocumentCB.prototype.pent = function(name, value) { + return this.pEntity(name, value); + }; + + XMLDocumentCB.prototype.not = function(name, value) { + return this.notation(name, value); + }; + + return XMLDocumentCB; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDocumentFragment.js b/node_modules/xmlbuilder/lib/XMLDocumentFragment.js new file mode 100644 index 0000000..5d6039c --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDocumentFragment.js @@ -0,0 +1,24 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDocumentFragment, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDocumentFragment = (function(superClass) { + extend(XMLDocumentFragment, superClass); + + function XMLDocumentFragment() { + XMLDocumentFragment.__super__.constructor.call(this, null); + this.name = "#document-fragment"; + this.type = NodeType.DocumentFragment; + } + + return XMLDocumentFragment; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDummy.js b/node_modules/xmlbuilder/lib/XMLDummy.js new file mode 100644 index 0000000..b26083a --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDummy.js @@ -0,0 +1,31 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDummy, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDummy = (function(superClass) { + extend(XMLDummy, superClass); + + function XMLDummy(parent) { + XMLDummy.__super__.constructor.call(this, parent); + this.type = NodeType.Dummy; + } + + XMLDummy.prototype.clone = function() { + return Object.create(this); + }; + + XMLDummy.prototype.toString = function(options) { + return ''; + }; + + return XMLDummy; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLElement.js b/node_modules/xmlbuilder/lib/XMLElement.js new file mode 100644 index 0000000..c165729 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLElement.js @@ -0,0 +1,298 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLAttribute, XMLElement, XMLNamedNodeMap, XMLNode, getValue, isFunction, isObject, ref, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + ref = require('./Utility'), isObject = ref.isObject, isFunction = ref.isFunction, getValue = ref.getValue; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + XMLAttribute = require('./XMLAttribute'); + + XMLNamedNodeMap = require('./XMLNamedNodeMap'); + + module.exports = XMLElement = (function(superClass) { + extend(XMLElement, superClass); + + function 
XMLElement(parent, name, attributes) { + var child, j, len, ref1; + XMLElement.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing element name. " + this.debugInfo()); + } + this.name = this.stringify.name(name); + this.type = NodeType.Element; + this.attribs = {}; + this.schemaTypeInfo = null; + if (attributes != null) { + this.attribute(attributes); + } + if (parent.type === NodeType.Document) { + this.isRoot = true; + this.documentObject = parent; + parent.rootObject = this; + if (parent.children) { + ref1 = parent.children; + for (j = 0, len = ref1.length; j < len; j++) { + child = ref1[j]; + if (child.type === NodeType.DocType) { + child.name = this.name; + break; + } + } + } + } + } + + Object.defineProperty(XMLElement.prototype, 'tagName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLElement.prototype, 'namespaceURI', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLElement.prototype, 'prefix', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLElement.prototype, 'localName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLElement.prototype, 'id', { + get: function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'className', { + get: function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'classList', { + get: function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'attributes', { + get: function() { + if (!this.attributeMap || !this.attributeMap.nodes) { + this.attributeMap = new XMLNamedNodeMap(this.attribs); + } + return this.attributeMap; + } + }); + + XMLElement.prototype.clone = function() { + var att, attName, clonedSelf, ref1; + clonedSelf = Object.create(this); + if (clonedSelf.isRoot) { + clonedSelf.documentObject = null; + } + clonedSelf.attribs = {}; + ref1 = this.attribs; + for (attName in ref1) { + if (!hasProp.call(ref1, attName)) continue; + att = ref1[attName]; + clonedSelf.attribs[attName] = att.clone(); + } + clonedSelf.children = []; + this.children.forEach(function(child) { + var clonedChild; + clonedChild = child.clone(); + clonedChild.parent = clonedSelf; + return clonedSelf.children.push(clonedChild); + }); + return clonedSelf; + }; + + XMLElement.prototype.attribute = function(name, value) { + var attName, attValue; + if (name != null) { + name = getValue(name); + } + if (isObject(name)) { + for (attName in name) { + if (!hasProp.call(name, attName)) continue; + attValue = name[attName]; + this.attribute(attName, attValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + if (this.options.keepNullAttributes && (value == null)) { + this.attribs[name] = new XMLAttribute(this, name, ""); + } else if (value != null) { + this.attribs[name] = new XMLAttribute(this, name, value); + } + } + return this; + }; + + XMLElement.prototype.removeAttribute = function(name) { + var attName, j, len; + if (name == null) { + throw new Error("Missing attribute name. 
" + this.debugInfo()); + } + name = getValue(name); + if (Array.isArray(name)) { + for (j = 0, len = name.length; j < len; j++) { + attName = name[j]; + delete this.attribs[attName]; + } + } else { + delete this.attribs[name]; + } + return this; + }; + + XMLElement.prototype.toString = function(options) { + return this.options.writer.element(this, this.options.writer.filterOptions(options)); + }; + + XMLElement.prototype.att = function(name, value) { + return this.attribute(name, value); + }; + + XMLElement.prototype.a = function(name, value) { + return this.attribute(name, value); + }; + + XMLElement.prototype.getAttribute = function(name) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name].value; + } else { + return null; + } + }; + + XMLElement.prototype.setAttribute = function(name, value) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNode = function(name) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name]; + } else { + return null; + } + }; + + XMLElement.prototype.setAttributeNode = function(newAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.removeAttributeNode = function(oldAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagName = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setAttributeNS = function(namespaceURI, qualifiedName, value) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.removeAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNodeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setAttributeNodeNS = function(newAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.hasAttribute = function(name) { + return this.attribs.hasOwnProperty(name); + }; + + XMLElement.prototype.hasAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setIdAttribute = function(name, isId) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name].isId; + } else { + return isId; + } + }; + + XMLElement.prototype.setIdAttributeNS = function(namespaceURI, localName, isId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setIdAttributeNode = function(idAttr, isId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagName = function(tagname) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLElement.prototype.getElementsByClassName = function(classNames) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.isEqualNode = function(node) { + var i, j, ref1; + if (!XMLElement.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.namespaceURI !== this.namespaceURI) { + return false; + } + if (node.prefix !== this.prefix) { + return false; + } + if (node.localName !== this.localName) { + return false; + } + if (node.attribs.length !== this.attribs.length) { + return false; + } + for (i = j = 0, ref1 = this.attribs.length - 1; 0 <= ref1 ? j <= ref1 : j >= ref1; i = 0 <= ref1 ? ++j : --j) { + if (!this.attribs[i].isEqualNode(node.attribs[i])) { + return false; + } + } + return true; + }; + + return XMLElement; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLNamedNodeMap.js b/node_modules/xmlbuilder/lib/XMLNamedNodeMap.js new file mode 100644 index 0000000..885402d --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLNamedNodeMap.js @@ -0,0 +1,58 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLNamedNodeMap; + + module.exports = XMLNamedNodeMap = (function() { + function XMLNamedNodeMap(nodes) { + this.nodes = nodes; + } + + Object.defineProperty(XMLNamedNodeMap.prototype, 'length', { + get: function() { + return Object.keys(this.nodes).length || 0; + } + }); + + XMLNamedNodeMap.prototype.clone = function() { + return this.nodes = null; + }; + + XMLNamedNodeMap.prototype.getNamedItem = function(name) { + return this.nodes[name]; + }; + + XMLNamedNodeMap.prototype.setNamedItem = function(node) { + var oldNode; + oldNode = this.nodes[node.nodeName]; + this.nodes[node.nodeName] = node; + return oldNode || null; + }; + + XMLNamedNodeMap.prototype.removeNamedItem = function(name) { + var oldNode; + oldNode = this.nodes[name]; + delete this.nodes[name]; + return oldNode || null; + }; + + XMLNamedNodeMap.prototype.item = function(index) { + return this.nodes[Object.keys(this.nodes)[index]] || null; + }; + + XMLNamedNodeMap.prototype.getNamedItemNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented."); + }; + + XMLNamedNodeMap.prototype.setNamedItemNS = function(node) { + throw new Error("This DOM method is not implemented."); + }; + + XMLNamedNodeMap.prototype.removeNamedItemNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented."); + }; + + return XMLNamedNodeMap; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLNode.js b/node_modules/xmlbuilder/lib/XMLNode.js new file mode 100644 index 0000000..e2c7bb7 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLNode.js @@ -0,0 +1,785 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var DocumentPosition, NodeType, XMLCData, XMLComment, XMLDeclaration, XMLDocType, XMLDummy, XMLElement, XMLNamedNodeMap, XMLNode, XMLNodeList, XMLProcessingInstruction, XMLRaw, XMLText, getValue, isEmpty, isFunction, isObject, ref1, + hasProp = {}.hasOwnProperty; + + ref1 = require('./Utility'), isObject = ref1.isObject, isFunction = ref1.isFunction, isEmpty = ref1.isEmpty, getValue = ref1.getValue; + + XMLElement = null; + + XMLCData = null; + + XMLComment = null; + + XMLDeclaration = null; + + XMLDocType = null; + + XMLRaw = null; + + XMLText = null; + + XMLProcessingInstruction = null; + + XMLDummy = null; + + NodeType = null; + + XMLNodeList = null; + + XMLNamedNodeMap = null; + + 
DocumentPosition = null; + + module.exports = XMLNode = (function() { + function XMLNode(parent1) { + this.parent = parent1; + if (this.parent) { + this.options = this.parent.options; + this.stringify = this.parent.stringify; + } + this.value = null; + this.children = []; + this.baseURI = null; + if (!XMLElement) { + XMLElement = require('./XMLElement'); + XMLCData = require('./XMLCData'); + XMLComment = require('./XMLComment'); + XMLDeclaration = require('./XMLDeclaration'); + XMLDocType = require('./XMLDocType'); + XMLRaw = require('./XMLRaw'); + XMLText = require('./XMLText'); + XMLProcessingInstruction = require('./XMLProcessingInstruction'); + XMLDummy = require('./XMLDummy'); + NodeType = require('./NodeType'); + XMLNodeList = require('./XMLNodeList'); + XMLNamedNodeMap = require('./XMLNamedNodeMap'); + DocumentPosition = require('./DocumentPosition'); + } + } + + Object.defineProperty(XMLNode.prototype, 'nodeName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nodeType', { + get: function() { + return this.type; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nodeValue', { + get: function() { + return this.value; + } + }); + + Object.defineProperty(XMLNode.prototype, 'parentNode', { + get: function() { + return this.parent; + } + }); + + Object.defineProperty(XMLNode.prototype, 'childNodes', { + get: function() { + if (!this.childNodeList || !this.childNodeList.nodes) { + this.childNodeList = new XMLNodeList(this.children); + } + return this.childNodeList; + } + }); + + Object.defineProperty(XMLNode.prototype, 'firstChild', { + get: function() { + return this.children[0] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'lastChild', { + get: function() { + return this.children[this.children.length - 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'previousSibling', { + get: function() { + var i; + i = this.parent.children.indexOf(this); + return this.parent.children[i - 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nextSibling', { + get: function() { + var i; + i = this.parent.children.indexOf(this); + return this.parent.children[i + 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'ownerDocument', { + get: function() { + return this.document() || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'textContent', { + get: function() { + var child, j, len, ref2, str; + if (this.nodeType === NodeType.Element || this.nodeType === NodeType.DocumentFragment) { + str = ''; + ref2 = this.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (child.textContent) { + str += child.textContent; + } + } + return str; + } else { + return null; + } + }, + set: function(value) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + } + }); + + XMLNode.prototype.setParent = function(parent) { + var child, j, len, ref2, results; + this.parent = parent; + if (parent) { + this.options = parent.options; + this.stringify = parent.stringify; + } + ref2 = this.children; + results = []; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + results.push(child.setParent(this)); + } + return results; + }; + + XMLNode.prototype.element = function(name, attributes, text) { + var childNode, item, j, k, key, lastChild, len, len1, ref2, ref3, val; + lastChild = null; + if (attributes === null && (text == null)) { + ref2 = [{}, null], attributes = ref2[0], text = ref2[1]; + } + if (attributes == null) { + attributes = {}; + } + attributes = getValue(attributes); + if (!isObject(attributes)) { + ref3 = [attributes, text], text = ref3[0], attributes = ref3[1]; + } + if (name != null) { + name = getValue(name); + } + if (Array.isArray(name)) { + for (j = 0, len = name.length; j < len; j++) { + item = name[j]; + lastChild = this.element(item); + } + } else if (isFunction(name)) { + lastChild = this.element(name.apply()); + } else if (isObject(name)) { + for (key in name) { + if (!hasProp.call(name, key)) continue; + val = name[key]; + if (isFunction(val)) { + val = val.apply(); + } + if (!this.options.ignoreDecorators && this.stringify.convertAttKey && key.indexOf(this.stringify.convertAttKey) === 0) { + lastChild = this.attribute(key.substr(this.stringify.convertAttKey.length), val); + } else if (!this.options.separateArrayItems && Array.isArray(val) && isEmpty(val)) { + lastChild = this.dummy(); + } else if (isObject(val) && isEmpty(val)) { + lastChild = this.element(key); + } else if (!this.options.keepNullNodes && (val == null)) { + lastChild = this.dummy(); + } else if (!this.options.separateArrayItems && Array.isArray(val)) { + for (k = 0, len1 = val.length; k < len1; k++) { + item = val[k]; + childNode = {}; + childNode[key] = item; + lastChild = this.element(childNode); + } + } else if (isObject(val)) { + if (!this.options.ignoreDecorators && this.stringify.convertTextKey && key.indexOf(this.stringify.convertTextKey) === 0) { + lastChild = this.element(val); + } else { + lastChild = this.element(key); + lastChild.element(val); + } + } else { + lastChild = this.element(key, val); + } + } + } else if (!this.options.keepNullNodes && text === null) { + lastChild = this.dummy(); + } else { + if (!this.options.ignoreDecorators && this.stringify.convertTextKey && name.indexOf(this.stringify.convertTextKey) === 0) { + lastChild = this.text(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertCDataKey && name.indexOf(this.stringify.convertCDataKey) === 0) { + lastChild = this.cdata(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertCommentKey && name.indexOf(this.stringify.convertCommentKey) === 0) { + lastChild = this.comment(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertRawKey && name.indexOf(this.stringify.convertRawKey) === 0) { + lastChild = this.raw(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertPIKey && name.indexOf(this.stringify.convertPIKey) === 0) { + lastChild = this.instruction(name.substr(this.stringify.convertPIKey.length), text); + } else { + lastChild = this.node(name, attributes, text); + } + } + if (lastChild == null) { + throw new Error("Could not create any elements with: " + name + ". 
" + this.debugInfo()); + } + return lastChild; + }; + + XMLNode.prototype.insertBefore = function(name, attributes, text) { + var child, i, newChild, refChild, removed; + if (name != null ? name.type : void 0) { + newChild = name; + refChild = attributes; + newChild.setParent(this); + if (refChild) { + i = children.indexOf(refChild); + removed = children.splice(i); + children.push(newChild); + Array.prototype.push.apply(children, removed); + } else { + children.push(newChild); + } + return newChild; + } else { + if (this.isRoot) { + throw new Error("Cannot insert elements at root level. " + this.debugInfo(name)); + } + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.element(name, attributes, text); + Array.prototype.push.apply(this.parent.children, removed); + return child; + } + }; + + XMLNode.prototype.insertAfter = function(name, attributes, text) { + var child, i, removed; + if (this.isRoot) { + throw new Error("Cannot insert elements at root level. " + this.debugInfo(name)); + } + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.element(name, attributes, text); + Array.prototype.push.apply(this.parent.children, removed); + return child; + }; + + XMLNode.prototype.remove = function() { + var i, ref2; + if (this.isRoot) { + throw new Error("Cannot remove the root element. " + this.debugInfo()); + } + i = this.parent.children.indexOf(this); + [].splice.apply(this.parent.children, [i, i - i + 1].concat(ref2 = [])), ref2; + return this.parent; + }; + + XMLNode.prototype.node = function(name, attributes, text) { + var child, ref2; + if (name != null) { + name = getValue(name); + } + attributes || (attributes = {}); + attributes = getValue(attributes); + if (!isObject(attributes)) { + ref2 = [attributes, text], text = ref2[0], attributes = ref2[1]; + } + child = new XMLElement(this, name, attributes); + if (text != null) { + child.text(text); + } + this.children.push(child); + return child; + }; + + XMLNode.prototype.text = function(value) { + var child; + if (isObject(value)) { + this.element(value); + } + child = new XMLText(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.cdata = function(value) { + var child; + child = new XMLCData(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.comment = function(value) { + var child; + child = new XMLComment(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.commentBefore = function(value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.comment(value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.commentAfter = function(value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.comment(value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.raw = function(value) { + var child; + child = new XMLRaw(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.dummy = function() { + var child; + child = new XMLDummy(this); + return child; + }; + + XMLNode.prototype.instruction = function(target, value) { + var insTarget, insValue, instruction, j, len; + if (target != null) { + target = getValue(target); + } + if (value != null) 
{ + value = getValue(value); + } + if (Array.isArray(target)) { + for (j = 0, len = target.length; j < len; j++) { + insTarget = target[j]; + this.instruction(insTarget); + } + } else if (isObject(target)) { + for (insTarget in target) { + if (!hasProp.call(target, insTarget)) continue; + insValue = target[insTarget]; + this.instruction(insTarget, insValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + instruction = new XMLProcessingInstruction(this, target, value); + this.children.push(instruction); + } + return this; + }; + + XMLNode.prototype.instructionBefore = function(target, value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.instruction(target, value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.instructionAfter = function(target, value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.instruction(target, value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.declaration = function(version, encoding, standalone) { + var doc, xmldec; + doc = this.document(); + xmldec = new XMLDeclaration(doc, version, encoding, standalone); + if (doc.children.length === 0) { + doc.children.unshift(xmldec); + } else if (doc.children[0].type === NodeType.Declaration) { + doc.children[0] = xmldec; + } else { + doc.children.unshift(xmldec); + } + return doc.root() || doc; + }; + + XMLNode.prototype.dtd = function(pubID, sysID) { + var child, doc, doctype, i, j, k, len, len1, ref2, ref3; + doc = this.document(); + doctype = new XMLDocType(doc, pubID, sysID); + ref2 = doc.children; + for (i = j = 0, len = ref2.length; j < len; i = ++j) { + child = ref2[i]; + if (child.type === NodeType.DocType) { + doc.children[i] = doctype; + return doctype; + } + } + ref3 = doc.children; + for (i = k = 0, len1 = ref3.length; k < len1; i = ++k) { + child = ref3[i]; + if (child.isRoot) { + doc.children.splice(i, 0, doctype); + return doctype; + } + } + doc.children.push(doctype); + return doctype; + }; + + XMLNode.prototype.up = function() { + if (this.isRoot) { + throw new Error("The root node has no parent. Use doc() if you need to get the document object."); + } + return this.parent; + }; + + XMLNode.prototype.root = function() { + var node; + node = this; + while (node) { + if (node.type === NodeType.Document) { + return node.rootObject; + } else if (node.isRoot) { + return node; + } else { + node = node.parent; + } + } + }; + + XMLNode.prototype.document = function() { + var node; + node = this; + while (node) { + if (node.type === NodeType.Document) { + return node; + } else { + node = node.parent; + } + } + }; + + XMLNode.prototype.end = function(options) { + return this.document().end(options); + }; + + XMLNode.prototype.prev = function() { + var i; + i = this.parent.children.indexOf(this); + if (i < 1) { + throw new Error("Already at the first node. " + this.debugInfo()); + } + return this.parent.children[i - 1]; + }; + + XMLNode.prototype.next = function() { + var i; + i = this.parent.children.indexOf(this); + if (i === -1 || i === this.parent.children.length - 1) { + throw new Error("Already at the last node. 
" + this.debugInfo()); + } + return this.parent.children[i + 1]; + }; + + XMLNode.prototype.importDocument = function(doc) { + var clonedRoot; + clonedRoot = doc.root().clone(); + clonedRoot.parent = this; + clonedRoot.isRoot = false; + this.children.push(clonedRoot); + return this; + }; + + XMLNode.prototype.debugInfo = function(name) { + var ref2, ref3; + name = name || this.name; + if ((name == null) && !((ref2 = this.parent) != null ? ref2.name : void 0)) { + return ""; + } else if (name == null) { + return "parent: <" + this.parent.name + ">"; + } else if (!((ref3 = this.parent) != null ? ref3.name : void 0)) { + return "node: <" + name + ">"; + } else { + return "node: <" + name + ">, parent: <" + this.parent.name + ">"; + } + }; + + XMLNode.prototype.ele = function(name, attributes, text) { + return this.element(name, attributes, text); + }; + + XMLNode.prototype.nod = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLNode.prototype.txt = function(value) { + return this.text(value); + }; + + XMLNode.prototype.dat = function(value) { + return this.cdata(value); + }; + + XMLNode.prototype.com = function(value) { + return this.comment(value); + }; + + XMLNode.prototype.ins = function(target, value) { + return this.instruction(target, value); + }; + + XMLNode.prototype.doc = function() { + return this.document(); + }; + + XMLNode.prototype.dec = function(version, encoding, standalone) { + return this.declaration(version, encoding, standalone); + }; + + XMLNode.prototype.e = function(name, attributes, text) { + return this.element(name, attributes, text); + }; + + XMLNode.prototype.n = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLNode.prototype.t = function(value) { + return this.text(value); + }; + + XMLNode.prototype.d = function(value) { + return this.cdata(value); + }; + + XMLNode.prototype.c = function(value) { + return this.comment(value); + }; + + XMLNode.prototype.r = function(value) { + return this.raw(value); + }; + + XMLNode.prototype.i = function(target, value) { + return this.instruction(target, value); + }; + + XMLNode.prototype.u = function() { + return this.up(); + }; + + XMLNode.prototype.importXMLBuilder = function(doc) { + return this.importDocument(doc); + }; + + XMLNode.prototype.replaceChild = function(newChild, oldChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.removeChild = function(oldChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.appendChild = function(newChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.hasChildNodes = function() { + return this.children.length !== 0; + }; + + XMLNode.prototype.cloneNode = function(deep) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.normalize = function() { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLNode.prototype.isSupported = function(feature, version) { + return true; + }; + + XMLNode.prototype.hasAttributes = function() { + return this.attribs.length !== 0; + }; + + XMLNode.prototype.compareDocumentPosition = function(other) { + var ref, res; + ref = this; + if (ref === other) { + return 0; + } else if (this.document() !== other.document()) { + res = DocumentPosition.Disconnected | DocumentPosition.ImplementationSpecific; + if (Math.random() < 0.5) { + res |= DocumentPosition.Preceding; + } else { + res |= DocumentPosition.Following; + } + return res; + } else if (ref.isAncestor(other)) { + return DocumentPosition.Contains | DocumentPosition.Preceding; + } else if (ref.isDescendant(other)) { + return DocumentPosition.Contains | DocumentPosition.Following; + } else if (ref.isPreceding(other)) { + return DocumentPosition.Preceding; + } else { + return DocumentPosition.Following; + } + }; + + XMLNode.prototype.isSameNode = function(other) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.lookupPrefix = function(namespaceURI) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.isDefaultNamespace = function(namespaceURI) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.lookupNamespaceURI = function(prefix) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.isEqualNode = function(node) { + var i, j, ref2; + if (node.nodeType !== this.nodeType) { + return false; + } + if (node.children.length !== this.children.length) { + return false; + } + for (i = j = 0, ref2 = this.children.length - 1; 0 <= ref2 ? j <= ref2 : j >= ref2; i = 0 <= ref2 ? ++j : --j) { + if (!this.children[i].isEqualNode(node.children[i])) { + return false; + } + } + return true; + }; + + XMLNode.prototype.getFeature = function(feature, version) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.setUserData = function(key, data, handler) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.getUserData = function(key) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLNode.prototype.contains = function(other) { + if (!other) { + return false; + } + return other === this || this.isDescendant(other); + }; + + XMLNode.prototype.isDescendant = function(node) { + var child, isDescendantChild, j, len, ref2; + ref2 = this.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (node === child) { + return true; + } + isDescendantChild = child.isDescendant(node); + if (isDescendantChild) { + return true; + } + } + return false; + }; + + XMLNode.prototype.isAncestor = function(node) { + return node.isDescendant(this); + }; + + XMLNode.prototype.isPreceding = function(node) { + var nodePos, thisPos; + nodePos = this.treePosition(node); + thisPos = this.treePosition(this); + if (nodePos === -1 || thisPos === -1) { + return false; + } else { + return nodePos < thisPos; + } + }; + + XMLNode.prototype.isFollowing = function(node) { + var nodePos, thisPos; + nodePos = this.treePosition(node); + thisPos = this.treePosition(this); + if (nodePos === -1 || thisPos === -1) { + return false; + } else { + return nodePos > thisPos; + } + }; + + XMLNode.prototype.treePosition = function(node) { + var found, pos; + pos = 0; + found = false; + this.foreachTreeNode(this.document(), function(childNode) { + pos++; + if (!found && childNode === node) { + return found = true; + } + }); + if (found) { + return pos; + } else { + return -1; + } + }; + + XMLNode.prototype.foreachTreeNode = function(node, func) { + var child, j, len, ref2, res; + node || (node = this.document()); + ref2 = node.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (res = func(child)) { + return res; + } else { + res = this.foreachTreeNode(child, func); + if (res) { + return res; + } + } + } + }; + + return XMLNode; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLNodeFilter.js b/node_modules/xmlbuilder/lib/XMLNodeFilter.js new file mode 100644 index 0000000..ce32fd5 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLNodeFilter.js @@ -0,0 +1,48 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLNodeFilter; + + module.exports = XMLNodeFilter = (function() { + function XMLNodeFilter() {} + + XMLNodeFilter.prototype.FilterAccept = 1; + + XMLNodeFilter.prototype.FilterReject = 2; + + XMLNodeFilter.prototype.FilterSkip = 3; + + XMLNodeFilter.prototype.ShowAll = 0xffffffff; + + XMLNodeFilter.prototype.ShowElement = 0x1; + + XMLNodeFilter.prototype.ShowAttribute = 0x2; + + XMLNodeFilter.prototype.ShowText = 0x4; + + XMLNodeFilter.prototype.ShowCDataSection = 0x8; + + XMLNodeFilter.prototype.ShowEntityReference = 0x10; + + XMLNodeFilter.prototype.ShowEntity = 0x20; + + XMLNodeFilter.prototype.ShowProcessingInstruction = 0x40; + + XMLNodeFilter.prototype.ShowComment = 0x80; + + XMLNodeFilter.prototype.ShowDocument = 0x100; + + XMLNodeFilter.prototype.ShowDocumentType = 0x200; + + XMLNodeFilter.prototype.ShowDocumentFragment = 0x400; + + XMLNodeFilter.prototype.ShowNotation = 0x800; + + XMLNodeFilter.prototype.acceptNode = function(node) { + throw new Error("This DOM method is not implemented."); + }; + + return XMLNodeFilter; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLNodeList.js b/node_modules/xmlbuilder/lib/XMLNodeList.js new file mode 100644 index 0000000..3414a3e --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLNodeList.js @@ -0,0 +1,28 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLNodeList; + + module.exports = XMLNodeList = (function() { + 
function XMLNodeList(nodes) { + this.nodes = nodes; + } + + Object.defineProperty(XMLNodeList.prototype, 'length', { + get: function() { + return this.nodes.length || 0; + } + }); + + XMLNodeList.prototype.clone = function() { + return this.nodes = null; + }; + + XMLNodeList.prototype.item = function(index) { + return this.nodes[index] || null; + }; + + return XMLNodeList; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLProcessingInstruction.js b/node_modules/xmlbuilder/lib/XMLProcessingInstruction.js new file mode 100644 index 0000000..d4333d4 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLProcessingInstruction.js @@ -0,0 +1,49 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLCharacterData, XMLProcessingInstruction, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLCharacterData = require('./XMLCharacterData'); + + module.exports = XMLProcessingInstruction = (function(superClass) { + extend(XMLProcessingInstruction, superClass); + + function XMLProcessingInstruction(parent, target, value) { + XMLProcessingInstruction.__super__.constructor.call(this, parent); + if (target == null) { + throw new Error("Missing instruction target. " + this.debugInfo()); + } + this.type = NodeType.ProcessingInstruction; + this.target = this.stringify.insTarget(target); + this.name = this.target; + if (value) { + this.value = this.stringify.insValue(value); + } + } + + XMLProcessingInstruction.prototype.clone = function() { + return Object.create(this); + }; + + XMLProcessingInstruction.prototype.toString = function(options) { + return this.options.writer.processingInstruction(this, this.options.writer.filterOptions(options)); + }; + + XMLProcessingInstruction.prototype.isEqualNode = function(node) { + if (!XMLProcessingInstruction.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.target !== this.target) { + return false; + } + return true; + }; + + return XMLProcessingInstruction; + + })(XMLCharacterData); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLRaw.js b/node_modules/xmlbuilder/lib/XMLRaw.js new file mode 100644 index 0000000..b592850 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLRaw.js @@ -0,0 +1,35 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLNode, XMLRaw, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLNode = require('./XMLNode'); + + module.exports = XMLRaw = (function(superClass) { + extend(XMLRaw, superClass); + + function XMLRaw(parent, text) { + XMLRaw.__super__.constructor.call(this, parent); + if (text == null) { + throw new Error("Missing raw text. 
" + this.debugInfo()); + } + this.type = NodeType.Raw; + this.value = this.stringify.raw(text); + } + + XMLRaw.prototype.clone = function() { + return Object.create(this); + }; + + XMLRaw.prototype.toString = function(options) { + return this.options.writer.raw(this, this.options.writer.filterOptions(options)); + }; + + return XMLRaw; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLStreamWriter.js b/node_modules/xmlbuilder/lib/XMLStreamWriter.js new file mode 100644 index 0000000..159dc6b --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLStreamWriter.js @@ -0,0 +1,176 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, WriterState, XMLStreamWriter, XMLWriterBase, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLWriterBase = require('./XMLWriterBase'); + + WriterState = require('./WriterState'); + + module.exports = XMLStreamWriter = (function(superClass) { + extend(XMLStreamWriter, superClass); + + function XMLStreamWriter(stream, options) { + this.stream = stream; + XMLStreamWriter.__super__.constructor.call(this, options); + } + + XMLStreamWriter.prototype.endline = function(node, options, level) { + if (node.isLastRootNode && options.state === WriterState.CloseTag) { + return ''; + } else { + return XMLStreamWriter.__super__.endline.call(this, node, options, level); + } + }; + + XMLStreamWriter.prototype.document = function(doc, options) { + var child, i, j, k, len, len1, ref, ref1, results; + ref = doc.children; + for (i = j = 0, len = ref.length; j < len; i = ++j) { + child = ref[i]; + child.isLastRootNode = i === doc.children.length - 1; + } + options = this.filterOptions(options); + ref1 = doc.children; + results = []; + for (k = 0, len1 = ref1.length; k < len1; k++) { + child = ref1[k]; + results.push(this.writeChildNode(child, options, 0)); + } + return results; + }; + + XMLStreamWriter.prototype.attribute = function(att, options, level) { + return this.stream.write(XMLStreamWriter.__super__.attribute.call(this, att, options, level)); + }; + + XMLStreamWriter.prototype.cdata = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.cdata.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.comment = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.comment.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.declaration = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.declaration.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.docType = function(node, options, level) { + var child, j, len, ref; + level || (level = 0); + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + this.stream.write(this.indent(node, options, level)); + this.stream.write(' 0) { + this.stream.write(' ['); + this.stream.write(this.endline(node, options, level)); + options.state = WriterState.InsideTag; + ref = node.children; + for (j = 0, len = ref.length; j < len; j++) { + child = ref[j]; + this.writeChildNode(child, options, level + 1); + } + options.state = WriterState.CloseTag; + this.stream.write(']'); + } + options.state = WriterState.CloseTag; + 
this.stream.write(options.spaceBeforeSlash + '>'); + this.stream.write(this.endline(node, options, level)); + options.state = WriterState.None; + return this.closeNode(node, options, level); + }; + + XMLStreamWriter.prototype.element = function(node, options, level) { + var att, child, childNodeCount, firstChildNode, j, len, name, prettySuppressed, ref, ref1; + level || (level = 0); + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + this.stream.write(this.indent(node, options, level) + '<' + node.name); + ref = node.attribs; + for (name in ref) { + if (!hasProp.call(ref, name)) continue; + att = ref[name]; + this.attribute(att, options, level); + } + childNodeCount = node.children.length; + firstChildNode = childNodeCount === 0 ? null : node.children[0]; + if (childNodeCount === 0 || node.children.every(function(e) { + return (e.type === NodeType.Text || e.type === NodeType.Raw) && e.value === ''; + })) { + if (options.allowEmpty) { + this.stream.write('>'); + options.state = WriterState.CloseTag; + this.stream.write(''); + } else { + options.state = WriterState.CloseTag; + this.stream.write(options.spaceBeforeSlash + '/>'); + } + } else if (options.pretty && childNodeCount === 1 && (firstChildNode.type === NodeType.Text || firstChildNode.type === NodeType.Raw) && (firstChildNode.value != null)) { + this.stream.write('>'); + options.state = WriterState.InsideTag; + options.suppressPrettyCount++; + prettySuppressed = true; + this.writeChildNode(firstChildNode, options, level + 1); + options.suppressPrettyCount--; + prettySuppressed = false; + options.state = WriterState.CloseTag; + this.stream.write(''); + } else { + this.stream.write('>' + this.endline(node, options, level)); + options.state = WriterState.InsideTag; + ref1 = node.children; + for (j = 0, len = ref1.length; j < len; j++) { + child = ref1[j]; + this.writeChildNode(child, options, level + 1); + } + options.state = WriterState.CloseTag; + this.stream.write(this.indent(node, options, level) + ''); + } + this.stream.write(this.endline(node, options, level)); + options.state = WriterState.None; + return this.closeNode(node, options, level); + }; + + XMLStreamWriter.prototype.processingInstruction = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.processingInstruction.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.raw = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.raw.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.text = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.text.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.dtdAttList = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.dtdAttList.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.dtdElement = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.dtdElement.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.dtdEntity = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.dtdEntity.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.dtdNotation = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.dtdNotation.call(this, node, options, level)); + }; + + return XMLStreamWriter; + + })(XMLWriterBase); + +}).call(this); diff --git 
a/node_modules/xmlbuilder/lib/XMLStringWriter.js b/node_modules/xmlbuilder/lib/XMLStringWriter.js new file mode 100644 index 0000000..7187017 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLStringWriter.js @@ -0,0 +1,35 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLStringWriter, XMLWriterBase, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLWriterBase = require('./XMLWriterBase'); + + module.exports = XMLStringWriter = (function(superClass) { + extend(XMLStringWriter, superClass); + + function XMLStringWriter(options) { + XMLStringWriter.__super__.constructor.call(this, options); + } + + XMLStringWriter.prototype.document = function(doc, options) { + var child, i, len, r, ref; + options = this.filterOptions(options); + r = ''; + ref = doc.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + r += this.writeChildNode(child, options, 0); + } + if (options.pretty && r.slice(-options.newline.length) === options.newline) { + r = r.slice(0, -options.newline.length); + } + return r; + }; + + return XMLStringWriter; + + })(XMLWriterBase); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLStringifier.js b/node_modules/xmlbuilder/lib/XMLStringifier.js new file mode 100644 index 0000000..a39475c --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLStringifier.js @@ -0,0 +1,240 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLStringifier, + bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, + hasProp = {}.hasOwnProperty; + + module.exports = XMLStringifier = (function() { + function XMLStringifier(options) { + this.assertLegalName = bind(this.assertLegalName, this); + this.assertLegalChar = bind(this.assertLegalChar, this); + var key, ref, value; + options || (options = {}); + this.options = options; + if (!this.options.version) { + this.options.version = '1.0'; + } + ref = options.stringify || {}; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this[key] = value; + } + } + + XMLStringifier.prototype.name = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalName('' + val || ''); + }; + + XMLStringifier.prototype.text = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar(this.textEscape('' + val || '')); + }; + + XMLStringifier.prototype.cdata = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + val = val.replace(']]>', ']]]]>'); + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.comment = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (val.match(/--/)) { + throw new Error("Comment text cannot contain double-hypen: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.raw = function(val) { + if (this.options.noValidation) { + return val; + } + return '' + val || ''; + }; + + XMLStringifier.prototype.attValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar(this.attEscape(val = '' + val || '')); + }; + + XMLStringifier.prototype.insTarget = function(val) { + if (this.options.noValidation) { + return val; + } + return 
this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.insValue = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (val.match(/\?>/)) { + throw new Error("Invalid processing instruction value: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.xmlVersion = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (!val.match(/1\.[0-9]+/)) { + throw new Error("Invalid version number: " + val); + } + return val; + }; + + XMLStringifier.prototype.xmlEncoding = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (!val.match(/^[A-Za-z](?:[A-Za-z0-9._-])*$/)) { + throw new Error("Invalid encoding: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.xmlStandalone = function(val) { + if (this.options.noValidation) { + return val; + } + if (val) { + return "yes"; + } else { + return "no"; + } + }; + + XMLStringifier.prototype.dtdPubID = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdSysID = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdElementValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdAttType = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdAttDefault = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdEntityValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdNData = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.convertAttKey = '@'; + + XMLStringifier.prototype.convertPIKey = '?'; + + XMLStringifier.prototype.convertTextKey = '#text'; + + XMLStringifier.prototype.convertCDataKey = '#cdata'; + + XMLStringifier.prototype.convertCommentKey = '#comment'; + + XMLStringifier.prototype.convertRawKey = '#raw'; + + XMLStringifier.prototype.assertLegalChar = function(str) { + var regex, res; + if (this.options.noValidation) { + return str; + } + regex = ''; + if (this.options.version === '1.0') { + regex = /[\0-\x08\x0B\f\x0E-\x1F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; + if (res = str.match(regex)) { + throw new Error("Invalid character in string: " + str + " at index " + res.index); + } + } else if (this.options.version === '1.1') { + regex = /[\0\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; + if (res = str.match(regex)) { + throw new Error("Invalid character in string: " + str + " at index " + res.index); + } + } + return str; + }; + + XMLStringifier.prototype.assertLegalName = function(str) { + var regex; + if (this.options.noValidation) { + return str; + } + this.assertLegalChar(str); + regex = 
/^([:A-Z_a-z\xC0-\xD6\xD8-\xF6\xF8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]|[\uD800-\uDB7F][\uDC00-\uDFFF])([\x2D\.0-:A-Z_a-z\xB7\xC0-\xD6\xD8-\xF6\xF8-\u037D\u037F-\u1FFF\u200C\u200D\u203F\u2040\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]|[\uD800-\uDB7F][\uDC00-\uDFFF])*$/; + if (!str.match(regex)) { + throw new Error("Invalid character in name"); + } + return str; + }; + + XMLStringifier.prototype.textEscape = function(str) { + var ampregex; + if (this.options.noValidation) { + return str; + } + ampregex = this.options.noDoubleEncoding ? /(?!&\S+;)&/g : /&/g; + return str.replace(ampregex, '&').replace(//g, '>').replace(/\r/g, ' '); + }; + + XMLStringifier.prototype.attEscape = function(str) { + var ampregex; + if (this.options.noValidation) { + return str; + } + ampregex = this.options.noDoubleEncoding ? /(?!&\S+;)&/g : /&/g; + return str.replace(ampregex, '&').replace(/ 0) { + return new Array(indentLevel).join(options.indent); + } + } + return ''; + }; + + XMLWriterBase.prototype.endline = function(node, options, level) { + if (!options.pretty || options.suppressPrettyCount) { + return ''; + } else { + return options.newline; + } + }; + + XMLWriterBase.prototype.attribute = function(att, options, level) { + var r; + this.openAttribute(att, options, level); + r = ' ' + att.name + '="' + att.value + '"'; + this.closeAttribute(att, options, level); + return r; + }; + + XMLWriterBase.prototype.cdata = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.comment = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.declaration = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + ''; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.docType = function(node, options, level) { + var child, i, len, r, ref; + level || (level = 0); + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level); + r += ' 0) { + r += ' ['; + r += this.endline(node, options, level); + options.state = WriterState.InsideTag; + ref = node.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + r += this.writeChildNode(child, options, level + 1); + } + options.state = WriterState.CloseTag; + r += ']'; + } + options.state = WriterState.CloseTag; + r += options.spaceBeforeSlash + '>'; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.element = function(node, options, level) { + var att, child, childNodeCount, firstChildNode, i, j, len, len1, name, prettySuppressed, r, ref, ref1, ref2; + level || (level = 0); + prettySuppressed = false; + r = ''; + this.openNode(node, options, level); + 
options.state = WriterState.OpenTag; + r += this.indent(node, options, level) + '<' + node.name; + ref = node.attribs; + for (name in ref) { + if (!hasProp.call(ref, name)) continue; + att = ref[name]; + r += this.attribute(att, options, level); + } + childNodeCount = node.children.length; + firstChildNode = childNodeCount === 0 ? null : node.children[0]; + if (childNodeCount === 0 || node.children.every(function(e) { + return (e.type === NodeType.Text || e.type === NodeType.Raw) && e.value === ''; + })) { + if (options.allowEmpty) { + r += '>'; + options.state = WriterState.CloseTag; + r += '' + this.endline(node, options, level); + } else { + options.state = WriterState.CloseTag; + r += options.spaceBeforeSlash + '/>' + this.endline(node, options, level); + } + } else if (options.pretty && childNodeCount === 1 && (firstChildNode.type === NodeType.Text || firstChildNode.type === NodeType.Raw) && (firstChildNode.value != null)) { + r += '>'; + options.state = WriterState.InsideTag; + options.suppressPrettyCount++; + prettySuppressed = true; + r += this.writeChildNode(firstChildNode, options, level + 1); + options.suppressPrettyCount--; + prettySuppressed = false; + options.state = WriterState.CloseTag; + r += '' + this.endline(node, options, level); + } else { + if (options.dontPrettyTextNodes) { + ref1 = node.children; + for (i = 0, len = ref1.length; i < len; i++) { + child = ref1[i]; + if ((child.type === NodeType.Text || child.type === NodeType.Raw) && (child.value != null)) { + options.suppressPrettyCount++; + prettySuppressed = true; + break; + } + } + } + r += '>' + this.endline(node, options, level); + options.state = WriterState.InsideTag; + ref2 = node.children; + for (j = 0, len1 = ref2.length; j < len1; j++) { + child = ref2[j]; + r += this.writeChildNode(child, options, level + 1); + } + options.state = WriterState.CloseTag; + r += this.indent(node, options, level) + ''; + if (prettySuppressed) { + options.suppressPrettyCount--; + } + r += this.endline(node, options, level); + options.state = WriterState.None; + } + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.writeChildNode = function(node, options, level) { + switch (node.type) { + case NodeType.CData: + return this.cdata(node, options, level); + case NodeType.Comment: + return this.comment(node, options, level); + case NodeType.Element: + return this.element(node, options, level); + case NodeType.Raw: + return this.raw(node, options, level); + case NodeType.Text: + return this.text(node, options, level); + case NodeType.ProcessingInstruction: + return this.processingInstruction(node, options, level); + case NodeType.Dummy: + return ''; + case NodeType.Declaration: + return this.declaration(node, options, level); + case NodeType.DocType: + return this.docType(node, options, level); + case NodeType.AttributeDeclaration: + return this.dtdAttList(node, options, level); + case NodeType.ElementDeclaration: + return this.dtdElement(node, options, level); + case NodeType.EntityDeclaration: + return this.dtdEntity(node, options, level); + case NodeType.NotationDeclaration: + return this.dtdNotation(node, options, level); + default: + throw new Error("Unknown XML node type: " + node.constructor.name); + } + }; + + XMLWriterBase.prototype.processingInstruction = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + ''; + r += this.endline(node, options, level); + options.state = 
WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.raw = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level); + options.state = WriterState.InsideTag; + r += node.value; + options.state = WriterState.CloseTag; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.text = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level); + options.state = WriterState.InsideTag; + r += node.value; + options.state = WriterState.CloseTag; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.dtdAttList = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.dtdElement = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.dtdEntity = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.dtdNotation = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.openNode = function(node, options, level) {}; + + XMLWriterBase.prototype.closeNode = function(node, options, level) {}; + + XMLWriterBase.prototype.openAttribute = function(att, options, level) {}; + + XMLWriterBase.prototype.closeAttribute = function(att, options, level) {}; + + return XMLWriterBase; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/index.js b/node_modules/xmlbuilder/lib/index.js new file mode 100644 index 0000000..b1ed263 --- /dev/null +++ b/node_modules/xmlbuilder/lib/index.js @@ -0,0 +1,65 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, WriterState, XMLDOMImplementation, XMLDocument, XMLDocumentCB, XMLStreamWriter, XMLStringWriter, assign, isFunction, ref; + + ref = require('./Utility'), assign = ref.assign, isFunction = ref.isFunction; + + XMLDOMImplementation = require('./XMLDOMImplementation'); + + XMLDocument = require('./XMLDocument'); + + XMLDocumentCB = require('./XMLDocumentCB'); + + XMLStringWriter = require('./XMLStringWriter'); + + XMLStreamWriter = require('./XMLStreamWriter'); + + NodeType = require('./NodeType'); + + WriterState = require('./WriterState'); + + module.exports.create = function(name, xmldec, doctype, options) { + var doc, root; + 
if (name == null) { + throw new Error("Root element needs a name."); + } + options = assign({}, xmldec, doctype, options); + doc = new XMLDocument(options); + root = doc.element(name); + if (!options.headless) { + doc.declaration(options); + if ((options.pubID != null) || (options.sysID != null)) { + doc.dtd(options); + } + } + return root; + }; + + module.exports.begin = function(options, onData, onEnd) { + var ref1; + if (isFunction(options)) { + ref1 = [options, onData], onData = ref1[0], onEnd = ref1[1]; + options = {}; + } + if (onData) { + return new XMLDocumentCB(options, onData, onEnd); + } else { + return new XMLDocument(options); + } + }; + + module.exports.stringWriter = function(options) { + return new XMLStringWriter(options); + }; + + module.exports.streamWriter = function(stream, options) { + return new XMLStreamWriter(stream, options); + }; + + module.exports.implementation = new XMLDOMImplementation(); + + module.exports.nodeType = NodeType; + + module.exports.writerState = WriterState; + +}).call(this); diff --git a/node_modules/xmlbuilder/package.json b/node_modules/xmlbuilder/package.json new file mode 100644 index 0000000..512cd97 --- /dev/null +++ b/node_modules/xmlbuilder/package.json @@ -0,0 +1,39 @@ +{ + "name": "xmlbuilder", + "version": "11.0.1", + "keywords": [ + "xml", + "xmlbuilder" + ], + "homepage": "http://github.com/oozcitak/xmlbuilder-js", + "description": "An XML builder for node.js", + "author": "Ozgur Ozcitak ", + "contributors": [], + "license": "MIT", + "repository": { + "type": "git", + "url": "git://github.com/oozcitak/xmlbuilder-js.git" + }, + "bugs": { + "url": "http://github.com/oozcitak/xmlbuilder-js/issues" + }, + "main": "./lib/index", + "typings": "./typings/index.d.ts", + "engines": { + "node": ">=4.0" + }, + "dependencies": {}, + "devDependencies": { + "coffeescript": "1.*", + "mocha": "*", + "coffee-coverage": "2.*", + "istanbul": "*", + "coveralls": "*", + "xpath": "*" + }, + "scripts": { + "prepublishOnly": "coffee -co lib src", + "postpublish": "rm -rf lib", + "test": "mocha \"test/**/*.coffee\" && istanbul report text lcov" + } +} diff --git a/node_modules/xmlbuilder/typings/index.d.ts b/node_modules/xmlbuilder/typings/index.d.ts new file mode 100644 index 0000000..3e0e5b0 --- /dev/null +++ b/node_modules/xmlbuilder/typings/index.d.ts @@ -0,0 +1,153 @@ +// Type definitions for xmlbuilder +// Project: https://github.com/oozcitak/xmlbuilder-js +// Definitions by: Wallymathieu +// : GaikwadPratik +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +export = xmlbuilder; + +declare namespace xmlbuilder { + + class XMLDocType { + clone(): XMLDocType; + element(name: string, value?: Object): XMLDocType; + attList(elementName: string, attributeName: string, attributeType: string, defaultValueType?: string, defaultValue?: any): XMLDocType; + entity(name: string, value: any): XMLDocType; + pEntity(name: string, value: any): XMLDocType; + notation(name: string, value: any): XMLDocType; + cdata(value: string): XMLDocType; + comment(value: string): XMLDocType; + instruction(target: string, value: any): XMLDocType; + root(): XMLDocType; + document(): any; + toString(options?: XMLToStringOptions, level?: Number): string; + + ele(name: string, value?: Object): XMLDocType; + att(elementName: string, attributeName: string, attributeType: string, defaultValueType?: string, defaultValue?: any): XMLDocType; + ent(name: string, value: any): XMLDocType; + pent(name: string, value: any): XMLDocType; + not(name: string, value: any): 
XMLDocType; + dat(value: string): XMLDocType; + com(value: string): XMLDocType; + ins(target: string, value: any): XMLDocType; + up(): XMLDocType; + doc(): any; + } + + class XMLElementOrXMLNode { + // XMLElement: + clone(): XMLElementOrXMLNode; + attribute(name: any, value?: any): XMLElementOrXMLNode; + att(name: any, value?: any): XMLElementOrXMLNode; + removeAttribute(name: string): XMLElementOrXMLNode; + instruction(target: string, value: any): XMLElementOrXMLNode; + instruction(array: Array): XMLElementOrXMLNode; + instruction(obj: Object): XMLElementOrXMLNode; + ins(target: string, value: any): XMLElementOrXMLNode; + ins(array: Array): XMLElementOrXMLNode; + ins(obj: Object): XMLElementOrXMLNode; + a(name: any, value?: any): XMLElementOrXMLNode; + i(target: string, value: any): XMLElementOrXMLNode; + i(array: Array): XMLElementOrXMLNode; + i(obj: Object): XMLElementOrXMLNode; + toString(options?: XMLToStringOptions, level?: Number): string; + // XMLNode: + element(name: any, attributes?: Object, text?: any): XMLElementOrXMLNode; + ele(name: any, attributes?: Object, text?: any): XMLElementOrXMLNode; + insertBefore(name: any, attributes?: Object, text?: any): XMLElementOrXMLNode; + insertAfter(name: any, attributes?: Object, text?: any): XMLElementOrXMLNode; + remove(): XMLElementOrXMLNode; + node(name: any, attributes?: Object, text?: any): XMLElementOrXMLNode; + text(value: string): XMLElementOrXMLNode; + cdata(value: string): XMLElementOrXMLNode; + comment(value: string): XMLElementOrXMLNode; + raw(value: string): XMLElementOrXMLNode; + declaration(version: string, encoding: string, standalone: boolean): XMLElementOrXMLNode; + doctype(pubID: string, sysID: string): XMLDocType; + up(): XMLElementOrXMLNode; + importDocument(input: XMLElementOrXMLNode): XMLElementOrXMLNode; + root(): XMLElementOrXMLNode; + document(): any; + end(options?: XMLEndOptions): string; + prev(): XMLElementOrXMLNode; + next(): XMLElementOrXMLNode; + nod(name: any, attributes?: Object, text?: any): XMLElementOrXMLNode; + txt(value: string): XMLElementOrXMLNode; + dat(value: string): XMLElementOrXMLNode; + com(value: string): XMLElementOrXMLNode; + doc(): XMLElementOrXMLNode; + dec(version: string, encoding: string, standalone: boolean): XMLElementOrXMLNode; + dtd(pubID: string, sysID: string): XMLDocType; + e(name: any, attributes?: Object, text?: any): XMLElementOrXMLNode; + n(name: any, attributes?: Object, text?: any): XMLElementOrXMLNode; + t(value: string): XMLElementOrXMLNode; + d(value: string): XMLElementOrXMLNode; + c(value: string): XMLElementOrXMLNode; + r(value: string): XMLElementOrXMLNode; + u(): XMLElementOrXMLNode; + } + + interface XMLDec { + version?: string; + encoding?: string; + standalone?: boolean; + } + + interface XMLDtd { + pubID?: string; + sysID?: string; + } + + interface XMLStringifier { + [x: string]: ((v: any) => string) | string; + } + + interface XMLWriter { + [x: string]: ((e: XMLElementOrXMLNode, options: WriterOptions, level?: number) => void); + } + + interface XMLCreateOptions { + headless?: boolean; + keepNullNodes?: boolean; + keepNullAttributes?: boolean; + ignoreDecorators?: boolean; + separateArrayItems?: boolean; + noDoubleEncoding?: boolean; + stringify?: XMLStringifier; + } + + interface XMLToStringOptions { + pretty?: boolean; + indent?: string; + offset?: number; + newline?: string; + allowEmpty?: boolean; + spacebeforeslash?: string; + } + + interface XMLEndOptions extends XMLToStringOptions { + writer?: XMLWriter; + } + + interface WriterOptions { + pretty?: 
boolean; + indent?: string; + newline?: string; + offset?: number; + allowEmpty?: boolean; + dontPrettyTextNodes?: boolean; + spaceBeforeSlash?: string | boolean; + user? :any; + state?: WriterState; + } + + enum WriterState { + None = 0, + OpenTag = 1, + InsideTag = 2, + CloseTag = 3 + } + + function create(nameOrObjSpec: string | { [name: string]: Object }, xmldecOrOptions?: XMLDec | XMLCreateOptions, doctypeOrOptions?: XMLDtd | XMLCreateOptions, options?: XMLCreateOptions): XMLElementOrXMLNode; + function begin(): XMLElementOrXMLNode; +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 258c987..d7e6ece 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,13 +1,16 @@ { - "name": "docker-slim-action", - "version": "1.0.0", + "name": "@kitabisa/docker-slim-action", + "version": "1.0.3", "lockfileVersion": 2, "requires": true, "packages": { "": { - "name": "docker-slim-action", - "version": "1.0.0", + "name": "@kitabisa/docker-slim-action", + "version": "1.0.3", "license": "MIT", + "dependencies": { + "@actions/cache": "^3.2.4" + }, "devDependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", @@ -20,11 +23,36 @@ "tar": "^6.1.13" } }, + "node_modules/@actions/cache": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz", + "integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==", + "dependencies": { + "@actions/core": "^1.10.0", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.1.1", + "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.13.0", + "semver": "^6.3.1", + "uuid": "^3.3.3" + } + }, + "node_modules/@actions/cache/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@actions/core": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", - "dev": true, "dependencies": { "@actions/http-client": "^2.0.1", "uuid": "^8.3.2" @@ -34,32 +62,275 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", - "dev": true, "dependencies": { "@actions/io": "^1.0.1" } }, + "node_modules/@actions/glob": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", + "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } + }, "node_modules/@actions/http-client": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz", - "integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==", - "dev": true, + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", + "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", "dependencies": { - "tunnel": "^0.0.6" + "tunnel": "^0.0.6", + "undici": "^5.25.4" } }, "node_modules/@actions/io": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz", - "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==", - "dev": true + "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==" + }, + "node_modules/@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-auth": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.7.1.tgz", + "integrity": "sha512-dyeQwvgthqs/SlPVQbZQetpslXceHd4i5a7M/7z/lGEAVwnSluabnQOjF2/dk/hhWgMISusv1Ytp4mQ8JNy62A==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.1.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-auth/node_modules/@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-http": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.4.tgz", + "integrity": "sha512-Fok9VVhMdxAFOtqiiAtg74fL0UJkt0z3D+ouUUxcRLzZNBioPRAMJFVxiWoJljYpXsRi4GDQHzQHDc9AiYaIUQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": 
"1.0.0-preview.13", + "@azure/core-util": "^1.1.1", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/core-http/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@azure/core-lro": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.1.tgz", + "integrity": "sha512-kXSlrNHOCTVZMxpXNRqzgh9/j4cnNXU5Hf2YjMyjddRhCXFiFRzmNaqwN+XO9rGTsCOIaaG7M67zZdyliXZG9g==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-paging": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.1.tgz", + "integrity": "sha512-3tKIQXSU3mlN+ITz0m2pXLnKK3oQ6/EVcW8ud011Iq+M0rx6Wnm7NUEpoMeOAEedeKlPtemrQzO6YWoDR71O5w==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-tracing": { + "version": "1.0.0-preview.13", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", + "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-util": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.8.1.tgz", + "integrity": "sha512-L3voj0StUdJ+YKomvwnTv7gHzguJO+a6h30pmmZdRprJCM+RJlGMPxzuh4R7lhQu1jNmEtaHX5wvTgWLDAmbGQ==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-util/node_modules/@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/logger": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.1.tgz", + "integrity": "sha512-/+4TtokaGgC+MnThdf6HyIH9Wrjp+CnCn3Nx3ggevN7FFjjNyjqg0yLlc2i9S+Z2uAzI8GYOo35Nzb1MhQ89MA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/ms-rest-js": { + "version": "2.7.0", + 
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz", + "integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.5.0" + } + }, + "node_modules/@azure/ms-rest-js/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + }, + "node_modules/@azure/storage-blob": { + "version": "12.17.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.17.0.tgz", + "integrity": "sha512-sM4vpsCpcCApagRW5UIjQNlNylo02my2opgp0Emi8x888hZUvJ3dN69Oq20cEGXkMUWnoCrBaB0zyS3yeB87sQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^3.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.8.0.tgz", + "integrity": "sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==", + "engines": { + "node": ">=8.0.0" + } }, "node_modules/@types/node": { "version": "18.14.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", - "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==", - "dev": true, - "optional": true + "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" + }, + "node_modules/@types/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, + "node_modules/@types/node-fetch/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/tunnel": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", + "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", + "dependencies": { + "@types/node": "*" + } }, "node_modules/@types/yauzl": { "version": "2.10.0", @@ -71,6 +342,36 @@ "@types/node": "*" } }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", @@ -89,6 +390,22 @@ "node": ">=10" } }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -106,6 +423,14 @@ } } }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -115,6 +440,22 @@ "once": "^1.4.0" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, "node_modules/extract-zip": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", @@ -144,6 +485,19 @@ "pend": "~1.2.0" } }, + "node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + 
"asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, "node_modules/fs": { "version": "0.0.1-security", "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz", @@ -201,6 +555,36 @@ "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==", "dev": true }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/minipass": { "version": "4.2.4", "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz", @@ -253,6 +637,25 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -288,7 +691,6 @@ "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true, "engines": { "node": ">= 0.6.0" } @@ -303,6 +705,19 @@ "once": "^1.3.1" } }, + "node_modules/sax": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz", + "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/tar": { "version": "6.1.13", "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", @@ -320,15 +735,35 @@ "node": ">=10" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": 
"sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "dev": true, "engines": { "node": ">=0.6.11 <=0.7.0 || >=0.7.3" } }, + "node_modules/undici": { + "version": "5.28.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", + "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, "node_modules/util": { "version": "0.10.4", "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", @@ -342,17 +777,50 @@ "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, "bin": { "uuid": "dist/bin/uuid" } }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "dev": true }, + "node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -371,11 +839,34 @@ } }, "dependencies": { + "@actions/cache": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz", + "integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==", + "requires": { + "@actions/core": "^1.10.0", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.1.1", + "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.13.0", + "semver": "^6.3.1", + "uuid": "^3.3.3" + }, + "dependencies": { + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } + } + }, 
"@actions/core": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", - "dev": true, "requires": { "@actions/http-client": "^2.0.1", "uuid": "^8.3.2" @@ -385,32 +876,239 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", - "dev": true, "requires": { "@actions/io": "^1.0.1" } }, + "@actions/glob": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", + "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", + "requires": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } + }, "@actions/http-client": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz", - "integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==", - "dev": true, + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", + "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", "requires": { - "tunnel": "^0.0.6" + "tunnel": "^0.0.6", + "undici": "^5.25.4" } }, "@actions/io": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz", - "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==", - "dev": true + "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==" + }, + "@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "requires": { + "tslib": "^2.2.0" + } + }, + "@azure/core-auth": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.7.1.tgz", + "integrity": "sha512-dyeQwvgthqs/SlPVQbZQetpslXceHd4i5a7M/7z/lGEAVwnSluabnQOjF2/dk/hhWgMISusv1Ytp4mQ8JNy62A==", + "requires": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.1.0", + "tslib": "^2.6.2" + }, + "dependencies": { + "@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "requires": { + "tslib": "^2.6.2" + } + } + } + }, + "@azure/core-http": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.4.tgz", + "integrity": "sha512-Fok9VVhMdxAFOtqiiAtg74fL0UJkt0z3D+ouUUxcRLzZNBioPRAMJFVxiWoJljYpXsRi4GDQHzQHDc9AiYaIUQ==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/core-util": "^1.1.1", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.5.0" + }, + "dependencies": { + "form-data": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + } + } + }, + "@azure/core-lro": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.1.tgz", + "integrity": "sha512-kXSlrNHOCTVZMxpXNRqzgh9/j4cnNXU5Hf2YjMyjddRhCXFiFRzmNaqwN+XO9rGTsCOIaaG7M67zZdyliXZG9g==", + "requires": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "dependencies": { + "@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "requires": { + "tslib": "^2.6.2" + } + } + } + }, + "@azure/core-paging": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.1.tgz", + "integrity": "sha512-3tKIQXSU3mlN+ITz0m2pXLnKK3oQ6/EVcW8ud011Iq+M0rx6Wnm7NUEpoMeOAEedeKlPtemrQzO6YWoDR71O5w==", + "requires": { + "tslib": "^2.6.2" + } + }, + "@azure/core-tracing": { + "version": "1.0.0-preview.13", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", + "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "requires": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + } + }, + "@azure/core-util": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.8.1.tgz", + "integrity": "sha512-L3voj0StUdJ+YKomvwnTv7gHzguJO+a6h30pmmZdRprJCM+RJlGMPxzuh4R7lhQu1jNmEtaHX5wvTgWLDAmbGQ==", + "requires": { + "@azure/abort-controller": "^2.0.0", + "tslib": "^2.6.2" + }, + "dependencies": { + "@azure/abort-controller": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz", + "integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==", + "requires": { + "tslib": "^2.6.2" + } + } + } + }, + "@azure/logger": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.1.tgz", + "integrity": "sha512-/+4TtokaGgC+MnThdf6HyIH9Wrjp+CnCn3Nx3ggevN7FFjjNyjqg0yLlc2i9S+Z2uAzI8GYOo35Nzb1MhQ89MA==", + "requires": { + "tslib": "^2.6.2" + } + }, + "@azure/ms-rest-js": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz", + "integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==", + "requires": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.5.0" + }, + "dependencies": { + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + } + } + }, + "@azure/storage-blob": { + "version": "12.17.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.17.0.tgz", + "integrity": 
"sha512-sM4vpsCpcCApagRW5UIjQNlNylo02my2opgp0Emi8x888hZUvJ3dN69Oq20cEGXkMUWnoCrBaB0zyS3yeB87sQ==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^3.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + } + }, + "@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==" + }, + "@opentelemetry/api": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.8.0.tgz", + "integrity": "sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==" }, "@types/node": { "version": "18.14.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", - "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==", - "dev": true, - "optional": true + "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" + }, + "@types/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", + "requires": { + "@types/node": "*", + "form-data": "^4.0.0" + }, + "dependencies": { + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + } + } + }, + "@types/tunnel": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", + "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", + "requires": { + "@types/node": "*" + } }, "@types/yauzl": { "version": "2.10.0", @@ -422,6 +1120,33 @@ "@types/node": "*" } }, + "abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "requires": { + "event-target-shim": "^5.0.0" + } + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", @@ -434,6 +1159,19 @@ "integrity": 
"sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", "dev": true }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, "debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -443,6 +1181,11 @@ "ms": "2.1.2" } }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" + }, "end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -452,6 +1195,16 @@ "once": "^1.4.0" } }, + "event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" + }, + "events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" + }, "extract-zip": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", @@ -473,6 +1226,16 @@ "pend": "~1.2.0" } }, + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, "fs": { "version": "0.0.1-security", "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz", @@ -520,6 +1283,27 @@ "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==", "dev": true }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, "minipass": { "version": "4.2.4", "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz", @@ -559,6 +1343,14 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node-fetch": { + "version": "2.7.0", + "resolved": 
"https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "requires": { + "whatwg-url": "^5.0.0" + } + }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -593,8 +1385,7 @@ "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" }, "pump": { "version": "3.0.0", @@ -606,6 +1397,16 @@ "once": "^1.3.1" } }, + "sax": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz", + "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==" + }, + "semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" + }, "tar": { "version": "6.1.13", "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", @@ -620,11 +1421,28 @@ "yallist": "^4.0.0" } }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, "tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "dev": true + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" + }, + "undici": { + "version": "5.28.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", + "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "requires": { + "@fastify/busboy": "^2.0.0" + } }, "util": { "version": "0.10.4", @@ -638,8 +1456,21 @@ "uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } }, "wrappy": { "version": "1.0.2", @@ -647,6 +1478,20 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "dev": true }, + 
"xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + } + }, + "xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" + }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/package.json b/package.json index 72fd8d2..9d1fae7 100644 --- a/package.json +++ b/package.json @@ -34,5 +34,8 @@ "os": "^0.1.2", "path": "^0.12.7", "tar": "^6.1.13" + }, + "dependencies": { + "@actions/cache": "^3.2.4" } } diff --git a/src/const.ts b/src/const.ts index 52e149e..69ef4cc 100644 --- a/src/const.ts +++ b/src/const.ts @@ -1,3 +1,4 @@ +export const cache = require('@actions/cache'); export const core = require('@actions/core'); export const fs = require('fs'); export const https = require('https'); diff --git a/src/index.ts b/src/index.ts index 71469ed..27efa1b 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,6 +1,6 @@ import { - core, fs, https, io, os, - path, shell, TMP_DIR + cache, core, fs, https, io, + os, path, shell, TMP_DIR } from './const'; const inputOverwrite = core.getBooleanInput('overwrite', {required: false}); @@ -46,7 +46,7 @@ async function get_slim() { VER = inputVersion; } } catch { - throw new Error('ERROR! Could not retrieve the current Slim version number.'); + throw new Error(`ERROR! Could not get the Slim version ${VER}.`); } URL = `https://downloads.dockerslim.com/releases/${VER}`; @@ -84,40 +84,65 @@ async function get_slim() { // Derive the filename FILENAME = `dist_${DIST}.${EXT}`; - const file = fs.createWriteStream(path.join(TMP_DIR, FILENAME)); - await new Promise((resolve, reject) => { - https.get(`${URL}/${FILENAME}`, (response) => { - response.pipe(file); - file.on('finish', () => { - file.close(); - resolve(file); + // Get the bin from cache + const cachePrefix = `slim-${VER}`; + const cacheKey = `${cachePrefix}-${DIST}`; + const cachePath = path.join('/tmp', cacheKey); + + try { + const cacheResult = await cache.restoreCache( + [ cachePath ], cacheKey, [ `${cachePrefix}-` ] + ); + + if (typeof cacheResult === 'undefined') { + throw new Error(`ERROR! Cache miss: ${cacheKey} was not found in the cache.`) + } + + core.debug(`${cacheKey} cache was restored.`) + + SLIM_PATH = cachePath; + } catch (e) { + const file = fs.createWriteStream(path.join(TMP_DIR, FILENAME)); + await new Promise((resolve, reject) => { + https.get(`${URL}/${FILENAME}`, (response) => { + response.pipe(file); + file.on('finish', () => { + file.close(); + resolve(file); + }); + }).on('error', (error) => { + fs.unlinkSync(path.join(TMP_DIR, FILENAME)); + reject(error); }); - }).on('error', (error) => { - fs.unlinkSync(path.join(TMP_DIR, FILENAME)); - reject(error); }); - }); - core.debug(`Unpacking ${path.join(TMP_DIR, FILENAME)}`); - if (EXT === 'zip') { - const extract = require('extract-zip'); - await extract(path.join(TMP_DIR, FILENAME), { - dir: TMP_DIR - }); - } else if (EXT === 'tar.gz') { - const tar = require('tar'); - await tar.x({ - file: path.join(TMP_DIR, FILENAME), - cwd: TMP_DIR - }); - } else { - throw new Error('ERROR! 
Unexpected file extension.'); - } + core.debug(`Unpacking ${path.join(TMP_DIR, FILENAME)}`); + if (EXT === 'zip') { + const extract = require('extract-zip'); + await extract(path.join(TMP_DIR, FILENAME), { + dir: TMP_DIR + }); + } else if (EXT === 'tar.gz') { + const tar = require('tar'); + await tar.x({ + file: path.join(TMP_DIR, FILENAME), + cwd: TMP_DIR + }); + } else { + throw new Error('ERROR! Unexpected file extension.'); + } - SLIM_PATH = path.join(TMP_DIR, `dist_${DIST}`); - core.addPath(SLIM_PATH); + SLIM_PATH = path.join(TMP_DIR, `dist_${DIST}`); - core.info(`Using slim version ${VER}`); + core.debug(`Copying ${SLIM_PATH} -> (${cachePath})`); + await io.cp(SLIM_PATH, cachePath, { recursive: true, force: true }); + + const cacheId = await cache.saveCache([ cachePath ], cacheKey); + core.debug(`${cacheKey} cache saved (ID: ${cacheId})`); + } finally { + core.addPath(SLIM_PATH); + core.info(`Using slim version ${VER}`); + } } async function run() { @@ -143,7 +168,7 @@ async function run() { throw new Error('ERROR! Cannot build over target'); } - const [image, tag] = report.target_reference.split(':') + const [image, tag] = report.target_reference.split(':'); if (inputOverwrite && tag) { core.info(`Overwriting ${image}:${tag} with slimmed version`);
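Note on the src/index.ts change above: get_slim() now checks @actions/cache for a previously downloaded Slim release before falling back to the HTTPS download, and on a cache miss it saves the unpacked distribution under /tmp/slim-<version>-<dist> so later workflow runs can reuse it. A minimal sketch of that restore-then-save pattern follows; it flattens the patch's try/catch/finally into a simple branch, and VER/DIST are hypothetical placeholder parameters rather than the action's real version and platform detection.

import * as cache from '@actions/cache';
import * as core from '@actions/core';
import * as path from 'path';

// Restore a previously cached Slim distribution, or download-and-save on a miss.
// VER and DIST are placeholders here; the action derives them at runtime.
async function restoreOrDownloadSlim(VER: string, DIST: string): Promise<string> {
  const cachePrefix = `slim-${VER}`;
  const cacheKey = `${cachePrefix}-${DIST}`;
  const cachePath = path.join('/tmp', cacheKey);

  // restoreCache resolves to the matched key, or undefined when nothing was found.
  const hit = await cache.restoreCache([cachePath], cacheKey, [`${cachePrefix}-`]);
  if (hit !== undefined) {
    core.debug(`${cacheKey} cache was restored.`);
    return cachePath;
  }

  // Cache miss: fetch and unpack the release into cachePath (download/extract omitted
  // in this sketch), then store it so later runs skip the download entirely.
  // ... download + extract into cachePath ...
  const cacheId = await cache.saveCache([cachePath], cacheKey);
  core.debug(`${cacheKey} cache saved (ID: ${cacheId})`);
  return cachePath;
}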